From 0a79de4eccfbbea2b4d7bf287350f6ab24cb0682 Mon Sep 17 00:00:00 2001
From: Mark Lobodzinski
Date: Fri, 4 May 2018 09:33:39 -0600
Subject: [PATCH] headers: Added 1.74.0 Vulkan header/registry files

---
 include/vulkan/vk_platform.h        |    92 +
 include/vulkan/vulkan.h             |    79 +
 include/vulkan/vulkan.hpp           | 42554 ++++++++++++++++++++++++++++++++++
 include/vulkan/vulkan_android.h     |   126 +
 include/vulkan/vulkan_core.h        |  7470 ++++++
 include/vulkan/vulkan_ios.h         |    58 +
 include/vulkan/vulkan_macos.h       |    58 +
 include/vulkan/vulkan_mir.h         |    65 +
 include/vulkan/vulkan_vi.h          |    58 +
 include/vulkan/vulkan_wayland.h     |    65 +
 include/vulkan/vulkan_win32.h       |   276 +
 include/vulkan/vulkan_xcb.h         |    66 +
 include/vulkan/vulkan_xlib.h        |    66 +
 include/vulkan/vulkan_xlib_xrandr.h |    54 +
 registry/cgenerator.py              |   417 +
 registry/generator.py               |   595 +
 registry/genvk.py                   |   529 +
 registry/reg.py                     |  1059 +
 registry/validusage.json            | 18392 +++++++++++++++
 registry/vk.xml                     |  8699 +++++++
 20 files changed, 80778 insertions(+)
 create mode 100644 include/vulkan/vk_platform.h
 create mode 100644 include/vulkan/vulkan.h
 create mode 100644 include/vulkan/vulkan.hpp
 create mode 100644 include/vulkan/vulkan_android.h
 create mode 100644 include/vulkan/vulkan_core.h
 create mode 100644 include/vulkan/vulkan_ios.h
 create mode 100644 include/vulkan/vulkan_macos.h
 create mode 100644 include/vulkan/vulkan_mir.h
 create mode 100644 include/vulkan/vulkan_vi.h
 create mode 100644 include/vulkan/vulkan_wayland.h
 create mode 100644 include/vulkan/vulkan_win32.h
 create mode 100644 include/vulkan/vulkan_xcb.h
 create mode 100644 include/vulkan/vulkan_xlib.h
 create mode 100644 include/vulkan/vulkan_xlib_xrandr.h
 create mode 100644 registry/cgenerator.py
 create mode 100644 registry/generator.py
 create mode 100644 registry/genvk.py
 create mode 100644 registry/reg.py
 create mode 100644 registry/validusage.json
 create mode 100644 registry/vk.xml

diff --git a/include/vulkan/vk_platform.h b/include/vulkan/vk_platform.h
new file mode 100644
index 0000000..7289299
--- /dev/null
+++ b/include/vulkan/vk_platform.h
@@ -0,0 +1,92 @@
+//
+// File: vk_platform.h
+//
+/*
+** Copyright (c) 2014-2017 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+#ifndef VK_PLATFORM_H_
+#define VK_PLATFORM_H_
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif // __cplusplus
+
+/*
+***************************************************************************************************
+*   Platform-specific directives and type declarations
+***************************************************************************************************
+*/
+
+/* Platform-specific calling convention macros.
+ *
+ * Platforms should define these so that Vulkan clients call Vulkan commands
+ * with the same calling conventions that the Vulkan implementation expects.
+ *
+ * VKAPI_ATTR - Placed before the return type in function declarations.
+ *              Useful for C++11 and GCC/Clang-style function attribute syntax.
+ * VKAPI_CALL - Placed after the return type in function declarations.
+ *              Useful for MSVC-style calling convention syntax.
+ * VKAPI_PTR  - Placed between the '(' and '*' in function pointer types.
+ *
+ * Function declaration:  VKAPI_ATTR void VKAPI_CALL vkCommand(void);
+ * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void);
+ */
+#if defined(_WIN32)
+    // On Windows, Vulkan commands use the stdcall convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL __stdcall
+    #define VKAPI_PTR  VKAPI_CALL
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7
+    #error "Vulkan isn't supported for the 'armeabi' NDK ABI"
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE)
+    // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat"
+    // calling convention, i.e. float parameters are passed in registers. This
+    // is true even if the rest of the application passes floats on the stack,
+    // as it does by default when compiling for the armeabi-v7a NDK ABI.
+    #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp")))
+    #define VKAPI_CALL
+    #define VKAPI_PTR  VKAPI_ATTR
+#else
+    // On other platforms, use the default calling convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL
+    #define VKAPI_PTR
+#endif
+
+#include <stddef.h>
+
+#if !defined(VK_NO_STDINT_H)
+    #if defined(_MSC_VER) && (_MSC_VER < 1600)
+        typedef signed   __int8  int8_t;
+        typedef unsigned __int8  uint8_t;
+        typedef signed   __int16 int16_t;
+        typedef unsigned __int16 uint16_t;
+        typedef signed   __int32 int32_t;
+        typedef unsigned __int32 uint32_t;
+        typedef signed   __int64 int64_t;
+        typedef unsigned __int64 uint64_t;
+    #else
+        #include <stdint.h>
+    #endif
+#endif // !defined(VK_NO_STDINT_H)
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif
diff --git a/include/vulkan/vulkan.h b/include/vulkan/vulkan.h
new file mode 100644
index 0000000..d05c849
--- /dev/null
+++ b/include/vulkan/vulkan.h
@@ -0,0 +1,79 @@
+#ifndef VULKAN_H_
+#define VULKAN_H_ 1
+
+/*
+** Copyright (c) 2015-2018 The Khronos Group Inc.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/ + +#include "vk_platform.h" +#include "vulkan_core.h" + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +#include "vulkan_android.h" +#endif + + +#ifdef VK_USE_PLATFORM_IOS_MVK +#include "vulkan_ios.h" +#endif + + +#ifdef VK_USE_PLATFORM_MACOS_MVK +#include "vulkan_macos.h" +#endif + + +#ifdef VK_USE_PLATFORM_MIR_KHR +#include +#include "vulkan_mir.h" +#endif + + +#ifdef VK_USE_PLATFORM_VI_NN +#include "vulkan_vi.h" +#endif + + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +#include +#include "vulkan_wayland.h" +#endif + + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#include +#include "vulkan_win32.h" +#endif + + +#ifdef VK_USE_PLATFORM_XCB_KHR +#include +#include "vulkan_xcb.h" +#endif + + +#ifdef VK_USE_PLATFORM_XLIB_KHR +#include +#include "vulkan_xlib.h" +#endif + + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT +#include +#include +#include "vulkan_xlib_xrandr.h" +#endif + +#endif // VULKAN_H_ diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp new file mode 100644 index 0000000..3ea82c8 --- /dev/null +++ b/include/vulkan/vulkan.hpp @@ -0,0 +1,42554 @@ +// Copyright (c) 2015-2018 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_HPP +#define VULKAN_HPP + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# include +# include +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if !defined(VULKAN_HPP_ASSERT) +# include +# define VULKAN_HPP_ASSERT assert +#endif +static_assert( VK_HEADER_VERSION == 74 , "Wrong VK_HEADER_VERSION!" ); + +// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. 
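The platform blocks in vulkan.h above only take effect when the matching VK_USE_PLATFORM_* macro is defined before the header is included (directly or via vulkan.hpp). A minimal sketch for a Win32 build; the surface names in the trailing comment come from vulkan_win32.h.

// Sketch: enabling the Win32 WSI declarations. The macro must be defined
// before the include so the guarded block above pulls in <windows.h> and
// vulkan_win32.h; without it only the platform-independent core is declared.
#define VK_USE_PLATFORM_WIN32_KHR
#include <vulkan/vulkan.h>

// VkWin32SurfaceCreateInfoKHR and vkCreateWin32SurfaceKHR are now visible.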
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION +#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION +# endif +#endif + +#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS) +# if defined(__clang__) +# if __has_feature(cxx_unrestricted_unions) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined(__GNUC__) +# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined(_MSC_VER) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# endif +#endif + +#if !defined(VULKAN_HPP_INLINE) +# if defined(__clang___) +# if __has_attribute(always_inline) +# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined(__GNUC__) +# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +# elif defined(_MSC_VER) +# define VULKAN_HPP_INLINE __forceinline +# else +# define VULKAN_HPP_INLINE inline +# endif +#endif + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) +# define VULKAN_HPP_TYPESAFE_EXPLICIT +#else +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +#endif + +#if defined(_MSC_VER) && (_MSC_VER <= 1800) +# define VULKAN_HPP_CONSTEXPR +#else +# define VULKAN_HPP_CONSTEXPR constexpr +#endif + + +#if !defined(VULKAN_HPP_NAMESPACE) +#define VULKAN_HPP_NAMESPACE vk +#endif + +#define VULKAN_HPP_STRINGIFY2(text) #text +#define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE) + +namespace VULKAN_HPP_NAMESPACE +{ + + template struct FlagTraits + { + enum { allFlags = 0 }; + }; + + template + class Flags + { + public: + VULKAN_HPP_CONSTEXPR Flags() + : m_mask(0) + { + } + + Flags(BitType bit) + : m_mask(static_cast(bit)) + { + } + + Flags(Flags const& rhs) + : m_mask(rhs.m_mask) + { + } + + explicit Flags(MaskType flags) + : m_mask(flags) + { + } + + Flags & operator=(Flags const& rhs) + { + m_mask = rhs.m_mask; + return *this; + } + + Flags & operator|=(Flags const& rhs) + { + m_mask |= rhs.m_mask; + return *this; + } + + Flags & operator&=(Flags const& rhs) + { + m_mask &= rhs.m_mask; + return *this; + } + + Flags & operator^=(Flags const& rhs) + { + m_mask ^= rhs.m_mask; + return *this; + } + + Flags operator|(Flags const& rhs) const + { + Flags result(*this); + result |= rhs; + return result; + } + + Flags operator&(Flags const& rhs) const + { + Flags result(*this); + result &= rhs; + return result; + } + + Flags operator^(Flags const& rhs) const + { + Flags result(*this); + result ^= rhs; + return result; + } + + bool operator!() const + { + return !m_mask; + } + + Flags operator~() const + { + Flags result(*this); + result.m_mask ^= FlagTraits::allFlags; + return result; + } + + bool operator==(Flags const& rhs) const + { + return m_mask == rhs.m_mask; + } + + bool operator!=(Flags const& rhs) const + { + return m_mask != rhs.m_mask; + } + + explicit operator bool() const + { + return !!m_mask; + } + + explicit operator MaskType() const + { + return m_mask; + } + + private: + MaskType m_mask; + }; + + template + Flags operator|(BitType bit, Flags const& flags) + { + return flags | bit; + } + + template + 
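vulkan.hpp reads a handful of configuration macros at include time, several of which are set up in the preprocessor blocks above. A sketch of an application translation unit overriding them; the chosen values are illustrative, not defaults the header requires.

// Sketch: tuning vulkan.hpp before it is included. Each macro below is
// honored by the blocks above; the values are only examples.
#define VULKAN_HPP_NAMESPACE my_vk           // generated API lands in ::my_vk instead of ::vk
#define VULKAN_HPP_ASSERT(expr) ((void)0)    // replace the default assert()-based checks
#define VULKAN_HPP_TYPESAFE_CONVERSION       // opt in to typesafe handle conversions on 32-bit builds
#include <vulkan/vulkan.hpp>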
Flags operator&(BitType bit, Flags const& flags) + { + return flags & bit; + } + + template + Flags operator^(BitType bit, Flags const& flags) + { + return flags ^ bit; + } + + + template + class Optional + { + public: + Optional(RefType & reference) { m_ptr = &reference; } + Optional(RefType * ptr) { m_ptr = ptr; } + Optional(std::nullptr_t) { m_ptr = nullptr; } + + operator RefType*() const { return m_ptr; } + RefType const* operator->() const { return m_ptr; } + explicit operator bool() const { return !!m_ptr; } + + private: + RefType *m_ptr; + }; + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + class ArrayProxy + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) + : m_count(0) + , m_ptr(nullptr) + {} + + ArrayProxy(T & ptr) + : m_count(1) + , m_ptr(&ptr) + {} + + ArrayProxy(uint32_t count, T * ptr) + : m_count(count) + , m_ptr(ptr) + {} + + template + ArrayProxy(std::array::type, N> & data) + : m_count(N) + , m_ptr(data.data()) + {} + + template + ArrayProxy(std::array::type, N> const& data) + : m_count(N) + , m_ptr(data.data()) + {} + + template ::type>> + ArrayProxy(std::vector::type, Allocator> & data) + : m_count(static_cast(data.size())) + , m_ptr(data.data()) + {} + + template ::type>> + ArrayProxy(std::vector::type, Allocator> const& data) + : m_count(static_cast(data.size())) + , m_ptr(data.data()) + {} + + ArrayProxy(std::initializer_list const& data) + : m_count(static_cast(data.end() - data.begin())) + , m_ptr(data.begin()) + {} + + const T * begin() const + { + return m_ptr; + } + + const T * end() const + { + return m_ptr + m_count; + } + + const T & front() const + { + VULKAN_HPP_ASSERT(m_count && m_ptr); + return *m_ptr; + } + + const T & back() const + { + VULKAN_HPP_ASSERT(m_count && m_ptr); + return *(m_ptr + m_count - 1); + } + + bool empty() const + { + return (m_count == 0); + } + + uint32_t size() const + { + return m_count; + } + + T * data() const + { + return m_ptr; + } + + private: + uint32_t m_count; + T * m_ptr; + }; +#endif + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + + template class UniqueHandleTraits; + + template + class UniqueHandle : public UniqueHandleTraits::deleter + { + private: + using Deleter = typename UniqueHandleTraits::deleter; + public: + explicit UniqueHandle( Type const& value = Type(), Deleter const& deleter = Deleter() ) + : Deleter( deleter) + , m_value( value ) + {} + + UniqueHandle( UniqueHandle const& ) = delete; + + UniqueHandle( UniqueHandle && other ) + : Deleter( std::move( static_cast( other ) ) ) + , m_value( other.release() ) + {} + + ~UniqueHandle() + { + if ( m_value ) this->destroy( m_value ); + } + + UniqueHandle & operator=( UniqueHandle const& ) = delete; + + UniqueHandle & operator=( UniqueHandle && other ) + { + reset( other.release() ); + *static_cast(this) = std::move( static_cast(other) ); + return *this; + } + + explicit operator bool() const + { + return m_value.operator bool(); + } + + Type const* operator->() const + { + return &m_value; + } + + Type * operator->() + { + return &m_value; + } + + Type const& operator*() const + { + return m_value; + } + + Type & operator*() + { + return m_value; + } + + const Type & get() const + { + return m_value; + } + + Type & get() + { + return m_value; + } + + void reset( Type const& value = Type() ) + { + if ( m_value != value ) + { + if ( m_value ) this->destroy( m_value ); + m_value = value; + } + } + + Type release() + { + Type value = m_value; + m_value = nullptr; + return value; + } + + void swap( UniqueHandle & rhs ) + { + std::swap(m_value, 
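From the application side the Flags and ArrayProxy helpers above mostly stay invisible: enum bits combine into Flags values, and contiguous containers convert to the (count, pointer) pairs the C API expects. A sketch assuming the default vk namespace and the vk::Queue / vk::SubmitInfo wrappers generated later in this header.

#include <vulkan/vulkan.hpp>
#include <vector>

void flagsAndProxySketch(vk::Queue queue, std::vector<vk::SubmitInfo> const& submits, vk::Fence fence)
{
  // Flags<BitType> keeps bitmask arithmetic type-safe: mixing unrelated
  // flag enums fails to compile instead of silently ORing integers.
  vk::BufferUsageFlags usage = vk::BufferUsageFlagBits::eTransferSrc
                             | vk::BufferUsageFlagBits::eVertexBuffer;
  if (usage & vk::BufferUsageFlagBits::eVertexBuffer)
  {
    // bit is set
  }

  // ArrayProxy<const SubmitInfo> is built implicitly from the vector, so the
  // enhanced-mode submit() call needs no explicit count/pointer arguments.
  queue.submit(submits, fence);
}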
rhs.m_value); + std::swap(static_cast(*this), static_cast(rhs)); + } + + private: + Type m_value; + }; + + template + VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, UniqueHandle & rhs ) + { + lhs.swap( rhs ); + } +#endif + + + template struct isStructureChainValid { enum { value = false }; }; + + template + class StructureChainElement + { + public: + explicit operator Element&() { return value; } + explicit operator const Element&() const { return value; } + private: + Element value; + }; + + template + class StructureChain : private StructureChainElement... + { + public: + StructureChain() + { + link(); + } + + StructureChain(StructureChain const &rhs) + { + linkAndCopy(rhs); + } + + StructureChain& operator=(StructureChain const &rhs) + { + linkAndCopy(rhs); + return *this; + } + + template ClassType& get() { return static_cast(*this);} + + private: + template + void link() + { + } + + template + void link() + { + static_assert(isStructureChainValid::value, "The structure chain is not valid!"); + X& x = static_cast(*this); + Y& y = static_cast(*this); + x.pNext = &y; + link(); + } + + template + void linkAndCopy(StructureChain const &rhs) + { + static_cast(*this) = static_cast(rhs); + } + + template + void linkAndCopy(StructureChain const &rhs) + { + static_assert(isStructureChainValid::value, "The structure chain is not valid!"); + X& x = static_cast(*this); + Y& y = static_cast(*this); + x = static_cast(rhs); + x.pNext = &y; + linkAndCopy(rhs); + } + +}; + enum class Result + { + eSuccess = VK_SUCCESS, + eNotReady = VK_NOT_READY, + eTimeout = VK_TIMEOUT, + eEventSet = VK_EVENT_SET, + eEventReset = VK_EVENT_RESET, + eIncomplete = VK_INCOMPLETE, + eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, + eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, + eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, + eErrorDeviceLost = VK_ERROR_DEVICE_LOST, + eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, + eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, + eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, + eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, + eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, + eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, + eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, + eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, + eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, + eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, + eSuboptimalKHR = VK_SUBOPTIMAL_KHR, + eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, + eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, + eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, + eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string(Result value) + { + switch (value) + { + case Result::eSuccess: return "Success"; + case Result::eNotReady: return "NotReady"; + case Result::eTimeout: return "Timeout"; + case Result::eEventSet: return "EventSet"; + case Result::eEventReset: return "EventReset"; + case Result::eIncomplete: return "Incomplete"; + case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory"; + case 
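StructureChain is what makes pNext-based extension queries ergonomic: the chain stores the structures contiguously and link() wires their pNext members in order. A sketch assuming the Vulkan 1.1 property structures and the PhysicalDevice::getProperties2 wrapper generated later in this header.

#include <vulkan/vulkan.hpp>

void structureChainSketch(vk::PhysicalDevice gpu)
{
  // The chain's constructor links PhysicalDeviceIDProperties into the pNext
  // of PhysicalDeviceProperties2; isStructureChainValid<> rejects
  // combinations the registry does not allow.
  vk::StructureChain<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceIDProperties> chain;
  gpu.getProperties2(&chain.get<vk::PhysicalDeviceProperties2>());

  vk::PhysicalDeviceIDProperties const& idProps = chain.get<vk::PhysicalDeviceIDProperties>();
  (void)idProps.deviceUUID;   // filled in by the same call through the chained pNext
}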
Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory"; + case Result::eErrorInitializationFailed: return "ErrorInitializationFailed"; + case Result::eErrorDeviceLost: return "ErrorDeviceLost"; + case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed"; + case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent"; + case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent"; + case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent"; + case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver"; + case Result::eErrorTooManyObjects: return "ErrorTooManyObjects"; + case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported"; + case Result::eErrorFragmentedPool: return "ErrorFragmentedPool"; + case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory"; + case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle"; + case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; + case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; + case Result::eSuboptimalKHR: return "SuboptimalKHR"; + case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR"; + case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; + case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; + case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; + case Result::eErrorFragmentationEXT: return "ErrorFragmentationEXT"; + case Result::eErrorNotPermittedEXT: return "ErrorNotPermittedEXT"; + default: return "invalid"; + } + } + +#ifndef VULKAN_HPP_NO_EXCEPTIONS +#if defined(_MSC_VER) && (_MSC_VER == 1800) +# define noexcept _NOEXCEPT +#endif + + class ErrorCategoryImpl : public std::error_category + { + public: + virtual const char* name() const noexcept override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; } + virtual std::string message(int ev) const override { return to_string(static_cast(ev)); } + }; + +#if defined(_MSC_VER) && (_MSC_VER == 1800) +# undef noexcept +#endif + + VULKAN_HPP_INLINE const std::error_category& errorCategory() + { + static ErrorCategoryImpl instance; + return instance; + } + + VULKAN_HPP_INLINE std::error_code make_error_code(Result e) + { + return std::error_code(static_cast(e), errorCategory()); + } + + VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) + { + return std::error_condition(static_cast(e), errorCategory()); + } + +#if defined(_MSC_VER) && (_MSC_VER == 1800) +# define noexcept _NOEXCEPT +#endif + + class Error + { + public: + virtual ~Error() = default; + + virtual const char* what() const noexcept = 0; + }; + + class LogicError : public Error, public std::logic_error + { + public: + explicit LogicError( const std::string& what ) + : Error(), std::logic_error(what) {} + explicit LogicError( char const * what ) + : Error(), std::logic_error(what) {} + virtual ~LogicError() = default; + + virtual const char* what() const noexcept { return std::logic_error::what(); } + }; + + class SystemError : public Error, public std::system_error + { + public: + SystemError( std::error_code ec ) + : Error(), std::system_error(ec) {} + SystemError( std::error_code ec, std::string const& what ) + : Error(), std::system_error(ec, what) {} + SystemError( std::error_code ec, char const * what ) + : Error(), std::system_error(ec, what) {} + SystemError( int ev, std::error_category const& ecat ) + : Error(), std::system_error(ev, ecat) {} + SystemError( int ev, std::error_category const& ecat, 
std::string const& what) + : Error(), std::system_error(ev, ecat, what) {} + SystemError( int ev, std::error_category const& ecat, char const * what) + : Error(), std::system_error(ev, ecat, what) {} + virtual ~SystemError() = default; + + virtual const char* what() const noexcept { return std::system_error::what(); } + }; + +#if defined(_MSC_VER) && (_MSC_VER == 1800) +# undef noexcept +#endif + + class OutOfHostMemoryError : public SystemError + { + public: + OutOfHostMemoryError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + OutOfHostMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + }; + class OutOfDeviceMemoryError : public SystemError + { + public: + OutOfDeviceMemoryError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + OutOfDeviceMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + }; + class InitializationFailedError : public SystemError + { + public: + InitializationFailedError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + InitializationFailedError( char const * message ) + : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + }; + class DeviceLostError : public SystemError + { + public: + DeviceLostError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + DeviceLostError( char const * message ) + : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + }; + class MemoryMapFailedError : public SystemError + { + public: + MemoryMapFailedError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + MemoryMapFailedError( char const * message ) + : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + }; + class LayerNotPresentError : public SystemError + { + public: + LayerNotPresentError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + LayerNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + }; + class ExtensionNotPresentError : public SystemError + { + public: + ExtensionNotPresentError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + ExtensionNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + }; + class FeatureNotPresentError : public SystemError + { + public: + FeatureNotPresentError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + FeatureNotPresentError( char const * message ) + : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + }; + class IncompatibleDriverError : public SystemError + { + public: + IncompatibleDriverError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + IncompatibleDriverError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + }; + class TooManyObjectsError : public SystemError + { + public: + TooManyObjectsError( std::string const& message ) + : SystemError( 
make_error_code( Result::eErrorTooManyObjects ), message ) {} + TooManyObjectsError( char const * message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + }; + class FormatNotSupportedError : public SystemError + { + public: + FormatNotSupportedError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + FormatNotSupportedError( char const * message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + }; + class FragmentedPoolError : public SystemError + { + public: + FragmentedPoolError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + FragmentedPoolError( char const * message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + }; + class OutOfPoolMemoryError : public SystemError + { + public: + OutOfPoolMemoryError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + OutOfPoolMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + }; + class InvalidExternalHandleError : public SystemError + { + public: + InvalidExternalHandleError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + InvalidExternalHandleError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + }; + class SurfaceLostKHRError : public SystemError + { + public: + SurfaceLostKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + SurfaceLostKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + }; + class NativeWindowInUseKHRError : public SystemError + { + public: + NativeWindowInUseKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + NativeWindowInUseKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + }; + class OutOfDateKHRError : public SystemError + { + public: + OutOfDateKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + OutOfDateKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + }; + class IncompatibleDisplayKHRError : public SystemError + { + public: + IncompatibleDisplayKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + IncompatibleDisplayKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + }; + class ValidationFailedEXTError : public SystemError + { + public: + ValidationFailedEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + ValidationFailedEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + }; + class InvalidShaderNVError : public SystemError + { + public: + InvalidShaderNVError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + InvalidShaderNVError( char const * message ) + : SystemError( make_error_code( 
Result::eErrorInvalidShaderNV ), message ) {} + }; + class FragmentationEXTError : public SystemError + { + public: + FragmentationEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {} + FragmentationEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {} + }; + class NotPermittedEXTError : public SystemError + { + public: + NotPermittedEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + NotPermittedEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + }; + + VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) + { + switch ( result ) + { + case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError ( message ); + case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError ( message ); + case Result::eErrorInitializationFailed: throw InitializationFailedError ( message ); + case Result::eErrorDeviceLost: throw DeviceLostError ( message ); + case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError ( message ); + case Result::eErrorLayerNotPresent: throw LayerNotPresentError ( message ); + case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError ( message ); + case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError ( message ); + case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError ( message ); + case Result::eErrorTooManyObjects: throw TooManyObjectsError ( message ); + case Result::eErrorFormatNotSupported: throw FormatNotSupportedError ( message ); + case Result::eErrorFragmentedPool: throw FragmentedPoolError ( message ); + case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError ( message ); + case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError ( message ); + case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError ( message ); + case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError ( message ); + case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError ( message ); + case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError ( message ); + case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError ( message ); + case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError ( message ); + case Result::eErrorFragmentationEXT: throw FragmentationEXTError ( message ); + case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError ( message ); + default: throw SystemError( make_error_code( result ) ); + } + } +#endif +} // namespace VULKAN_HPP_NAMESPACE + +namespace std +{ + template <> + struct is_error_code_enum : public true_type + {}; +} + +namespace VULKAN_HPP_NAMESPACE +{ + + template + struct ResultValue + { + ResultValue( Result r, T & v ) + : result( r ) + , value( v ) + {} + + ResultValue( Result r, T && v ) + : result( r ) + , value( std::move( v ) ) + {} + + Result result; + T value; + + operator std::tuple() { return std::tuple(result, value); } + }; + + template + struct ResultValueType + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + typedef ResultValue type; +#else + typedef T type; +#endif + }; + + template <> + struct ResultValueType + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + typedef Result type; +#else + typedef void type; +#endif + }; + + VULKAN_HPP_INLINE ResultValueType::type createResultValue( Result result, char const * message ) + { +#ifdef 
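Taken together, the exception classes and throwResultException() above define the error contract of the enhanced API: a Result outside the expected success codes becomes an exception derived from vk::SystemError, while building with VULKAN_HPP_NO_EXCEPTIONS makes the same calls return Result/ResultValue instead. A sketch of the exception path, assuming the vk::Device::createBuffer wrapper generated later in this header.

#include <vulkan/vulkan.hpp>
#include <iostream>

vk::Buffer createBufferOrReport(vk::Device device, vk::BufferCreateInfo const& createInfo)
{
  try
  {
    // On failure the wrapper routes the VkResult through throwResultException().
    return device.createBuffer(createInfo);
  }
  catch (vk::OutOfDeviceMemoryError const& e)
  {
    std::cerr << "buffer allocation failed: " << e.what() << '\n';
  }
  catch (vk::SystemError const& e)   // catches every Result-derived error above
  {
    std::cerr << "vkCreateBuffer: " << e.code() << ' ' << e.what() << '\n';
  }
  return vk::Buffer();
}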
VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( result == Result::eSuccess ); + return result; +#else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } +#endif + } + + template + VULKAN_HPP_INLINE typename ResultValueType::type createResultValue( Result result, T & data, char const * message ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( result == Result::eSuccess ); + return ResultValue( result, data ); +#else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } + return std::move( data ); +#endif + } + + VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list successCodes ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +#endif + return result; + } + + template + VULKAN_HPP_INLINE ResultValue createResultValue( Result result, T & data, char const * message, std::initializer_list successCodes ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + throwResultException( result, message ); + } +#endif + return ResultValue( result, data ); + } + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits::deleter const& deleter ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( result == Result::eSuccess ); + return ResultValue>( result, UniqueHandle(data, deleter) ); +#else + if ( result != Result::eSuccess ) + { + throwResultException( result, message ); + } + return UniqueHandle(data, deleter); +#endif + } +#endif + + + struct AllocationCallbacks; + + template + class ObjectDestroy + { + public: + ObjectDestroy(OwnerType owner = OwnerType(), Optional allocator = nullptr) + : m_owner(owner) + , m_allocator(allocator) + {} + + OwnerType getOwner() const { return m_owner; } + Optional getAllocator() const { return m_allocator; } + + protected: + template + void destroy(T t) + { + m_owner.destroy(t, m_allocator); + } + + private: + OwnerType m_owner; + Optional m_allocator; + }; + + class NoParent; + + template <> + class ObjectDestroy + { + public: + ObjectDestroy( Optional allocator = nullptr ) + : m_allocator( allocator ) + {} + + Optional getAllocator() const { return m_allocator; } + + protected: + template + void destroy(T t) + { + t.destroy( m_allocator ); + } + + private: + Optional m_allocator; + }; + + template + class ObjectFree + { + public: + ObjectFree(OwnerType owner = OwnerType(), Optional allocator = nullptr) + : m_owner(owner) + , m_allocator(allocator) + {} + + OwnerType getOwner() const { return m_owner; } + Optional getAllocator() const { return m_allocator; } + + protected: + template + void destroy(T t) + { + m_owner.free(t, m_allocator); + } + + private: + OwnerType m_owner; + Optional m_allocator; + }; + + template + class PoolFree + { + public: + PoolFree(OwnerType owner = OwnerType(), PoolType pool = PoolType()) + : m_owner(owner) + , m_pool(pool) + {} + + OwnerType getOwner() const { return m_owner; } + PoolType getPool() const { return m_pool; } + + protected: 
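The deleter classes above (ObjectDestroy, ObjectFree, PoolFree) are what the create*Unique wrappers later in this header attach to their UniqueHandle results, so cleanup happens automatically when the handle leaves scope. A sketch assuming vk::Device::createBufferUnique and the vk::UniqueBuffer alias from the generated code below.

#include <vulkan/vulkan.hpp>

void uniqueHandleSketch(vk::Device device)
{
  vk::BufferCreateInfo createInfo({}, 1024, vk::BufferUsageFlagBits::eUniformBuffer,
                                  vk::SharingMode::eExclusive);

  // The deleter is ObjectDestroy<Device>: it remembers the owning device (and
  // optional allocator) and calls device.destroy(buffer) from ~UniqueHandle.
  vk::UniqueBuffer buffer = device.createBufferUnique(createInfo);

  vk::Buffer raw = buffer.get();   // underlying handle, still owned by 'buffer'
  (void)raw;
}   // leaving scope destroys the VkBuffer exactly once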
+ template + void destroy(T t) + { + m_owner.free(m_pool, t); + } + + private: + OwnerType m_owner; + PoolType m_pool; + }; + +class DispatchLoaderStatic +{ +public: + VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex); + } + VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const + { + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex); + } +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display); + } +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers); + } + VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets); + } + VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory); + } + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo); + } + VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset); + } + VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos); + } + VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos); + } + VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const + { + return ::vkBindImageMemory( device, image, memory, memoryOffset); + } + VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const + { + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos); + } + VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos); + } + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo); + } + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags); + } + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const + { + return 
::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents); + } + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const + { + return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); + } + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType); + } + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline); + } + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); + } + void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const + { + return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); + } + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects); + } + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const + { + return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); + } + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); + } + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); + } + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); + } + void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const + { + return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); + } + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, 
uint32_t regionCount, const VkBufferImageCopy* pRegions ) const + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); + } + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const + { + return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); + } + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo); + } + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer); + } + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo); + } + void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ); + } + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + { + return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); + } + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + { + return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); + } + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset); + } + void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); + } + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const + { + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); + } + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride); + } + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } + void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride); + } + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, 
VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const + { + return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); + } + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer); + } + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query); + } + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const + { + return ::vkCmdEndRenderPass( commandBuffer); + } + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const + { + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers); + } + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data); + } + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo); + } + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const + { + return ::vkCmdNextSubpass( commandBuffer, contents); + } + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const + { + return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); + } + void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const + { + return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo); + } + void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const + { + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues); + } + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const + { + return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites); + } + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData); + } + void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const + { + return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo); + } + void vkCmdResetEvent( VkCommandBuffer 
commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const + { + return ::vkCmdResetEvent( commandBuffer, event, stageMask); + } + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const + { + return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount); + } + void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const + { + return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); + } + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants); + } + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor); + } + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds); + } + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const + { + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask); + } + void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const + { + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask); + } + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const + { + return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles); + } + void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const + { + return ::vkCmdSetEvent( commandBuffer, event, stageMask); + } + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const + { + return ::vkCmdSetLineWidth( commandBuffer, lineWidth); + } + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo); + } + void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const + { + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors); + } + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask); + } + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference); + } + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask); + } + void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const + { + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, 
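Every member of DispatchLoaderStatic is a one-line forward to the statically linked C entry point of the same name, so calling through an instance is equivalent to calling the global function. A sketch using a few of the dynamic-state commands listed above, assuming the command buffer is already in the recording state.

#include <vulkan/vulkan.hpp>

void dynamicStateSketch(VkCommandBuffer cmd, VkExtent2D extent)
{
  vk::DispatchLoaderStatic d;   // stateless; the default dispatcher for all wrappers

  VkViewport viewport = { 0.0f, 0.0f,
                          static_cast<float>(extent.width), static_cast<float>(extent.height),
                          0.0f, 1.0f };
  VkRect2D scissor = { { 0, 0 }, extent };

  // Identical to calling vkCmdSetViewport / vkCmdSetScissor / vkCmdSetLineWidth directly.
  d.vkCmdSetViewport(cmd, 0, 1, &viewport);
  d.vkCmdSetScissor(cmd, 0, 1, &scissor);
  d.vkCmdSetLineWidth(cmd, 1.0f);
}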
pViewports); + } + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings); + } + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const + { + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData); + } + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const + { + return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); + } + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const + { + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker); + } + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const + { + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query); + } +#ifdef VK_USE_PLATFORM_ANDROID_KHR + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer); + } + VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const + { + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView); + } + VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool); + } + VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback); + } + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* 
pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger); + } + VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool); + } + VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout); + } + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const + { + return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); + } + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); + } + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice); + } + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode); + } + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } + VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent); + } + VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence); + } + VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer); + } + VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const + { + return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); + } +#ifdef VK_USE_PLATFORM_IOS_MVK + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { 
+ return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage); + } + VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView); + } + VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const + { + return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout); + } + VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const + { + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance); + } +#ifdef VK_USE_PLATFORM_MACOS_MVK + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +#ifdef VK_USE_PLATFORM_MIR_KHR + VkResult vkCreateMirSurfaceKHR( VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateMirSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const + { + return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable); + } + VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache); + } + VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout); + } + VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool); + } + VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass); + } + VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler); + } + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const + 
{ + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion); + } + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion); + } + VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore); + } + VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule); + } + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const + { + return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); + } + VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const + { + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain); + } + VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache); + } +#ifdef VK_USE_PLATFORM_VI_NN + VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_VI_NN*/ +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface); + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const 
VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const + { + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo); + } + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const + { + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo); + } + void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const + { + return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage); + } + void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyBuffer( device, buffer, pAllocator); + } + void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyBufferView( device, bufferView, pAllocator); + } + void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator); + } + void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator); + } + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator); + } + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator); + } + void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator); + } + void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator); + } + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator); + } + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyDevice( device, pAllocator); + } + void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyEvent( device, event, pAllocator); + } + void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyFence( device, fence, pAllocator); + } + void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator); + } + void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyImage( device, image, pAllocator); + } + void vkDestroyImageView( VkDevice device, VkImageView 
imageView, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyImageView( device, imageView, pAllocator); + } + void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator); + } + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyInstance( instance, pAllocator); + } + void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator); + } + void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyPipeline( device, pipeline, pAllocator); + } + void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator); + } + void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator); + } + void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator); + } + void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator); + } + void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySampler( device, sampler, pAllocator); + } + void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator); + } + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator); + } + void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySemaphore( device, semaphore, pAllocator); + } + void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator); + } + void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator); + } + void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator); + } + void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator); + } + VkResult vkDeviceWaitIdle( VkDevice device ) const + { + return ::vkDeviceWaitIdle( device); + } + VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, 
const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo); + } + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const + { + return ::vkEndCommandBuffer( commandBuffer); + } + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties); + } + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties); + } + VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties); + } + VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties); + } + VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const + { + return ::vkEnumerateInstanceVersion( pApiVersion); + } + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const + { + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); + } + VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); + } + VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices); + } + VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges); + } + void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const + { + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers); + } + VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets); + } + void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const + { + return ::vkFreeMemory( device, memory, pAllocator); + } +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties); + } +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* 
pMemoryRequirements ) const + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements); + } + void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + { + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements); + } + void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements); + } + void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const + { + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport); + } + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport); + } + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const + { + return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); + } + void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const + { + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); + } + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const + { + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities); + } + VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const + { + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes); + } + void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const + { + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes); + } + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const + { + return ::vkGetDeviceProcAddr( device, pName); + } + void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue); + } + void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const + { + return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue); + } + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties); + } + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities); + } + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice 
physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays); + } + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const + { + return ::vkGetEventStatus( device, event); + } + VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const + { + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd); + } + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const + { + return ::vkGetFenceStatus( device, fence); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements); + } + void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + { + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements); + } + void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const + { + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements); + } + void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const + { + return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } + void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const + { + return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const + { + return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); + } + void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout); + } + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const + { + return ::vkGetInstanceProcAddr( instance, pName); + } +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const + { + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer); + } +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const + { + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd); + } + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, 
VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const + { + return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties); + } + VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + { + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_NV + VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const + { + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle); + } +#endif /*VK_USE_PLATFORM_WIN32_NV*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const + { + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const + { + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings); + } + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const + { + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties); + } + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const + { + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties); + } + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const + { + return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties); + } + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const + { + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties); + } + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const + { + return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties); + } + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* 
pExternalFenceProperties ) const + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties); + } + VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const + { + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties); + } + void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const + { + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); + } + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const + { + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); + } + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures); + } + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const + { + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures); + } + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const + { + return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures); + } + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties); + } + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const + { + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties); + } + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const + { + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties); + } + void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const + { + return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits); + } + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const + { + return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties); + } + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, 
VkImageFormatProperties2* pImageFormatProperties ) const + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties); + } + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties); + } + void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties); + } + void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties); + } + void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties); + } +#ifdef VK_USE_PLATFORM_MIR_KHR + VkBool32 vkGetPhysicalDeviceMirPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection ) const + { + return ::vkGetPhysicalDeviceMirPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection); + } +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties); + } + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects); + } + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties); + } + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties); + } + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties); + } + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( 
physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); + } + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties); + } + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties); + } + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties); + } + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities); + } + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities); + } + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities); + } + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); + } + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats); + } + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes); + } + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported); + } +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( 
physicalDevice, queueFamilyIndex, display); + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id); + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID); + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData); + } + VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); + } +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay); + } +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const + { + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties); + } + void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity); + } + VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const + { + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo); + } + VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue); + } + VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const + { + return ::vkGetSwapchainImagesKHR( 
device, swapchain, pSwapchainImageCount, pSwapchainImages); + } + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const + { + return ::vkGetSwapchainStatusKHR( device, swapchain); + } + VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData); + } + VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const + { + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo); + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges); + } + VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData); + } + VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches); + } + VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches); + } + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo); + } + VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence); + } + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const + { + return ::vkQueueEndDebugUtilsLabelEXT( queue); + } + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo); + } + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const + { + return ::vkQueuePresentKHR( queue, pPresentInfo); + } + VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence); + } + VkResult vkQueueWaitIdle( VkQueue queue ) const + { + return ::vkQueueWaitIdle( queue); + } + VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const 
VkAllocationCallbacks* pAllocator, VkFence* pFence ) const + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence); + } + VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence); + } + VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const + { + return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices); + } + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const + { + return ::vkReleaseDisplayEXT( physicalDevice, display); + } + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const + { + return ::vkResetCommandBuffer( commandBuffer, flags); + } + VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const + { + return ::vkResetCommandPool( device, commandPool, flags); + } + VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const + { + return ::vkResetDescriptorPool( device, descriptorPool, flags); + } + VkResult vkResetEvent( VkDevice device, VkEvent event ) const + { + return ::vkResetEvent( device, event); + } + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const + { + return ::vkResetFences( device, fenceCount, pFences); + } + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo); + } + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo); + } + VkResult vkSetEvent( VkDevice device, VkEvent event ) const + { + return ::vkSetEvent( device, event); + } + void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata); + } + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData); + } + void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const + { + return ::vkTrimCommandPool( device, commandPool, flags); + } + void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const + { + return ::vkTrimCommandPoolKHR( device, commandPool, flags); + } + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const + { + return ::vkUnmapMemory( device, memory); + } + VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const + { + return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices); + } + 
void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData); + } + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData); + } + void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const + { + return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies); + } + VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout); + } +}; + using SampleMask = uint32_t; + + using Bool32 = uint32_t; + + using DeviceSize = uint64_t; + + enum class FramebufferCreateFlagBits + { + }; + + using FramebufferCreateFlags = Flags; + + enum class QueryPoolCreateFlagBits + { + }; + + using QueryPoolCreateFlags = Flags; + + enum class RenderPassCreateFlagBits + { + }; + + using RenderPassCreateFlags = Flags; + + enum class SamplerCreateFlagBits + { + }; + + using SamplerCreateFlags = Flags; + + enum class PipelineLayoutCreateFlagBits + { + }; + + using PipelineLayoutCreateFlags = Flags; + + enum class PipelineCacheCreateFlagBits + { + }; + + using PipelineCacheCreateFlags = Flags; + + enum class PipelineDepthStencilStateCreateFlagBits + { + }; + + using PipelineDepthStencilStateCreateFlags = Flags; + + enum class PipelineDynamicStateCreateFlagBits + { + }; + + using PipelineDynamicStateCreateFlags = Flags; + + enum class PipelineColorBlendStateCreateFlagBits + { + }; + + using PipelineColorBlendStateCreateFlags = Flags; + + enum class PipelineMultisampleStateCreateFlagBits + { + }; + + using PipelineMultisampleStateCreateFlags = Flags; + + enum class PipelineRasterizationStateCreateFlagBits + { + }; + + using PipelineRasterizationStateCreateFlags = Flags; + + enum class PipelineViewportStateCreateFlagBits + { + }; + + using PipelineViewportStateCreateFlags = Flags; + + enum class PipelineTessellationStateCreateFlagBits + { + }; + + using PipelineTessellationStateCreateFlags = Flags; + + enum class PipelineInputAssemblyStateCreateFlagBits + { + }; + + using PipelineInputAssemblyStateCreateFlags = Flags; + + enum class PipelineVertexInputStateCreateFlagBits + { + }; + + using PipelineVertexInputStateCreateFlags = Flags; + + enum class PipelineShaderStageCreateFlagBits + { + }; + + using PipelineShaderStageCreateFlags = Flags; + + enum class BufferViewCreateFlagBits + { + }; + + using BufferViewCreateFlags = Flags; + + enum class InstanceCreateFlagBits + { + }; + + using InstanceCreateFlags = Flags; + + enum class DeviceCreateFlagBits + { + }; + + using DeviceCreateFlags = Flags; + + enum class ImageViewCreateFlagBits + { + }; + + using ImageViewCreateFlags = Flags; + + enum class SemaphoreCreateFlagBits + { + }; + + using SemaphoreCreateFlags = Flags; + + enum class ShaderModuleCreateFlagBits + { + }; + + using ShaderModuleCreateFlags = Flags; + + enum class EventCreateFlagBits + { + }; + + using EventCreateFlags = Flags; + + enum 
class MemoryMapFlagBits + { + }; + + using MemoryMapFlags = Flags; + + enum class DescriptorPoolResetFlagBits + { + }; + + using DescriptorPoolResetFlags = Flags; + + enum class DescriptorUpdateTemplateCreateFlagBits + { + }; + + using DescriptorUpdateTemplateCreateFlags = Flags; + + using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; + + enum class DisplayModeCreateFlagBitsKHR + { + }; + + using DisplayModeCreateFlagsKHR = Flags; + + enum class DisplaySurfaceCreateFlagBitsKHR + { + }; + + using DisplaySurfaceCreateFlagsKHR = Flags; + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + enum class AndroidSurfaceCreateFlagBitsKHR + { + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + using AndroidSurfaceCreateFlagsKHR = Flags; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + enum class MirSurfaceCreateFlagBitsKHR + { + }; +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + using MirSurfaceCreateFlagsKHR = Flags; +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + +#ifdef VK_USE_PLATFORM_VI_NN + enum class ViSurfaceCreateFlagBitsNN + { + }; +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_VI_NN + using ViSurfaceCreateFlagsNN = Flags; +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + enum class WaylandSurfaceCreateFlagBitsKHR + { + }; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + using WaylandSurfaceCreateFlagsKHR = Flags; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + enum class Win32SurfaceCreateFlagBitsKHR + { + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + using Win32SurfaceCreateFlagsKHR = Flags; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + enum class XlibSurfaceCreateFlagBitsKHR + { + }; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + using XlibSurfaceCreateFlagsKHR = Flags; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + enum class XcbSurfaceCreateFlagBitsKHR + { + }; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + using XcbSurfaceCreateFlagsKHR = Flags; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + enum class IOSSurfaceCreateFlagBitsMVK + { + }; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + using IOSSurfaceCreateFlagsMVK = Flags; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + enum class MacOSSurfaceCreateFlagBitsMVK + { + }; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + using MacOSSurfaceCreateFlagsMVK = Flags; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + enum class CommandPoolTrimFlagBits + { + }; + + using CommandPoolTrimFlags = Flags; + + using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; + + enum class PipelineViewportSwizzleStateCreateFlagBitsNV + { + }; + + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + + enum class PipelineDiscardRectangleStateCreateFlagBitsEXT + { + }; + + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + + enum class PipelineCoverageToColorStateCreateFlagBitsNV + { + }; + + using PipelineCoverageToColorStateCreateFlagsNV = Flags; + + enum class PipelineCoverageModulationStateCreateFlagBitsNV + { + }; + + using PipelineCoverageModulationStateCreateFlagsNV = Flags; + + enum class ValidationCacheCreateFlagBitsEXT + { + }; + + using ValidationCacheCreateFlagsEXT = Flags; + + enum class 
DebugUtilsMessengerCreateFlagBitsEXT + { + }; + + using DebugUtilsMessengerCreateFlagsEXT = Flags; + + enum class DebugUtilsMessengerCallbackDataFlagBitsEXT + { + }; + + using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; + + enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT + { + }; + + using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; + + class DeviceMemory + { + public: + VULKAN_HPP_CONSTEXPR DeviceMemory() + : m_deviceMemory(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) + : m_deviceMemory(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) + : m_deviceMemory( deviceMemory ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DeviceMemory & operator=(VkDeviceMemory deviceMemory) + { + m_deviceMemory = deviceMemory; + return *this; + } +#endif + + DeviceMemory & operator=( std::nullptr_t ) + { + m_deviceMemory = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DeviceMemory const & rhs ) const + { + return m_deviceMemory == rhs.m_deviceMemory; + } + + bool operator!=(DeviceMemory const & rhs ) const + { + return m_deviceMemory != rhs.m_deviceMemory; + } + + bool operator<(DeviceMemory const & rhs ) const + { + return m_deviceMemory < rhs.m_deviceMemory; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const + { + return m_deviceMemory; + } + + explicit operator bool() const + { + return m_deviceMemory != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_deviceMemory == VK_NULL_HANDLE; + } + + private: + VkDeviceMemory m_deviceMemory; + }; + + static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" ); + + class CommandPool + { + public: + VULKAN_HPP_CONSTEXPR CommandPool() + : m_commandPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) + : m_commandPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) + : m_commandPool( commandPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + CommandPool & operator=(VkCommandPool commandPool) + { + m_commandPool = commandPool; + return *this; + } +#endif + + CommandPool & operator=( std::nullptr_t ) + { + m_commandPool = VK_NULL_HANDLE; + return *this; + } + + bool operator==( CommandPool const & rhs ) const + { + return m_commandPool == rhs.m_commandPool; + } + + bool operator!=(CommandPool const & rhs ) const + { + return m_commandPool != rhs.m_commandPool; + } + + bool operator<(CommandPool const & rhs ) const + { + return m_commandPool < rhs.m_commandPool; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const + { + return m_commandPool; + } + + explicit operator bool() const + { + return m_commandPool != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_commandPool == VK_NULL_HANDLE; + } + + private: + VkCommandPool m_commandPool; + }; + + static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" 
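+  // Illustrative usage sketch (assumptions: the default vk namespace and a
+  // VkCommandPool handle, here called rawCommandPool, obtained from the C API
+  // elsewhere). Each handle class above, like the empty *FlagBits enums and their
+  // Flags aliases before it, is a zero-overhead wrapper around the corresponding
+  // C type: it default-constructs to VK_NULL_HANDLE, is comparable and
+  // boolean-testable, and converts back to the C handle explicitly, which is why
+  // the static_asserts can insist on identical size.
+  //
+  //   vk::CommandPool pool;                                      // == VK_NULL_HANDLE
+  //   assert( !pool );                                           // operator! tests for the null handle
+  //   pool = vk::CommandPool( rawCommandPool );                  // wrap an existing C handle
+  //   VkCommandPool cPool = static_cast<VkCommandPool>( pool );  // explicit conversion back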
); + + class Buffer + { + public: + VULKAN_HPP_CONSTEXPR Buffer() + : m_buffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) + : m_buffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) + : m_buffer( buffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Buffer & operator=(VkBuffer buffer) + { + m_buffer = buffer; + return *this; + } +#endif + + Buffer & operator=( std::nullptr_t ) + { + m_buffer = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Buffer const & rhs ) const + { + return m_buffer == rhs.m_buffer; + } + + bool operator!=(Buffer const & rhs ) const + { + return m_buffer != rhs.m_buffer; + } + + bool operator<(Buffer const & rhs ) const + { + return m_buffer < rhs.m_buffer; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const + { + return m_buffer; + } + + explicit operator bool() const + { + return m_buffer != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_buffer == VK_NULL_HANDLE; + } + + private: + VkBuffer m_buffer; + }; + + static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" ); + + class BufferView + { + public: + VULKAN_HPP_CONSTEXPR BufferView() + : m_bufferView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) + : m_bufferView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) + : m_bufferView( bufferView ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + BufferView & operator=(VkBufferView bufferView) + { + m_bufferView = bufferView; + return *this; + } +#endif + + BufferView & operator=( std::nullptr_t ) + { + m_bufferView = VK_NULL_HANDLE; + return *this; + } + + bool operator==( BufferView const & rhs ) const + { + return m_bufferView == rhs.m_bufferView; + } + + bool operator!=(BufferView const & rhs ) const + { + return m_bufferView != rhs.m_bufferView; + } + + bool operator<(BufferView const & rhs ) const + { + return m_bufferView < rhs.m_bufferView; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const + { + return m_bufferView; + } + + explicit operator bool() const + { + return m_bufferView != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_bufferView == VK_NULL_HANDLE; + } + + private: + VkBufferView m_bufferView; + }; + + static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" 
); + + class Image + { + public: + VULKAN_HPP_CONSTEXPR Image() + : m_image(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) + : m_image(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) + : m_image( image ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Image & operator=(VkImage image) + { + m_image = image; + return *this; + } +#endif + + Image & operator=( std::nullptr_t ) + { + m_image = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Image const & rhs ) const + { + return m_image == rhs.m_image; + } + + bool operator!=(Image const & rhs ) const + { + return m_image != rhs.m_image; + } + + bool operator<(Image const & rhs ) const + { + return m_image < rhs.m_image; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const + { + return m_image; + } + + explicit operator bool() const + { + return m_image != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_image == VK_NULL_HANDLE; + } + + private: + VkImage m_image; + }; + + static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" ); + + class ImageView + { + public: + VULKAN_HPP_CONSTEXPR ImageView() + : m_imageView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) + : m_imageView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) + : m_imageView( imageView ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ImageView & operator=(VkImageView imageView) + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) + { + m_imageView = VK_NULL_HANDLE; + return *this; + } + + bool operator==( ImageView const & rhs ) const + { + return m_imageView == rhs.m_imageView; + } + + bool operator!=(ImageView const & rhs ) const + { + return m_imageView != rhs.m_imageView; + } + + bool operator<(ImageView const & rhs ) const + { + return m_imageView < rhs.m_imageView; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const + { + return m_imageView; + } + + explicit operator bool() const + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView; + }; + + static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" 
); + + class ShaderModule + { + public: + VULKAN_HPP_CONSTEXPR ShaderModule() + : m_shaderModule(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) + : m_shaderModule(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) + : m_shaderModule( shaderModule ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ShaderModule & operator=(VkShaderModule shaderModule) + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) + { + m_shaderModule = VK_NULL_HANDLE; + return *this; + } + + bool operator==( ShaderModule const & rhs ) const + { + return m_shaderModule == rhs.m_shaderModule; + } + + bool operator!=(ShaderModule const & rhs ) const + { + return m_shaderModule != rhs.m_shaderModule; + } + + bool operator<(ShaderModule const & rhs ) const + { + return m_shaderModule < rhs.m_shaderModule; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const + { + return m_shaderModule; + } + + explicit operator bool() const + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule m_shaderModule; + }; + + static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" ); + + class Pipeline + { + public: + VULKAN_HPP_CONSTEXPR Pipeline() + : m_pipeline(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) + : m_pipeline(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) + : m_pipeline( pipeline ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Pipeline & operator=(VkPipeline pipeline) + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) + { + m_pipeline = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Pipeline const & rhs ) const + { + return m_pipeline == rhs.m_pipeline; + } + + bool operator!=(Pipeline const & rhs ) const + { + return m_pipeline != rhs.m_pipeline; + } + + bool operator<(Pipeline const & rhs ) const + { + return m_pipeline < rhs.m_pipeline; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const + { + return m_pipeline; + } + + explicit operator bool() const + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline; + }; + + static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" 
); + + class PipelineLayout + { + public: + VULKAN_HPP_CONSTEXPR PipelineLayout() + : m_pipelineLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) + : m_pipelineLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) + : m_pipelineLayout( pipelineLayout ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PipelineLayout & operator=(VkPipelineLayout pipelineLayout) + { + m_pipelineLayout = pipelineLayout; + return *this; + } +#endif + + PipelineLayout & operator=( std::nullptr_t ) + { + m_pipelineLayout = VK_NULL_HANDLE; + return *this; + } + + bool operator==( PipelineLayout const & rhs ) const + { + return m_pipelineLayout == rhs.m_pipelineLayout; + } + + bool operator!=(PipelineLayout const & rhs ) const + { + return m_pipelineLayout != rhs.m_pipelineLayout; + } + + bool operator<(PipelineLayout const & rhs ) const + { + return m_pipelineLayout < rhs.m_pipelineLayout; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const + { + return m_pipelineLayout; + } + + explicit operator bool() const + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout; + }; + + static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" ); + + class Sampler + { + public: + VULKAN_HPP_CONSTEXPR Sampler() + : m_sampler(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) + : m_sampler(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) + : m_sampler( sampler ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Sampler & operator=(VkSampler sampler) + { + m_sampler = sampler; + return *this; + } +#endif + + Sampler & operator=( std::nullptr_t ) + { + m_sampler = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Sampler const & rhs ) const + { + return m_sampler == rhs.m_sampler; + } + + bool operator!=(Sampler const & rhs ) const + { + return m_sampler != rhs.m_sampler; + } + + bool operator<(Sampler const & rhs ) const + { + return m_sampler < rhs.m_sampler; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const + { + return m_sampler; + } + + explicit operator bool() const + { + return m_sampler != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_sampler == VK_NULL_HANDLE; + } + + private: + VkSampler m_sampler; + }; + + static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" 
); + + class DescriptorSet + { + public: + VULKAN_HPP_CONSTEXPR DescriptorSet() + : m_descriptorSet(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) + : m_descriptorSet(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) + : m_descriptorSet( descriptorSet ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorSet & operator=(VkDescriptorSet descriptorSet) + { + m_descriptorSet = descriptorSet; + return *this; + } +#endif + + DescriptorSet & operator=( std::nullptr_t ) + { + m_descriptorSet = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DescriptorSet const & rhs ) const + { + return m_descriptorSet == rhs.m_descriptorSet; + } + + bool operator!=(DescriptorSet const & rhs ) const + { + return m_descriptorSet != rhs.m_descriptorSet; + } + + bool operator<(DescriptorSet const & rhs ) const + { + return m_descriptorSet < rhs.m_descriptorSet; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const + { + return m_descriptorSet; + } + + explicit operator bool() const + { + return m_descriptorSet != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_descriptorSet == VK_NULL_HANDLE; + } + + private: + VkDescriptorSet m_descriptorSet; + }; + + static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" ); + + class DescriptorSetLayout + { + public: + VULKAN_HPP_CONSTEXPR DescriptorSetLayout() + : m_descriptorSetLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) + : m_descriptorSetLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) + : m_descriptorSetLayout( descriptorSetLayout ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) + { + m_descriptorSetLayout = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DescriptorSetLayout const & rhs ) const + { + return m_descriptorSetLayout == rhs.m_descriptorSetLayout; + } + + bool operator!=(DescriptorSetLayout const & rhs ) const + { + return m_descriptorSetLayout != rhs.m_descriptorSetLayout; + } + + bool operator<(DescriptorSetLayout const & rhs ) const + { + return m_descriptorSetLayout < rhs.m_descriptorSetLayout; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const + { + return m_descriptorSetLayout; + } + + explicit operator bool() const + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout; + }; + + static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" 
); + + class DescriptorPool + { + public: + VULKAN_HPP_CONSTEXPR DescriptorPool() + : m_descriptorPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) + : m_descriptorPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) + : m_descriptorPool( descriptorPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorPool & operator=(VkDescriptorPool descriptorPool) + { + m_descriptorPool = descriptorPool; + return *this; + } +#endif + + DescriptorPool & operator=( std::nullptr_t ) + { + m_descriptorPool = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DescriptorPool const & rhs ) const + { + return m_descriptorPool == rhs.m_descriptorPool; + } + + bool operator!=(DescriptorPool const & rhs ) const + { + return m_descriptorPool != rhs.m_descriptorPool; + } + + bool operator<(DescriptorPool const & rhs ) const + { + return m_descriptorPool < rhs.m_descriptorPool; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const + { + return m_descriptorPool; + } + + explicit operator bool() const + { + return m_descriptorPool != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_descriptorPool == VK_NULL_HANDLE; + } + + private: + VkDescriptorPool m_descriptorPool; + }; + + static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" ); + + class Fence + { + public: + VULKAN_HPP_CONSTEXPR Fence() + : m_fence(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) + : m_fence(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) + : m_fence( fence ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Fence & operator=(VkFence fence) + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) + { + m_fence = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Fence const & rhs ) const + { + return m_fence == rhs.m_fence; + } + + bool operator!=(Fence const & rhs ) const + { + return m_fence != rhs.m_fence; + } + + bool operator<(Fence const & rhs ) const + { + return m_fence < rhs.m_fence; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const + { + return m_fence; + } + + explicit operator bool() const + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence; + }; + + static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" 
); + + class Semaphore + { + public: + VULKAN_HPP_CONSTEXPR Semaphore() + : m_semaphore(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) + : m_semaphore(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) + : m_semaphore( semaphore ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Semaphore & operator=(VkSemaphore semaphore) + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) + { + m_semaphore = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Semaphore const & rhs ) const + { + return m_semaphore == rhs.m_semaphore; + } + + bool operator!=(Semaphore const & rhs ) const + { + return m_semaphore != rhs.m_semaphore; + } + + bool operator<(Semaphore const & rhs ) const + { + return m_semaphore < rhs.m_semaphore; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const + { + return m_semaphore; + } + + explicit operator bool() const + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore; + }; + + static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" ); + + class Event + { + public: + VULKAN_HPP_CONSTEXPR Event() + : m_event(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) + : m_event(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) + : m_event( event ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Event & operator=(VkEvent event) + { + m_event = event; + return *this; + } +#endif + + Event & operator=( std::nullptr_t ) + { + m_event = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Event const & rhs ) const + { + return m_event == rhs.m_event; + } + + bool operator!=(Event const & rhs ) const + { + return m_event != rhs.m_event; + } + + bool operator<(Event const & rhs ) const + { + return m_event < rhs.m_event; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const + { + return m_event; + } + + explicit operator bool() const + { + return m_event != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_event == VK_NULL_HANDLE; + } + + private: + VkEvent m_event; + }; + + static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" 
); + + class QueryPool + { + public: + VULKAN_HPP_CONSTEXPR QueryPool() + : m_queryPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) + : m_queryPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) + : m_queryPool( queryPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + QueryPool & operator=(VkQueryPool queryPool) + { + m_queryPool = queryPool; + return *this; + } +#endif + + QueryPool & operator=( std::nullptr_t ) + { + m_queryPool = VK_NULL_HANDLE; + return *this; + } + + bool operator==( QueryPool const & rhs ) const + { + return m_queryPool == rhs.m_queryPool; + } + + bool operator!=(QueryPool const & rhs ) const + { + return m_queryPool != rhs.m_queryPool; + } + + bool operator<(QueryPool const & rhs ) const + { + return m_queryPool < rhs.m_queryPool; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const + { + return m_queryPool; + } + + explicit operator bool() const + { + return m_queryPool != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_queryPool == VK_NULL_HANDLE; + } + + private: + VkQueryPool m_queryPool; + }; + + static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" ); + + class Framebuffer + { + public: + VULKAN_HPP_CONSTEXPR Framebuffer() + : m_framebuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) + : m_framebuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) + : m_framebuffer( framebuffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Framebuffer & operator=(VkFramebuffer framebuffer) + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) + { + m_framebuffer = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Framebuffer const & rhs ) const + { + return m_framebuffer == rhs.m_framebuffer; + } + + bool operator!=(Framebuffer const & rhs ) const + { + return m_framebuffer != rhs.m_framebuffer; + } + + bool operator<(Framebuffer const & rhs ) const + { + return m_framebuffer < rhs.m_framebuffer; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const + { + return m_framebuffer; + } + + explicit operator bool() const + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer; + }; + + static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" 
); + + class RenderPass + { + public: + VULKAN_HPP_CONSTEXPR RenderPass() + : m_renderPass(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) + : m_renderPass(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) + : m_renderPass( renderPass ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + RenderPass & operator=(VkRenderPass renderPass) + { + m_renderPass = renderPass; + return *this; + } +#endif + + RenderPass & operator=( std::nullptr_t ) + { + m_renderPass = VK_NULL_HANDLE; + return *this; + } + + bool operator==( RenderPass const & rhs ) const + { + return m_renderPass == rhs.m_renderPass; + } + + bool operator!=(RenderPass const & rhs ) const + { + return m_renderPass != rhs.m_renderPass; + } + + bool operator<(RenderPass const & rhs ) const + { + return m_renderPass < rhs.m_renderPass; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const + { + return m_renderPass; + } + + explicit operator bool() const + { + return m_renderPass != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_renderPass == VK_NULL_HANDLE; + } + + private: + VkRenderPass m_renderPass; + }; + + static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" ); + + class PipelineCache + { + public: + VULKAN_HPP_CONSTEXPR PipelineCache() + : m_pipelineCache(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) + : m_pipelineCache(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) + : m_pipelineCache( pipelineCache ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PipelineCache & operator=(VkPipelineCache pipelineCache) + { + m_pipelineCache = pipelineCache; + return *this; + } +#endif + + PipelineCache & operator=( std::nullptr_t ) + { + m_pipelineCache = VK_NULL_HANDLE; + return *this; + } + + bool operator==( PipelineCache const & rhs ) const + { + return m_pipelineCache == rhs.m_pipelineCache; + } + + bool operator!=(PipelineCache const & rhs ) const + { + return m_pipelineCache != rhs.m_pipelineCache; + } + + bool operator<(PipelineCache const & rhs ) const + { + return m_pipelineCache < rhs.m_pipelineCache; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const + { + return m_pipelineCache; + } + + explicit operator bool() const + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_pipelineCache == VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache; + }; + + static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" 
); + + class ObjectTableNVX + { + public: + VULKAN_HPP_CONSTEXPR ObjectTableNVX() + : m_objectTableNVX(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t ) + : m_objectTableNVX(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX ) + : m_objectTableNVX( objectTableNVX ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX) + { + m_objectTableNVX = objectTableNVX; + return *this; + } +#endif + + ObjectTableNVX & operator=( std::nullptr_t ) + { + m_objectTableNVX = VK_NULL_HANDLE; + return *this; + } + + bool operator==( ObjectTableNVX const & rhs ) const + { + return m_objectTableNVX == rhs.m_objectTableNVX; + } + + bool operator!=(ObjectTableNVX const & rhs ) const + { + return m_objectTableNVX != rhs.m_objectTableNVX; + } + + bool operator<(ObjectTableNVX const & rhs ) const + { + return m_objectTableNVX < rhs.m_objectTableNVX; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const + { + return m_objectTableNVX; + } + + explicit operator bool() const + { + return m_objectTableNVX != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_objectTableNVX == VK_NULL_HANDLE; + } + + private: + VkObjectTableNVX m_objectTableNVX; + }; + + static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" ); + + class IndirectCommandsLayoutNVX + { + public: + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX() + : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t ) + : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX ) + : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX) + { + m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX; + return *this; + } +#endif + + IndirectCommandsLayoutNVX & operator=( std::nullptr_t ) + { + m_indirectCommandsLayoutNVX = VK_NULL_HANDLE; + return *this; + } + + bool operator==( IndirectCommandsLayoutNVX const & rhs ) const + { + return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX; + } + + bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const + { + return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX; + } + + bool operator<(IndirectCommandsLayoutNVX const & rhs ) const + { + return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const + { + return m_indirectCommandsLayoutNVX; + } + + explicit operator bool() const + { + return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX; + }; + + static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" 
); + + class DescriptorUpdateTemplate + { + public: + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() + : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) + : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) + : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) + { + m_descriptorUpdateTemplate = descriptorUpdateTemplate; + return *this; + } +#endif + + DescriptorUpdateTemplate & operator=( std::nullptr_t ) + { + m_descriptorUpdateTemplate = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DescriptorUpdateTemplate const & rhs ) const + { + return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; + } + + bool operator!=(DescriptorUpdateTemplate const & rhs ) const + { + return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; + } + + bool operator<(DescriptorUpdateTemplate const & rhs ) const + { + return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const + { + return m_descriptorUpdateTemplate; + } + + explicit operator bool() const + { + return m_descriptorUpdateTemplate != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + } + + private: + VkDescriptorUpdateTemplate m_descriptorUpdateTemplate; + }; + + static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" ); + + using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; + + class SamplerYcbcrConversion + { + public: + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() + : m_samplerYcbcrConversion(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) + : m_samplerYcbcrConversion(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) + : m_samplerYcbcrConversion( samplerYcbcrConversion ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) + { + m_samplerYcbcrConversion = samplerYcbcrConversion; + return *this; + } +#endif + + SamplerYcbcrConversion & operator=( std::nullptr_t ) + { + m_samplerYcbcrConversion = VK_NULL_HANDLE; + return *this; + } + + bool operator==( SamplerYcbcrConversion const & rhs ) const + { + return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; + } + + bool operator!=(SamplerYcbcrConversion const & rhs ) const + { + return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; + } + + bool operator<(SamplerYcbcrConversion const & rhs ) const + { + return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const + { + return m_samplerYcbcrConversion; + } + + explicit operator bool() const + { + return m_samplerYcbcrConversion != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_samplerYcbcrConversion == VK_NULL_HANDLE; + } + + private: + VkSamplerYcbcrConversion m_samplerYcbcrConversion; + }; + + static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" 
); + + using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; + + class ValidationCacheEXT + { + public: + VULKAN_HPP_CONSTEXPR ValidationCacheEXT() + : m_validationCacheEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) + : m_validationCacheEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) + : m_validationCacheEXT( validationCacheEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) + { + m_validationCacheEXT = validationCacheEXT; + return *this; + } +#endif + + ValidationCacheEXT & operator=( std::nullptr_t ) + { + m_validationCacheEXT = VK_NULL_HANDLE; + return *this; + } + + bool operator==( ValidationCacheEXT const & rhs ) const + { + return m_validationCacheEXT == rhs.m_validationCacheEXT; + } + + bool operator!=(ValidationCacheEXT const & rhs ) const + { + return m_validationCacheEXT != rhs.m_validationCacheEXT; + } + + bool operator<(ValidationCacheEXT const & rhs ) const + { + return m_validationCacheEXT < rhs.m_validationCacheEXT; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const + { + return m_validationCacheEXT; + } + + explicit operator bool() const + { + return m_validationCacheEXT != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_validationCacheEXT == VK_NULL_HANDLE; + } + + private: + VkValidationCacheEXT m_validationCacheEXT; + }; + + static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" ); + + class DisplayKHR + { + public: + VULKAN_HPP_CONSTEXPR DisplayKHR() + : m_displayKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) + : m_displayKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) + : m_displayKHR( displayKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DisplayKHR & operator=(VkDisplayKHR displayKHR) + { + m_displayKHR = displayKHR; + return *this; + } +#endif + + DisplayKHR & operator=( std::nullptr_t ) + { + m_displayKHR = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DisplayKHR const & rhs ) const + { + return m_displayKHR == rhs.m_displayKHR; + } + + bool operator!=(DisplayKHR const & rhs ) const + { + return m_displayKHR != rhs.m_displayKHR; + } + + bool operator<(DisplayKHR const & rhs ) const + { + return m_displayKHR < rhs.m_displayKHR; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const + { + return m_displayKHR; + } + + explicit operator bool() const + { + return m_displayKHR != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_displayKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayKHR m_displayKHR; + }; + + static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" 
); + + class DisplayModeKHR + { + public: + VULKAN_HPP_CONSTEXPR DisplayModeKHR() + : m_displayModeKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) + : m_displayModeKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) + : m_displayModeKHR( displayModeKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) + { + m_displayModeKHR = displayModeKHR; + return *this; + } +#endif + + DisplayModeKHR & operator=( std::nullptr_t ) + { + m_displayModeKHR = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DisplayModeKHR const & rhs ) const + { + return m_displayModeKHR == rhs.m_displayModeKHR; + } + + bool operator!=(DisplayModeKHR const & rhs ) const + { + return m_displayModeKHR != rhs.m_displayModeKHR; + } + + bool operator<(DisplayModeKHR const & rhs ) const + { + return m_displayModeKHR < rhs.m_displayModeKHR; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const + { + return m_displayModeKHR; + } + + explicit operator bool() const + { + return m_displayModeKHR != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_displayModeKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayModeKHR m_displayModeKHR; + }; + + static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" ); + + class SurfaceKHR + { + public: + VULKAN_HPP_CONSTEXPR SurfaceKHR() + : m_surfaceKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) + : m_surfaceKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) + : m_surfaceKHR( surfaceKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) + { + m_surfaceKHR = surfaceKHR; + return *this; + } +#endif + + SurfaceKHR & operator=( std::nullptr_t ) + { + m_surfaceKHR = VK_NULL_HANDLE; + return *this; + } + + bool operator==( SurfaceKHR const & rhs ) const + { + return m_surfaceKHR == rhs.m_surfaceKHR; + } + + bool operator!=(SurfaceKHR const & rhs ) const + { + return m_surfaceKHR != rhs.m_surfaceKHR; + } + + bool operator<(SurfaceKHR const & rhs ) const + { + return m_surfaceKHR < rhs.m_surfaceKHR; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const + { + return m_surfaceKHR; + } + + explicit operator bool() const + { + return m_surfaceKHR != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_surfaceKHR == VK_NULL_HANDLE; + } + + private: + VkSurfaceKHR m_surfaceKHR; + }; + + static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" 
); + + class SwapchainKHR + { + public: + VULKAN_HPP_CONSTEXPR SwapchainKHR() + : m_swapchainKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) + : m_swapchainKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) + : m_swapchainKHR( swapchainKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) + { + m_swapchainKHR = VK_NULL_HANDLE; + return *this; + } + + bool operator==( SwapchainKHR const & rhs ) const + { + return m_swapchainKHR == rhs.m_swapchainKHR; + } + + bool operator!=(SwapchainKHR const & rhs ) const + { + return m_swapchainKHR != rhs.m_swapchainKHR; + } + + bool operator<(SwapchainKHR const & rhs ) const + { + return m_swapchainKHR < rhs.m_swapchainKHR; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const + { + return m_swapchainKHR; + } + + explicit operator bool() const + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR; + }; + + static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" ); + + class DebugReportCallbackEXT + { + public: + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() + : m_debugReportCallbackEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) + : m_debugReportCallbackEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) + : m_debugReportCallbackEXT( debugReportCallbackEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) + { + m_debugReportCallbackEXT = debugReportCallbackEXT; + return *this; + } +#endif + + DebugReportCallbackEXT & operator=( std::nullptr_t ) + { + m_debugReportCallbackEXT = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DebugReportCallbackEXT const & rhs ) const + { + return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; + } + + bool operator!=(DebugReportCallbackEXT const & rhs ) const + { + return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; + } + + bool operator<(DebugReportCallbackEXT const & rhs ) const + { + return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const + { + return m_debugReportCallbackEXT; + } + + explicit operator bool() const + { + return m_debugReportCallbackEXT != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_debugReportCallbackEXT == VK_NULL_HANDLE; + } + + private: + VkDebugReportCallbackEXT m_debugReportCallbackEXT; + }; + + static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" 
); + + class DebugUtilsMessengerEXT + { + public: + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() + : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) + : m_debugUtilsMessengerEXT(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) + : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) + { + m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + return *this; + } +#endif + + DebugUtilsMessengerEXT & operator=( std::nullptr_t ) + { + m_debugUtilsMessengerEXT = VK_NULL_HANDLE; + return *this; + } + + bool operator==( DebugUtilsMessengerEXT const & rhs ) const + { + return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; + } + + bool operator!=(DebugUtilsMessengerEXT const & rhs ) const + { + return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; + } + + bool operator<(DebugUtilsMessengerEXT const & rhs ) const + { + return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; + } + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const + { + return m_debugUtilsMessengerEXT; + } + + explicit operator bool() const + { + return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; + } + + private: + VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT; + }; + + static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" ); + + struct Offset2D + { + Offset2D( int32_t x_ = 0, int32_t y_ = 0 ) + : x( x_ ) + , y( y_ ) + { + } + + Offset2D( VkOffset2D const & rhs ) + { + memcpy( this, &rhs, sizeof( Offset2D ) ); + } + + Offset2D& operator=( VkOffset2D const & rhs ) + { + memcpy( this, &rhs, sizeof( Offset2D ) ); + return *this; + } + Offset2D& setX( int32_t x_ ) + { + x = x_; + return *this; + } + + Offset2D& setY( int32_t y_ ) + { + y = y_; + return *this; + } + + operator const VkOffset2D&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Offset2D const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ); + } + + bool operator!=( Offset2D const& rhs ) const + { + return !operator==( rhs ); + } + + int32_t x; + int32_t y; + }; + static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" ); + + struct Offset3D + { + Offset3D( int32_t x_ = 0, int32_t y_ = 0, int32_t z_ = 0 ) + : x( x_ ) + , y( y_ ) + , z( z_ ) + { + } + + Offset3D( VkOffset3D const & rhs ) + { + memcpy( this, &rhs, sizeof( Offset3D ) ); + } + + Offset3D& operator=( VkOffset3D const & rhs ) + { + memcpy( this, &rhs, sizeof( Offset3D ) ); + return *this; + } + Offset3D& setX( int32_t x_ ) + { + x = x_; + return *this; + } + + Offset3D& setY( int32_t y_ ) + { + y = y_; + return *this; + } + + Offset3D& setZ( int32_t z_ ) + { + z = z_; + return *this; + } + + operator const VkOffset3D&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Offset3D const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ) + && ( z == rhs.z ); + } + + bool operator!=( Offset3D const& rhs ) const + { + return !operator==( rhs ); + } + + int32_t x; + int32_t y; + int32_t z; + }; + static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" 
); + + struct Extent2D + { + Extent2D( uint32_t width_ = 0, uint32_t height_ = 0 ) + : width( width_ ) + , height( height_ ) + { + } + + Extent2D( VkExtent2D const & rhs ) + { + memcpy( this, &rhs, sizeof( Extent2D ) ); + } + + Extent2D& operator=( VkExtent2D const & rhs ) + { + memcpy( this, &rhs, sizeof( Extent2D ) ); + return *this; + } + Extent2D& setWidth( uint32_t width_ ) + { + width = width_; + return *this; + } + + Extent2D& setHeight( uint32_t height_ ) + { + height = height_; + return *this; + } + + operator const VkExtent2D&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Extent2D const& rhs ) const + { + return ( width == rhs.width ) + && ( height == rhs.height ); + } + + bool operator!=( Extent2D const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t width; + uint32_t height; + }; + static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" ); + + struct Extent3D + { + Extent3D( uint32_t width_ = 0, uint32_t height_ = 0, uint32_t depth_ = 0 ) + : width( width_ ) + , height( height_ ) + , depth( depth_ ) + { + } + + Extent3D( VkExtent3D const & rhs ) + { + memcpy( this, &rhs, sizeof( Extent3D ) ); + } + + Extent3D& operator=( VkExtent3D const & rhs ) + { + memcpy( this, &rhs, sizeof( Extent3D ) ); + return *this; + } + Extent3D& setWidth( uint32_t width_ ) + { + width = width_; + return *this; + } + + Extent3D& setHeight( uint32_t height_ ) + { + height = height_; + return *this; + } + + Extent3D& setDepth( uint32_t depth_ ) + { + depth = depth_; + return *this; + } + + operator const VkExtent3D&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Extent3D const& rhs ) const + { + return ( width == rhs.width ) + && ( height == rhs.height ) + && ( depth == rhs.depth ); + } + + bool operator!=( Extent3D const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t width; + uint32_t height; + uint32_t depth; + }; + static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" 
); + + struct Viewport + { + Viewport( float x_ = 0, float y_ = 0, float width_ = 0, float height_ = 0, float minDepth_ = 0, float maxDepth_ = 0 ) + : x( x_ ) + , y( y_ ) + , width( width_ ) + , height( height_ ) + , minDepth( minDepth_ ) + , maxDepth( maxDepth_ ) + { + } + + Viewport( VkViewport const & rhs ) + { + memcpy( this, &rhs, sizeof( Viewport ) ); + } + + Viewport& operator=( VkViewport const & rhs ) + { + memcpy( this, &rhs, sizeof( Viewport ) ); + return *this; + } + Viewport& setX( float x_ ) + { + x = x_; + return *this; + } + + Viewport& setY( float y_ ) + { + y = y_; + return *this; + } + + Viewport& setWidth( float width_ ) + { + width = width_; + return *this; + } + + Viewport& setHeight( float height_ ) + { + height = height_; + return *this; + } + + Viewport& setMinDepth( float minDepth_ ) + { + minDepth = minDepth_; + return *this; + } + + Viewport& setMaxDepth( float maxDepth_ ) + { + maxDepth = maxDepth_; + return *this; + } + + operator const VkViewport&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Viewport const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ) + && ( width == rhs.width ) + && ( height == rhs.height ) + && ( minDepth == rhs.minDepth ) + && ( maxDepth == rhs.maxDepth ); + } + + bool operator!=( Viewport const& rhs ) const + { + return !operator==( rhs ); + } + + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; + }; + static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" ); + + struct Rect2D + { + Rect2D( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D() ) + : offset( offset_ ) + , extent( extent_ ) + { + } + + Rect2D( VkRect2D const & rhs ) + { + memcpy( this, &rhs, sizeof( Rect2D ) ); + } + + Rect2D& operator=( VkRect2D const & rhs ) + { + memcpy( this, &rhs, sizeof( Rect2D ) ); + return *this; + } + Rect2D& setOffset( Offset2D offset_ ) + { + offset = offset_; + return *this; + } + + Rect2D& setExtent( Extent2D extent_ ) + { + extent = extent_; + return *this; + } + + operator const VkRect2D&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Rect2D const& rhs ) const + { + return ( offset == rhs.offset ) + && ( extent == rhs.extent ); + } + + bool operator!=( Rect2D const& rhs ) const + { + return !operator==( rhs ); + } + + Offset2D offset; + Extent2D extent; + }; + static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" 
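+  // Illustrative usage sketch (assumption: the default vk namespace). The struct
+  // wrappers above are layout-compatible with their C counterparts, offer
+  // chainable set*() members for named-field initialization, and convert
+  // implicitly to the corresponding Vk* struct, so they can be handed straight
+  // to functions expecting the C types.
+  //
+  //   vk::Viewport viewport = vk::Viewport()
+  //     .setX( 0.0f ).setY( 0.0f )
+  //     .setWidth( 1280.0f ).setHeight( 720.0f )
+  //     .setMinDepth( 0.0f ).setMaxDepth( 1.0f );
+  //   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) );
+  //   const VkViewport & cViewport = viewport;   // implicit conversion to the C struct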
); + + struct ClearRect + { + ClearRect( Rect2D rect_ = Rect2D(), uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 ) + : rect( rect_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + { + } + + ClearRect( VkClearRect const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearRect ) ); + } + + ClearRect& operator=( VkClearRect const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearRect ) ); + return *this; + } + ClearRect& setRect( Rect2D rect_ ) + { + rect = rect_; + return *this; + } + + ClearRect& setBaseArrayLayer( uint32_t baseArrayLayer_ ) + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ClearRect& setLayerCount( uint32_t layerCount_ ) + { + layerCount = layerCount_; + return *this; + } + + operator const VkClearRect&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ClearRect const& rhs ) const + { + return ( rect == rhs.rect ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ClearRect const& rhs ) const + { + return !operator==( rhs ); + } + + Rect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; + }; + static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); + + struct ExtensionProperties + { + operator const VkExtensionProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExtensionProperties const& rhs ) const + { + return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) + && ( specVersion == rhs.specVersion ); + } + + bool operator!=( ExtensionProperties const& rhs ) const + { + return !operator==( rhs ); + } + + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + }; + static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" ); + + struct LayerProperties + { + operator const VkLayerProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( LayerProperties const& rhs ) const + { + return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) + && ( specVersion == rhs.specVersion ) + && ( implementationVersion == rhs.implementationVersion ) + && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ); + } + + bool operator!=( LayerProperties const& rhs ) const + { + return !operator==( rhs ); + } + + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; + }; + static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" 
); + + struct AllocationCallbacks + { + AllocationCallbacks( void* pUserData_ = nullptr, PFN_vkAllocationFunction pfnAllocation_ = nullptr, PFN_vkReallocationFunction pfnReallocation_ = nullptr, PFN_vkFreeFunction pfnFree_ = nullptr, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr, PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr ) + : pUserData( pUserData_ ) + , pfnAllocation( pfnAllocation_ ) + , pfnReallocation( pfnReallocation_ ) + , pfnFree( pfnFree_ ) + , pfnInternalAllocation( pfnInternalAllocation_ ) + , pfnInternalFree( pfnInternalFree_ ) + { + } + + AllocationCallbacks( VkAllocationCallbacks const & rhs ) + { + memcpy( this, &rhs, sizeof( AllocationCallbacks ) ); + } + + AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) + { + memcpy( this, &rhs, sizeof( AllocationCallbacks ) ); + return *this; + } + AllocationCallbacks& setPUserData( void* pUserData_ ) + { + pUserData = pUserData_; + return *this; + } + + AllocationCallbacks& setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) + { + pfnAllocation = pfnAllocation_; + return *this; + } + + AllocationCallbacks& setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) + { + pfnReallocation = pfnReallocation_; + return *this; + } + + AllocationCallbacks& setPfnFree( PFN_vkFreeFunction pfnFree_ ) + { + pfnFree = pfnFree_; + return *this; + } + + AllocationCallbacks& setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) + { + pfnInternalAllocation = pfnInternalAllocation_; + return *this; + } + + AllocationCallbacks& setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) + { + pfnInternalFree = pfnInternalFree_; + return *this; + } + + operator const VkAllocationCallbacks&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AllocationCallbacks const& rhs ) const + { + return ( pUserData == rhs.pUserData ) + && ( pfnAllocation == rhs.pfnAllocation ) + && ( pfnReallocation == rhs.pfnReallocation ) + && ( pfnFree == rhs.pfnFree ) + && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) + && ( pfnInternalFree == rhs.pfnInternalFree ); + } + + bool operator!=( AllocationCallbacks const& rhs ) const + { + return !operator==( rhs ); + } + + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; + }; + static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" ); + + struct MemoryRequirements + { + operator const VkMemoryRequirements&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryRequirements const& rhs ) const + { + return ( size == rhs.size ) + && ( alignment == rhs.alignment ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryRequirements const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize size; + DeviceSize alignment; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" 
); + + struct DescriptorBufferInfo + { + DescriptorBufferInfo( Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize range_ = 0 ) + : buffer( buffer_ ) + , offset( offset_ ) + , range( range_ ) + { + } + + DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) ); + } + + DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorBufferInfo ) ); + return *this; + } + DescriptorBufferInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + DescriptorBufferInfo& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + DescriptorBufferInfo& setRange( DeviceSize range_ ) + { + range = range_; + return *this; + } + + operator const VkDescriptorBufferInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorBufferInfo const& rhs ) const + { + return ( buffer == rhs.buffer ) + && ( offset == rhs.offset ) + && ( range == rhs.range ); + } + + bool operator!=( DescriptorBufferInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Buffer buffer; + DeviceSize offset; + DeviceSize range; + }; + static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" ); + + struct SubresourceLayout + { + operator const VkSubresourceLayout&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SubresourceLayout const& rhs ) const + { + return ( offset == rhs.offset ) + && ( size == rhs.size ) + && ( rowPitch == rhs.rowPitch ) + && ( arrayPitch == rhs.arrayPitch ) + && ( depthPitch == rhs.depthPitch ); + } + + bool operator!=( SubresourceLayout const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize offset; + DeviceSize size; + DeviceSize rowPitch; + DeviceSize arrayPitch; + DeviceSize depthPitch; + }; + static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" ); + + struct BufferCopy + { + BufferCopy( DeviceSize srcOffset_ = 0, DeviceSize dstOffset_ = 0, DeviceSize size_ = 0 ) + : srcOffset( srcOffset_ ) + , dstOffset( dstOffset_ ) + , size( size_ ) + { + } + + BufferCopy( VkBufferCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferCopy ) ); + } + + BufferCopy& operator=( VkBufferCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferCopy ) ); + return *this; + } + BufferCopy& setSrcOffset( DeviceSize srcOffset_ ) + { + srcOffset = srcOffset_; + return *this; + } + + BufferCopy& setDstOffset( DeviceSize dstOffset_ ) + { + dstOffset = dstOffset_; + return *this; + } + + BufferCopy& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + operator const VkBufferCopy&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BufferCopy const& rhs ) const + { + return ( srcOffset == rhs.srcOffset ) + && ( dstOffset == rhs.dstOffset ) + && ( size == rhs.size ); + } + + bool operator!=( BufferCopy const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize srcOffset; + DeviceSize dstOffset; + DeviceSize size; + }; + static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" 
); + + struct SpecializationMapEntry + { + SpecializationMapEntry( uint32_t constantID_ = 0, uint32_t offset_ = 0, size_t size_ = 0 ) + : constantID( constantID_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) + { + memcpy( this, &rhs, sizeof( SpecializationMapEntry ) ); + } + + SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) + { + memcpy( this, &rhs, sizeof( SpecializationMapEntry ) ); + return *this; + } + SpecializationMapEntry& setConstantID( uint32_t constantID_ ) + { + constantID = constantID_; + return *this; + } + + SpecializationMapEntry& setOffset( uint32_t offset_ ) + { + offset = offset_; + return *this; + } + + SpecializationMapEntry& setSize( size_t size_ ) + { + size = size_; + return *this; + } + + operator const VkSpecializationMapEntry&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SpecializationMapEntry const& rhs ) const + { + return ( constantID == rhs.constantID ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( SpecializationMapEntry const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t constantID; + uint32_t offset; + size_t size; + }; + static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" ); + + struct SpecializationInfo + { + SpecializationInfo( uint32_t mapEntryCount_ = 0, const SpecializationMapEntry* pMapEntries_ = nullptr, size_t dataSize_ = 0, const void* pData_ = nullptr ) + : mapEntryCount( mapEntryCount_ ) + , pMapEntries( pMapEntries_ ) + , dataSize( dataSize_ ) + , pData( pData_ ) + { + } + + SpecializationInfo( VkSpecializationInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SpecializationInfo ) ); + } + + SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SpecializationInfo ) ); + return *this; + } + SpecializationInfo& setMapEntryCount( uint32_t mapEntryCount_ ) + { + mapEntryCount = mapEntryCount_; + return *this; + } + + SpecializationInfo& setPMapEntries( const SpecializationMapEntry* pMapEntries_ ) + { + pMapEntries = pMapEntries_; + return *this; + } + + SpecializationInfo& setDataSize( size_t dataSize_ ) + { + dataSize = dataSize_; + return *this; + } + + SpecializationInfo& setPData( const void* pData_ ) + { + pData = pData_; + return *this; + } + + operator const VkSpecializationInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SpecializationInfo const& rhs ) const + { + return ( mapEntryCount == rhs.mapEntryCount ) + && ( pMapEntries == rhs.pMapEntries ) + && ( dataSize == rhs.dataSize ) + && ( pData == rhs.pData ); + } + + bool operator!=( SpecializationInfo const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t mapEntryCount; + const SpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; + }; + static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" 
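+  // Usage sketch (not part of the generated header): SpecializationInfo points at an array of
+  // SpecializationMapEntry plus raw data; the entry below maps constant ID 0 to a hypothetical
+  // 4-byte value stored in specData.
+  //   uint32_t specData = 16;
+  //   vk::SpecializationMapEntry entry( 0 /*constantID*/, 0 /*offset*/, sizeof( specData ) );
+  //   vk::SpecializationInfo specInfo( 1, &entry, sizeof( specData ), &specData );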
); + + union ClearColorValue + { + ClearColorValue( const std::array& float32_ = { {0} } ) + { + memcpy( &float32, float32_.data(), 4 * sizeof( float ) ); + } + + ClearColorValue( const std::array& int32_ ) + { + memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) ); + } + + ClearColorValue( const std::array& uint32_ ) + { + memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) ); + } + + ClearColorValue& setFloat32( std::array float32_ ) + { + memcpy( &float32, float32_.data(), 4 * sizeof( float ) ); + return *this; + } + + ClearColorValue& setInt32( std::array int32_ ) + { + memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) ); + return *this; + } + + ClearColorValue& setUint32( std::array uint32_ ) + { + memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) ); + return *this; + } + + operator VkClearColorValue const& () const + { + return *reinterpret_cast(this); + } + + float float32[4]; + int32_t int32[4]; + uint32_t uint32[4]; + }; + + struct ClearDepthStencilValue + { + ClearDepthStencilValue( float depth_ = 0, uint32_t stencil_ = 0 ) + : depth( depth_ ) + , stencil( stencil_ ) + { + } + + ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) ); + } + + ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearDepthStencilValue ) ); + return *this; + } + ClearDepthStencilValue& setDepth( float depth_ ) + { + depth = depth_; + return *this; + } + + ClearDepthStencilValue& setStencil( uint32_t stencil_ ) + { + stencil = stencil_; + return *this; + } + + operator const VkClearDepthStencilValue&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ClearDepthStencilValue const& rhs ) const + { + return ( depth == rhs.depth ) + && ( stencil == rhs.stencil ); + } + + bool operator!=( ClearDepthStencilValue const& rhs ) const + { + return !operator==( rhs ); + } + + float depth; + uint32_t stencil; + }; + static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" 
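+  // Usage sketch (not part of the generated header): ClearColorValue takes a std::array for the
+  // float / int / uint channels and ClearDepthStencilValue packs depth and stencil; both feed
+  // into the ClearValue union below.
+  //   vk::ClearColorValue black( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );
+  //   vk::ClearDepthStencilValue depthClear( 1.0f /*depth*/, 0 /*stencil*/ );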
); + + union ClearValue + { + ClearValue( ClearColorValue color_ = ClearColorValue() ) + { + color = color_; + } + + ClearValue( ClearDepthStencilValue depthStencil_ ) + { + depthStencil = depthStencil_; + } + + ClearValue& setColor( ClearColorValue color_ ) + { + color = color_; + return *this; + } + + ClearValue& setDepthStencil( ClearDepthStencilValue depthStencil_ ) + { + depthStencil = depthStencil_; + return *this; + } + + operator VkClearValue const& () const + { + return *reinterpret_cast(this); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + ClearColorValue color; + ClearDepthStencilValue depthStencil; +#else + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +#endif // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + }; + + struct PhysicalDeviceFeatures + { + PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = 0, Bool32 fullDrawIndexUint32_ = 0, Bool32 imageCubeArray_ = 0, Bool32 independentBlend_ = 0, Bool32 geometryShader_ = 0, Bool32 tessellationShader_ = 0, Bool32 sampleRateShading_ = 0, Bool32 dualSrcBlend_ = 0, Bool32 logicOp_ = 0, Bool32 multiDrawIndirect_ = 0, Bool32 drawIndirectFirstInstance_ = 0, Bool32 depthClamp_ = 0, Bool32 depthBiasClamp_ = 0, Bool32 fillModeNonSolid_ = 0, Bool32 depthBounds_ = 0, Bool32 wideLines_ = 0, Bool32 largePoints_ = 0, Bool32 alphaToOne_ = 0, Bool32 multiViewport_ = 0, Bool32 samplerAnisotropy_ = 0, Bool32 textureCompressionETC2_ = 0, Bool32 textureCompressionASTC_LDR_ = 0, Bool32 textureCompressionBC_ = 0, Bool32 occlusionQueryPrecise_ = 0, Bool32 pipelineStatisticsQuery_ = 0, Bool32 vertexPipelineStoresAndAtomics_ = 0, Bool32 fragmentStoresAndAtomics_ = 0, Bool32 shaderTessellationAndGeometryPointSize_ = 0, Bool32 shaderImageGatherExtended_ = 0, Bool32 shaderStorageImageExtendedFormats_ = 0, Bool32 shaderStorageImageMultisample_ = 0, Bool32 shaderStorageImageReadWithoutFormat_ = 0, Bool32 shaderStorageImageWriteWithoutFormat_ = 0, Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0, Bool32 shaderSampledImageArrayDynamicIndexing_ = 0, Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0, Bool32 shaderStorageImageArrayDynamicIndexing_ = 0, Bool32 shaderClipDistance_ = 0, Bool32 shaderCullDistance_ = 0, Bool32 shaderFloat64_ = 0, Bool32 shaderInt64_ = 0, Bool32 shaderInt16_ = 0, Bool32 shaderResourceResidency_ = 0, Bool32 shaderResourceMinLod_ = 0, Bool32 sparseBinding_ = 0, Bool32 sparseResidencyBuffer_ = 0, Bool32 sparseResidencyImage2D_ = 0, Bool32 sparseResidencyImage3D_ = 0, Bool32 sparseResidency2Samples_ = 0, Bool32 sparseResidency4Samples_ = 0, Bool32 sparseResidency8Samples_ = 0, Bool32 sparseResidency16Samples_ = 0, Bool32 sparseResidencyAliased_ = 0, Bool32 variableMultisampleRate_ = 0, Bool32 inheritedQueries_ = 0 ) + : robustBufferAccess( robustBufferAccess_ ) + , fullDrawIndexUint32( fullDrawIndexUint32_ ) + , imageCubeArray( imageCubeArray_ ) + , independentBlend( independentBlend_ ) + , geometryShader( geometryShader_ ) + , tessellationShader( tessellationShader_ ) + , sampleRateShading( sampleRateShading_ ) + , dualSrcBlend( dualSrcBlend_ ) + , logicOp( logicOp_ ) + , multiDrawIndirect( multiDrawIndirect_ ) + , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) + , depthClamp( depthClamp_ ) + , depthBiasClamp( depthBiasClamp_ ) + , fillModeNonSolid( fillModeNonSolid_ ) + , depthBounds( depthBounds_ ) + , wideLines( wideLines_ ) + , largePoints( largePoints_ ) + , alphaToOne( alphaToOne_ ) + , multiViewport( multiViewport_ ) + , samplerAnisotropy( samplerAnisotropy_ ) + , textureCompressionETC2( 
textureCompressionETC2_ ) + , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) + , textureCompressionBC( textureCompressionBC_ ) + , occlusionQueryPrecise( occlusionQueryPrecise_ ) + , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) + , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) + , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) + , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) + , shaderImageGatherExtended( shaderImageGatherExtended_ ) + , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) + , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) + , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) + , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) + , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) + , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) + , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) + , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) + , shaderClipDistance( shaderClipDistance_ ) + , shaderCullDistance( shaderCullDistance_ ) + , shaderFloat64( shaderFloat64_ ) + , shaderInt64( shaderInt64_ ) + , shaderInt16( shaderInt16_ ) + , shaderResourceResidency( shaderResourceResidency_ ) + , shaderResourceMinLod( shaderResourceMinLod_ ) + , sparseBinding( sparseBinding_ ) + , sparseResidencyBuffer( sparseResidencyBuffer_ ) + , sparseResidencyImage2D( sparseResidencyImage2D_ ) + , sparseResidencyImage3D( sparseResidencyImage3D_ ) + , sparseResidency2Samples( sparseResidency2Samples_ ) + , sparseResidency4Samples( sparseResidency4Samples_ ) + , sparseResidency8Samples( sparseResidency8Samples_ ) + , sparseResidency16Samples( sparseResidency16Samples_ ) + , sparseResidencyAliased( sparseResidencyAliased_ ) + , variableMultisampleRate( variableMultisampleRate_ ) + , inheritedQueries( inheritedQueries_ ) + { + } + + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) ); + } + + PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures ) ); + return *this; + } + PhysicalDeviceFeatures& setRobustBufferAccess( Bool32 robustBufferAccess_ ) + { + robustBufferAccess = robustBufferAccess_; + return *this; + } + + PhysicalDeviceFeatures& setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ ) + { + fullDrawIndexUint32 = fullDrawIndexUint32_; + return *this; + } + + PhysicalDeviceFeatures& setImageCubeArray( Bool32 imageCubeArray_ ) + { + imageCubeArray = imageCubeArray_; + return *this; + } + + PhysicalDeviceFeatures& setIndependentBlend( Bool32 independentBlend_ ) + { + independentBlend = independentBlend_; + return *this; + } + + PhysicalDeviceFeatures& setGeometryShader( Bool32 geometryShader_ ) + { + geometryShader = geometryShader_; + return *this; + } + + PhysicalDeviceFeatures& setTessellationShader( Bool32 tessellationShader_ ) + { + tessellationShader = tessellationShader_; + return *this; + } + + PhysicalDeviceFeatures& setSampleRateShading( Bool32 sampleRateShading_ ) + { + sampleRateShading = sampleRateShading_; + return *this; + } + + PhysicalDeviceFeatures& setDualSrcBlend( Bool32 dualSrcBlend_ ) + { + dualSrcBlend = dualSrcBlend_; + return *this; + } + + PhysicalDeviceFeatures& setLogicOp( Bool32 logicOp_ ) + { + logicOp = logicOp_; + 
return *this; + } + + PhysicalDeviceFeatures& setMultiDrawIndirect( Bool32 multiDrawIndirect_ ) + { + multiDrawIndirect = multiDrawIndirect_; + return *this; + } + + PhysicalDeviceFeatures& setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ ) + { + drawIndirectFirstInstance = drawIndirectFirstInstance_; + return *this; + } + + PhysicalDeviceFeatures& setDepthClamp( Bool32 depthClamp_ ) + { + depthClamp = depthClamp_; + return *this; + } + + PhysicalDeviceFeatures& setDepthBiasClamp( Bool32 depthBiasClamp_ ) + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + PhysicalDeviceFeatures& setFillModeNonSolid( Bool32 fillModeNonSolid_ ) + { + fillModeNonSolid = fillModeNonSolid_; + return *this; + } + + PhysicalDeviceFeatures& setDepthBounds( Bool32 depthBounds_ ) + { + depthBounds = depthBounds_; + return *this; + } + + PhysicalDeviceFeatures& setWideLines( Bool32 wideLines_ ) + { + wideLines = wideLines_; + return *this; + } + + PhysicalDeviceFeatures& setLargePoints( Bool32 largePoints_ ) + { + largePoints = largePoints_; + return *this; + } + + PhysicalDeviceFeatures& setAlphaToOne( Bool32 alphaToOne_ ) + { + alphaToOne = alphaToOne_; + return *this; + } + + PhysicalDeviceFeatures& setMultiViewport( Bool32 multiViewport_ ) + { + multiViewport = multiViewport_; + return *this; + } + + PhysicalDeviceFeatures& setSamplerAnisotropy( Bool32 samplerAnisotropy_ ) + { + samplerAnisotropy = samplerAnisotropy_; + return *this; + } + + PhysicalDeviceFeatures& setTextureCompressionETC2( Bool32 textureCompressionETC2_ ) + { + textureCompressionETC2 = textureCompressionETC2_; + return *this; + } + + PhysicalDeviceFeatures& setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ ) + { + textureCompressionASTC_LDR = textureCompressionASTC_LDR_; + return *this; + } + + PhysicalDeviceFeatures& setTextureCompressionBC( Bool32 textureCompressionBC_ ) + { + textureCompressionBC = textureCompressionBC_; + return *this; + } + + PhysicalDeviceFeatures& setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ ) + { + occlusionQueryPrecise = occlusionQueryPrecise_; + return *this; + } + + PhysicalDeviceFeatures& setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ ) + { + pipelineStatisticsQuery = pipelineStatisticsQuery_; + return *this; + } + + PhysicalDeviceFeatures& setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ ) + { + vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + return *this; + } + + PhysicalDeviceFeatures& setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ ) + { + fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + return *this; + } + + PhysicalDeviceFeatures& setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ ) + { + shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + return *this; + } + + PhysicalDeviceFeatures& setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ ) + { + shaderImageGatherExtended = shaderImageGatherExtended_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ ) + { + shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ ) + { + shaderStorageImageMultisample = shaderStorageImageMultisample_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageReadWithoutFormat( Bool32 
shaderStorageImageReadWithoutFormat_ ) + { + shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ ) + { + shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; + return *this; + } + + PhysicalDeviceFeatures& setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ ) + { + shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures& setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ ) + { + shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ ) + { + shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures& setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ ) + { + shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures& setShaderClipDistance( Bool32 shaderClipDistance_ ) + { + shaderClipDistance = shaderClipDistance_; + return *this; + } + + PhysicalDeviceFeatures& setShaderCullDistance( Bool32 shaderCullDistance_ ) + { + shaderCullDistance = shaderCullDistance_; + return *this; + } + + PhysicalDeviceFeatures& setShaderFloat64( Bool32 shaderFloat64_ ) + { + shaderFloat64 = shaderFloat64_; + return *this; + } + + PhysicalDeviceFeatures& setShaderInt64( Bool32 shaderInt64_ ) + { + shaderInt64 = shaderInt64_; + return *this; + } + + PhysicalDeviceFeatures& setShaderInt16( Bool32 shaderInt16_ ) + { + shaderInt16 = shaderInt16_; + return *this; + } + + PhysicalDeviceFeatures& setShaderResourceResidency( Bool32 shaderResourceResidency_ ) + { + shaderResourceResidency = shaderResourceResidency_; + return *this; + } + + PhysicalDeviceFeatures& setShaderResourceMinLod( Bool32 shaderResourceMinLod_ ) + { + shaderResourceMinLod = shaderResourceMinLod_; + return *this; + } + + PhysicalDeviceFeatures& setSparseBinding( Bool32 sparseBinding_ ) + { + sparseBinding = sparseBinding_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ ) + { + sparseResidencyBuffer = sparseResidencyBuffer_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ ) + { + sparseResidencyImage2D = sparseResidencyImage2D_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ ) + { + sparseResidencyImage3D = sparseResidencyImage3D_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidency2Samples( Bool32 sparseResidency2Samples_ ) + { + sparseResidency2Samples = sparseResidency2Samples_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidency4Samples( Bool32 sparseResidency4Samples_ ) + { + sparseResidency4Samples = sparseResidency4Samples_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidency8Samples( Bool32 sparseResidency8Samples_ ) + { + sparseResidency8Samples = sparseResidency8Samples_; + return *this; + } + + PhysicalDeviceFeatures& setSparseResidency16Samples( Bool32 sparseResidency16Samples_ ) + { + sparseResidency16Samples = sparseResidency16Samples_; + return 
*this; + } + + PhysicalDeviceFeatures& setSparseResidencyAliased( Bool32 sparseResidencyAliased_ ) + { + sparseResidencyAliased = sparseResidencyAliased_; + return *this; + } + + PhysicalDeviceFeatures& setVariableMultisampleRate( Bool32 variableMultisampleRate_ ) + { + variableMultisampleRate = variableMultisampleRate_; + return *this; + } + + PhysicalDeviceFeatures& setInheritedQueries( Bool32 inheritedQueries_ ) + { + inheritedQueries = inheritedQueries_; + return *this; + } + + operator const VkPhysicalDeviceFeatures&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceFeatures const& rhs ) const + { + return ( robustBufferAccess == rhs.robustBufferAccess ) + && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) + && ( imageCubeArray == rhs.imageCubeArray ) + && ( independentBlend == rhs.independentBlend ) + && ( geometryShader == rhs.geometryShader ) + && ( tessellationShader == rhs.tessellationShader ) + && ( sampleRateShading == rhs.sampleRateShading ) + && ( dualSrcBlend == rhs.dualSrcBlend ) + && ( logicOp == rhs.logicOp ) + && ( multiDrawIndirect == rhs.multiDrawIndirect ) + && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) + && ( depthClamp == rhs.depthClamp ) + && ( depthBiasClamp == rhs.depthBiasClamp ) + && ( fillModeNonSolid == rhs.fillModeNonSolid ) + && ( depthBounds == rhs.depthBounds ) + && ( wideLines == rhs.wideLines ) + && ( largePoints == rhs.largePoints ) + && ( alphaToOne == rhs.alphaToOne ) + && ( multiViewport == rhs.multiViewport ) + && ( samplerAnisotropy == rhs.samplerAnisotropy ) + && ( textureCompressionETC2 == rhs.textureCompressionETC2 ) + && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) + && ( textureCompressionBC == rhs.textureCompressionBC ) + && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) + && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) + && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) + && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) + && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) + && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) + && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) + && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) + && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) + && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) + && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) + && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) + && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) + && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) + && ( shaderClipDistance == rhs.shaderClipDistance ) + && ( shaderCullDistance == rhs.shaderCullDistance ) + && ( shaderFloat64 == rhs.shaderFloat64 ) + && ( shaderInt64 == rhs.shaderInt64 ) + && ( shaderInt16 == rhs.shaderInt16 ) + && ( shaderResourceResidency == rhs.shaderResourceResidency ) + && ( shaderResourceMinLod == rhs.shaderResourceMinLod ) + && ( sparseBinding == rhs.sparseBinding ) + && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) + && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) + && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) + && ( sparseResidency2Samples == 
rhs.sparseResidency2Samples ) + && ( sparseResidency4Samples == rhs.sparseResidency4Samples ) + && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) + && ( sparseResidency16Samples == rhs.sparseResidency16Samples ) + && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) + && ( variableMultisampleRate == rhs.variableMultisampleRate ) + && ( inheritedQueries == rhs.inheritedQueries ); + } + + bool operator!=( PhysicalDeviceFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + Bool32 robustBufferAccess; + Bool32 fullDrawIndexUint32; + Bool32 imageCubeArray; + Bool32 independentBlend; + Bool32 geometryShader; + Bool32 tessellationShader; + Bool32 sampleRateShading; + Bool32 dualSrcBlend; + Bool32 logicOp; + Bool32 multiDrawIndirect; + Bool32 drawIndirectFirstInstance; + Bool32 depthClamp; + Bool32 depthBiasClamp; + Bool32 fillModeNonSolid; + Bool32 depthBounds; + Bool32 wideLines; + Bool32 largePoints; + Bool32 alphaToOne; + Bool32 multiViewport; + Bool32 samplerAnisotropy; + Bool32 textureCompressionETC2; + Bool32 textureCompressionASTC_LDR; + Bool32 textureCompressionBC; + Bool32 occlusionQueryPrecise; + Bool32 pipelineStatisticsQuery; + Bool32 vertexPipelineStoresAndAtomics; + Bool32 fragmentStoresAndAtomics; + Bool32 shaderTessellationAndGeometryPointSize; + Bool32 shaderImageGatherExtended; + Bool32 shaderStorageImageExtendedFormats; + Bool32 shaderStorageImageMultisample; + Bool32 shaderStorageImageReadWithoutFormat; + Bool32 shaderStorageImageWriteWithoutFormat; + Bool32 shaderUniformBufferArrayDynamicIndexing; + Bool32 shaderSampledImageArrayDynamicIndexing; + Bool32 shaderStorageBufferArrayDynamicIndexing; + Bool32 shaderStorageImageArrayDynamicIndexing; + Bool32 shaderClipDistance; + Bool32 shaderCullDistance; + Bool32 shaderFloat64; + Bool32 shaderInt64; + Bool32 shaderInt16; + Bool32 shaderResourceResidency; + Bool32 shaderResourceMinLod; + Bool32 sparseBinding; + Bool32 sparseResidencyBuffer; + Bool32 sparseResidencyImage2D; + Bool32 sparseResidencyImage3D; + Bool32 sparseResidency2Samples; + Bool32 sparseResidency4Samples; + Bool32 sparseResidency8Samples; + Bool32 sparseResidency16Samples; + Bool32 sparseResidencyAliased; + Bool32 variableMultisampleRate; + Bool32 inheritedQueries; + }; + static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceSparseProperties + { + operator const VkPhysicalDeviceSparseProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSparseProperties const& rhs ) const + { + return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) + && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) + && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) + && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) + && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); + } + + bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const + { + return !operator==( rhs ); + } + + Bool32 residencyStandard2DBlockShape; + Bool32 residencyStandard2DMultisampleBlockShape; + Bool32 residencyStandard3DBlockShape; + Bool32 residencyAlignedMipSize; + Bool32 residencyNonResidentStrict; + }; + static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" 
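+  // Usage sketch (not part of the generated header): when creating a logical device, only the
+  // features that are actually needed are switched on; the Bool32 setters above make that
+  // explicit (VK_TRUE is the plain Vulkan boolean).
+  //   vk::PhysicalDeviceFeatures enabledFeatures = vk::PhysicalDeviceFeatures()
+  //     .setSamplerAnisotropy( VK_TRUE )
+  //     .setFillModeNonSolid( VK_TRUE );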
); + + struct DrawIndirectCommand + { + DrawIndirectCommand( uint32_t vertexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstVertex_ = 0, uint32_t firstInstance_ = 0 ) + : vertexCount( vertexCount_ ) + , instanceCount( instanceCount_ ) + , firstVertex( firstVertex_ ) + , firstInstance( firstInstance_ ) + { + } + + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DrawIndirectCommand ) ); + } + + DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DrawIndirectCommand ) ); + return *this; + } + DrawIndirectCommand& setVertexCount( uint32_t vertexCount_ ) + { + vertexCount = vertexCount_; + return *this; + } + + DrawIndirectCommand& setInstanceCount( uint32_t instanceCount_ ) + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndirectCommand& setFirstVertex( uint32_t firstVertex_ ) + { + firstVertex = firstVertex_; + return *this; + } + + DrawIndirectCommand& setFirstInstance( uint32_t firstInstance_ ) + { + firstInstance = firstInstance_; + return *this; + } + + operator const VkDrawIndirectCommand&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DrawIndirectCommand const& rhs ) const + { + return ( vertexCount == rhs.vertexCount ) + && ( instanceCount == rhs.instanceCount ) + && ( firstVertex == rhs.firstVertex ) + && ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndirectCommand const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; + }; + static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" ); + + struct DrawIndexedIndirectCommand + { + DrawIndexedIndirectCommand( uint32_t indexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstIndex_ = 0, int32_t vertexOffset_ = 0, uint32_t firstInstance_ = 0 ) + : indexCount( indexCount_ ) + , instanceCount( instanceCount_ ) + , firstIndex( firstIndex_ ) + , vertexOffset( vertexOffset_ ) + , firstInstance( firstInstance_ ) + { + } + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) ); + } + + DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DrawIndexedIndirectCommand ) ); + return *this; + } + DrawIndexedIndirectCommand& setIndexCount( uint32_t indexCount_ ) + { + indexCount = indexCount_; + return *this; + } + + DrawIndexedIndirectCommand& setInstanceCount( uint32_t instanceCount_ ) + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndexedIndirectCommand& setFirstIndex( uint32_t firstIndex_ ) + { + firstIndex = firstIndex_; + return *this; + } + + DrawIndexedIndirectCommand& setVertexOffset( int32_t vertexOffset_ ) + { + vertexOffset = vertexOffset_; + return *this; + } + + DrawIndexedIndirectCommand& setFirstInstance( uint32_t firstInstance_ ) + { + firstInstance = firstInstance_; + return *this; + } + + operator const VkDrawIndexedIndirectCommand&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DrawIndexedIndirectCommand const& rhs ) const + { + return ( indexCount == rhs.indexCount ) + && ( instanceCount == rhs.instanceCount ) + && ( firstIndex == rhs.firstIndex ) + && ( vertexOffset == rhs.vertexOffset ) + && ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndexedIndirectCommand const& rhs ) const + { + return 
!operator==( rhs ); + } + + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; + }; + static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" ); + + struct DispatchIndirectCommand + { + DispatchIndirectCommand( uint32_t x_ = 0, uint32_t y_ = 0, uint32_t z_ = 0 ) + : x( x_ ) + , y( y_ ) + , z( z_ ) + { + } + + DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) ); + } + + DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) + { + memcpy( this, &rhs, sizeof( DispatchIndirectCommand ) ); + return *this; + } + DispatchIndirectCommand& setX( uint32_t x_ ) + { + x = x_; + return *this; + } + + DispatchIndirectCommand& setY( uint32_t y_ ) + { + y = y_; + return *this; + } + + DispatchIndirectCommand& setZ( uint32_t z_ ) + { + z = z_; + return *this; + } + + operator const VkDispatchIndirectCommand&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DispatchIndirectCommand const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ) + && ( z == rhs.z ); + } + + bool operator!=( DispatchIndirectCommand const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t x; + uint32_t y; + uint32_t z; + }; + static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" ); + + struct DisplayPlanePropertiesKHR + { + operator const VkDisplayPlanePropertiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPlanePropertiesKHR const& rhs ) const + { + return ( currentDisplay == rhs.currentDisplay ) + && ( currentStackIndex == rhs.currentStackIndex ); + } + + bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + DisplayKHR currentDisplay; + uint32_t currentStackIndex; + }; + static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" ); + + struct DisplayModeParametersKHR + { + DisplayModeParametersKHR( Extent2D visibleRegion_ = Extent2D(), uint32_t refreshRate_ = 0 ) + : visibleRegion( visibleRegion_ ) + , refreshRate( refreshRate_ ) + { + } + + DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) ); + } + + DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayModeParametersKHR ) ); + return *this; + } + DisplayModeParametersKHR& setVisibleRegion( Extent2D visibleRegion_ ) + { + visibleRegion = visibleRegion_; + return *this; + } + + DisplayModeParametersKHR& setRefreshRate( uint32_t refreshRate_ ) + { + refreshRate = refreshRate_; + return *this; + } + + operator const VkDisplayModeParametersKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayModeParametersKHR const& rhs ) const + { + return ( visibleRegion == rhs.visibleRegion ) + && ( refreshRate == rhs.refreshRate ); + } + + bool operator!=( DisplayModeParametersKHR const& rhs ) const + { + return !operator==( rhs ); + } + + Extent2D visibleRegion; + uint32_t refreshRate; + }; + static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" 
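+  // Usage sketch (not part of the generated header): the *IndirectCommand structs above mirror
+  // the layout the GPU reads from an indirect buffer, so a CPU-side record can be filled with
+  // the setters and copied into that buffer; the counts below are hypothetical.
+  //   vk::DrawIndexedIndirectCommand draw = vk::DrawIndexedIndirectCommand()
+  //     .setIndexCount( 36 )
+  //     .setInstanceCount( 1 );
+  //   vk::DispatchIndirectCommand dispatch( 8 /*x*/, 8 /*y*/, 1 /*z*/ );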
); + + struct DisplayModePropertiesKHR + { + operator const VkDisplayModePropertiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayModePropertiesKHR const& rhs ) const + { + return ( displayMode == rhs.displayMode ) + && ( parameters == rhs.parameters ); + } + + bool operator!=( DisplayModePropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + DisplayModeKHR displayMode; + DisplayModeParametersKHR parameters; + }; + static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" ); + + struct RectLayerKHR + { + RectLayerKHR( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D(), uint32_t layer_ = 0 ) + : offset( offset_ ) + , extent( extent_ ) + , layer( layer_ ) + { + } + + RectLayerKHR( VkRectLayerKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( RectLayerKHR ) ); + } + + RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( RectLayerKHR ) ); + return *this; + } + RectLayerKHR& setOffset( Offset2D offset_ ) + { + offset = offset_; + return *this; + } + + RectLayerKHR& setExtent( Extent2D extent_ ) + { + extent = extent_; + return *this; + } + + RectLayerKHR& setLayer( uint32_t layer_ ) + { + layer = layer_; + return *this; + } + + operator const VkRectLayerKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( RectLayerKHR const& rhs ) const + { + return ( offset == rhs.offset ) + && ( extent == rhs.extent ) + && ( layer == rhs.layer ); + } + + bool operator!=( RectLayerKHR const& rhs ) const + { + return !operator==( rhs ); + } + + Offset2D offset; + Extent2D extent; + uint32_t layer; + }; + static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" ); + + struct PresentRegionKHR + { + PresentRegionKHR( uint32_t rectangleCount_ = 0, const RectLayerKHR* pRectangles_ = nullptr ) + : rectangleCount( rectangleCount_ ) + , pRectangles( pRectangles_ ) + { + } + + PresentRegionKHR( VkPresentRegionKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentRegionKHR ) ); + } + + PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentRegionKHR ) ); + return *this; + } + PresentRegionKHR& setRectangleCount( uint32_t rectangleCount_ ) + { + rectangleCount = rectangleCount_; + return *this; + } + + PresentRegionKHR& setPRectangles( const RectLayerKHR* pRectangles_ ) + { + pRectangles = pRectangles_; + return *this; + } + + operator const VkPresentRegionKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PresentRegionKHR const& rhs ) const + { + return ( rectangleCount == rhs.rectangleCount ) + && ( pRectangles == rhs.pRectangles ); + } + + bool operator!=( PresentRegionKHR const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t rectangleCount; + const RectLayerKHR* pRectangles; + }; + static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" 
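+  // Usage sketch (not part of the generated header): a PresentRegionKHR groups the rectangles of
+  // one swapchain image that actually changed (VK_KHR_incremental_present); the 32x32 rectangle
+  // is hypothetical and uses the Offset2D / Extent2D wrappers defined earlier in this header.
+  //   vk::RectLayerKHR dirtyRect( vk::Offset2D( 0, 0 ), vk::Extent2D( 32, 32 ), 0 /*layer*/ );
+  //   vk::PresentRegionKHR region( 1, &dirtyRect );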
); + + struct XYColorEXT + { + XYColorEXT( float x_ = 0, float y_ = 0 ) + : x( x_ ) + , y( y_ ) + { + } + + XYColorEXT( VkXYColorEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( XYColorEXT ) ); + } + + XYColorEXT& operator=( VkXYColorEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( XYColorEXT ) ); + return *this; + } + XYColorEXT& setX( float x_ ) + { + x = x_; + return *this; + } + + XYColorEXT& setY( float y_ ) + { + y = y_; + return *this; + } + + operator const VkXYColorEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( XYColorEXT const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ); + } + + bool operator!=( XYColorEXT const& rhs ) const + { + return !operator==( rhs ); + } + + float x; + float y; + }; + static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" ); + + struct RefreshCycleDurationGOOGLE + { + RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = 0 ) + : refreshDuration( refreshDuration_ ) + { + } + + RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( RefreshCycleDurationGOOGLE ) ); + } + + RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( RefreshCycleDurationGOOGLE ) ); + return *this; + } + RefreshCycleDurationGOOGLE& setRefreshDuration( uint64_t refreshDuration_ ) + { + refreshDuration = refreshDuration_; + return *this; + } + + operator const VkRefreshCycleDurationGOOGLE&() const + { + return *reinterpret_cast(this); + } + + bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const + { + return ( refreshDuration == rhs.refreshDuration ); + } + + bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const + { + return !operator==( rhs ); + } + + uint64_t refreshDuration; + }; + static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" 
); + + struct PastPresentationTimingGOOGLE + { + PastPresentationTimingGOOGLE( uint32_t presentID_ = 0, uint64_t desiredPresentTime_ = 0, uint64_t actualPresentTime_ = 0, uint64_t earliestPresentTime_ = 0, uint64_t presentMargin_ = 0 ) + : presentID( presentID_ ) + , desiredPresentTime( desiredPresentTime_ ) + , actualPresentTime( actualPresentTime_ ) + , earliestPresentTime( earliestPresentTime_ ) + , presentMargin( presentMargin_ ) + { + } + + PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PastPresentationTimingGOOGLE ) ); + } + + PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PastPresentationTimingGOOGLE ) ); + return *this; + } + PastPresentationTimingGOOGLE& setPresentID( uint32_t presentID_ ) + { + presentID = presentID_; + return *this; + } + + PastPresentationTimingGOOGLE& setDesiredPresentTime( uint64_t desiredPresentTime_ ) + { + desiredPresentTime = desiredPresentTime_; + return *this; + } + + PastPresentationTimingGOOGLE& setActualPresentTime( uint64_t actualPresentTime_ ) + { + actualPresentTime = actualPresentTime_; + return *this; + } + + PastPresentationTimingGOOGLE& setEarliestPresentTime( uint64_t earliestPresentTime_ ) + { + earliestPresentTime = earliestPresentTime_; + return *this; + } + + PastPresentationTimingGOOGLE& setPresentMargin( uint64_t presentMargin_ ) + { + presentMargin = presentMargin_; + return *this; + } + + operator const VkPastPresentationTimingGOOGLE&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PastPresentationTimingGOOGLE const& rhs ) const + { + return ( presentID == rhs.presentID ) + && ( desiredPresentTime == rhs.desiredPresentTime ) + && ( actualPresentTime == rhs.actualPresentTime ) + && ( earliestPresentTime == rhs.earliestPresentTime ) + && ( presentMargin == rhs.presentMargin ); + } + + bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; + }; + static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" 
); + + struct PresentTimeGOOGLE + { + PresentTimeGOOGLE( uint32_t presentID_ = 0, uint64_t desiredPresentTime_ = 0 ) + : presentID( presentID_ ) + , desiredPresentTime( desiredPresentTime_ ) + { + } + + PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) ); + } + + PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentTimeGOOGLE ) ); + return *this; + } + PresentTimeGOOGLE& setPresentID( uint32_t presentID_ ) + { + presentID = presentID_; + return *this; + } + + PresentTimeGOOGLE& setDesiredPresentTime( uint64_t desiredPresentTime_ ) + { + desiredPresentTime = desiredPresentTime_; + return *this; + } + + operator const VkPresentTimeGOOGLE&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PresentTimeGOOGLE const& rhs ) const + { + return ( presentID == rhs.presentID ) + && ( desiredPresentTime == rhs.desiredPresentTime ); + } + + bool operator!=( PresentTimeGOOGLE const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t presentID; + uint64_t desiredPresentTime; + }; + static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" ); + + struct ViewportWScalingNV + { + ViewportWScalingNV( float xcoeff_ = 0, float ycoeff_ = 0 ) + : xcoeff( xcoeff_ ) + , ycoeff( ycoeff_ ) + { + } + + ViewportWScalingNV( VkViewportWScalingNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ViewportWScalingNV ) ); + } + + ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ViewportWScalingNV ) ); + return *this; + } + ViewportWScalingNV& setXcoeff( float xcoeff_ ) + { + xcoeff = xcoeff_; + return *this; + } + + ViewportWScalingNV& setYcoeff( float ycoeff_ ) + { + ycoeff = ycoeff_; + return *this; + } + + operator const VkViewportWScalingNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ViewportWScalingNV const& rhs ) const + { + return ( xcoeff == rhs.xcoeff ) + && ( ycoeff == rhs.ycoeff ); + } + + bool operator!=( ViewportWScalingNV const& rhs ) const + { + return !operator==( rhs ); + } + + float xcoeff; + float ycoeff; + }; + static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" ); + + struct SampleLocationEXT + { + SampleLocationEXT( float x_ = 0, float y_ = 0 ) + : x( x_ ) + , y( y_ ) + { + } + + SampleLocationEXT( VkSampleLocationEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SampleLocationEXT ) ); + } + + SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SampleLocationEXT ) ); + return *this; + } + SampleLocationEXT& setX( float x_ ) + { + x = x_; + return *this; + } + + SampleLocationEXT& setY( float y_ ) + { + y = y_; + return *this; + } + + operator const VkSampleLocationEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SampleLocationEXT const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ); + } + + bool operator!=( SampleLocationEXT const& rhs ) const + { + return !operator==( rhs ); + } + + float x; + float y; + }; + static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" 
); + + struct ShaderResourceUsageAMD + { + operator const VkShaderResourceUsageAMD&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ShaderResourceUsageAMD const& rhs ) const + { + return ( numUsedVgprs == rhs.numUsedVgprs ) + && ( numUsedSgprs == rhs.numUsedSgprs ) + && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) + && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) + && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); + } + + bool operator!=( ShaderResourceUsageAMD const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; + }; + static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" ); + + struct VertexInputBindingDivisorDescriptionEXT + { + VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = 0, uint32_t divisor_ = 0 ) + : binding( binding_ ) + , divisor( divisor_ ) + { + } + + VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) ); + } + + VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) ); + return *this; + } + VertexInputBindingDivisorDescriptionEXT& setBinding( uint32_t binding_ ) + { + binding = binding_; + return *this; + } + + VertexInputBindingDivisorDescriptionEXT& setDivisor( uint32_t divisor_ ) + { + divisor = divisor_; + return *this; + } + + operator const VkVertexInputBindingDivisorDescriptionEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const + { + return ( binding == rhs.binding ) + && ( divisor == rhs.divisor ); + } + + bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t binding; + uint32_t divisor; + }; + static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" 
); + + enum class ImageLayout + { + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR + }; + + struct DescriptorImageInfo + { + DescriptorImageInfo( Sampler sampler_ = Sampler(), ImageView imageView_ = ImageView(), ImageLayout imageLayout_ = ImageLayout::eUndefined ) + : sampler( sampler_ ) + , imageView( imageView_ ) + , imageLayout( imageLayout_ ) + { + } + + DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorImageInfo ) ); + } + + DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorImageInfo ) ); + return *this; + } + DescriptorImageInfo& setSampler( Sampler sampler_ ) + { + sampler = sampler_; + return *this; + } + + DescriptorImageInfo& setImageView( ImageView imageView_ ) + { + imageView = imageView_; + return *this; + } + + DescriptorImageInfo& setImageLayout( ImageLayout imageLayout_ ) + { + imageLayout = imageLayout_; + return *this; + } + + operator const VkDescriptorImageInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorImageInfo const& rhs ) const + { + return ( sampler == rhs.sampler ) + && ( imageView == rhs.imageView ) + && ( imageLayout == rhs.imageLayout ); + } + + bool operator!=( DescriptorImageInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Sampler sampler; + ImageView imageView; + ImageLayout imageLayout; + }; + static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" 
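+  // Usage sketch (not part of the generated header): DescriptorImageInfo pairs a sampler and an
+  // image view with the layout the image will be in when it is accessed; mySampler and myView
+  // are hypothetical handles created elsewhere.
+  //   vk::DescriptorImageInfo imageInfo( mySampler, myView, vk::ImageLayout::eShaderReadOnlyOptimal );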
); + + struct AttachmentReference + { + AttachmentReference( uint32_t attachment_ = 0, ImageLayout layout_ = ImageLayout::eUndefined ) + : attachment( attachment_ ) + , layout( layout_ ) + { + } + + AttachmentReference( VkAttachmentReference const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentReference ) ); + } + + AttachmentReference& operator=( VkAttachmentReference const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentReference ) ); + return *this; + } + AttachmentReference& setAttachment( uint32_t attachment_ ) + { + attachment = attachment_; + return *this; + } + + AttachmentReference& setLayout( ImageLayout layout_ ) + { + layout = layout_; + return *this; + } + + operator const VkAttachmentReference&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AttachmentReference const& rhs ) const + { + return ( attachment == rhs.attachment ) + && ( layout == rhs.layout ); + } + + bool operator!=( AttachmentReference const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t attachment; + ImageLayout layout; + }; + static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" ); + + enum class AttachmentLoadOp + { + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE + }; + + enum class AttachmentStoreOp + { + eStore = VK_ATTACHMENT_STORE_OP_STORE, + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE + }; + + enum class ImageType + { + e1D = VK_IMAGE_TYPE_1D, + e2D = VK_IMAGE_TYPE_2D, + e3D = VK_IMAGE_TYPE_3D + }; + + enum class ImageTiling + { + eOptimal = VK_IMAGE_TILING_OPTIMAL, + eLinear = VK_IMAGE_TILING_LINEAR + }; + + enum class ImageViewType + { + e1D = VK_IMAGE_VIEW_TYPE_1D, + e2D = VK_IMAGE_VIEW_TYPE_2D, + e3D = VK_IMAGE_VIEW_TYPE_3D, + eCube = VK_IMAGE_VIEW_TYPE_CUBE, + e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY + }; + + enum class CommandBufferLevel + { + ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY + }; + + enum class ComponentSwizzle + { + eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, + eZero = VK_COMPONENT_SWIZZLE_ZERO, + eOne = VK_COMPONENT_SWIZZLE_ONE, + eR = VK_COMPONENT_SWIZZLE_R, + eG = VK_COMPONENT_SWIZZLE_G, + eB = VK_COMPONENT_SWIZZLE_B, + eA = VK_COMPONENT_SWIZZLE_A + }; + + struct ComponentMapping + { + ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity, ComponentSwizzle g_ = ComponentSwizzle::eIdentity, ComponentSwizzle b_ = ComponentSwizzle::eIdentity, ComponentSwizzle a_ = ComponentSwizzle::eIdentity ) + : r( r_ ) + , g( g_ ) + , b( b_ ) + , a( a_ ) + { + } + + ComponentMapping( VkComponentMapping const & rhs ) + { + memcpy( this, &rhs, sizeof( ComponentMapping ) ); + } + + ComponentMapping& operator=( VkComponentMapping const & rhs ) + { + memcpy( this, &rhs, sizeof( ComponentMapping ) ); + return *this; + } + ComponentMapping& setR( ComponentSwizzle r_ ) + { + r = r_; + return *this; + } + + ComponentMapping& setG( ComponentSwizzle g_ ) + { + g = g_; + return *this; + } + + ComponentMapping& setB( ComponentSwizzle b_ ) + { + b = b_; + return *this; + } + + ComponentMapping& setA( ComponentSwizzle a_ ) + { + a = a_; + return *this; + } + + operator const VkComponentMapping&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ComponentMapping const& rhs ) const + { + return ( r == rhs.r ) + && ( g == rhs.g ) + && ( b == rhs.b ) + && ( a == 
rhs.a ); + } + + bool operator!=( ComponentMapping const& rhs ) const + { + return !operator==( rhs ); + } + + ComponentSwizzle r; + ComponentSwizzle g; + ComponentSwizzle b; + ComponentSwizzle a; + }; + static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" ); + + enum class DescriptorType + { + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT + }; + + struct DescriptorPoolSize + { + DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0 ) + : type( type_ ) + , descriptorCount( descriptorCount_ ) + { + } + + DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolSize ) ); + } + + DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolSize ) ); + return *this; + } + DescriptorPoolSize& setType( DescriptorType type_ ) + { + type = type_; + return *this; + } + + DescriptorPoolSize& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + operator const VkDescriptorPoolSize&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorPoolSize const& rhs ) const + { + return ( type == rhs.type ) + && ( descriptorCount == rhs.descriptorCount ); + } + + bool operator!=( DescriptorPoolSize const& rhs ) const + { + return !operator==( rhs ); + } + + DescriptorType type; + uint32_t descriptorCount; + }; + static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" 
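+  // Usage sketch (not part of the generated header): descriptor pool creation takes an array of
+  // DescriptorPoolSize entries, one per descriptor type; the counts below are hypothetical.
+  //   vk::DescriptorPoolSize poolSizes[] = {
+  //     vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 16 ),
+  //     vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 16 )
+  //   };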
); + + struct DescriptorUpdateTemplateEntry + { + DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, size_t offset_ = 0, size_t stride_ = 0 ) + : dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , offset( offset_ ) + , stride( stride_ ) + { + } + + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntry ) ); + } + + DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateEntry ) ); + return *this; + } + DescriptorUpdateTemplateEntry& setDstBinding( uint32_t dstBinding_ ) + { + dstBinding = dstBinding_; + return *this; + } + + DescriptorUpdateTemplateEntry& setDstArrayElement( uint32_t dstArrayElement_ ) + { + dstArrayElement = dstArrayElement_; + return *this; + } + + DescriptorUpdateTemplateEntry& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + DescriptorUpdateTemplateEntry& setDescriptorType( DescriptorType descriptorType_ ) + { + descriptorType = descriptorType_; + return *this; + } + + DescriptorUpdateTemplateEntry& setOffset( size_t offset_ ) + { + offset = offset_; + return *this; + } + + DescriptorUpdateTemplateEntry& setStride( size_t stride_ ) + { + stride = stride_; + return *this; + } + + operator const VkDescriptorUpdateTemplateEntry&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const + { + return ( dstBinding == rhs.dstBinding ) + && ( dstArrayElement == rhs.dstArrayElement ) + && ( descriptorCount == rhs.descriptorCount ) + && ( descriptorType == rhs.descriptorType ) + && ( offset == rhs.offset ) + && ( stride == rhs.stride ); + } + + bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + DescriptorType descriptorType; + size_t offset; + size_t stride; + }; + static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" 
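+  // Usage sketch (not part of the generated header): a DescriptorUpdateTemplateEntry describes
+  // where one binding's data lives inside a user-defined struct so a descriptor update template
+  // can read it; MyDescriptorData and its bufferInfo member are hypothetical.
+  //   vk::DescriptorUpdateTemplateEntry entry( 0 /*dstBinding*/, 0 /*dstArrayElement*/,
+  //     1 /*descriptorCount*/, vk::DescriptorType::eUniformBuffer,
+  //     offsetof( MyDescriptorData, bufferInfo ), sizeof( vk::DescriptorBufferInfo ) );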
); + + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + + enum class QueryType + { + eOcclusion = VK_QUERY_TYPE_OCCLUSION, + ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, + eTimestamp = VK_QUERY_TYPE_TIMESTAMP + }; + + enum class BorderColor + { + eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, + eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, + eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, + eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE + }; + + enum class PipelineBindPoint + { + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE + }; + + enum class PipelineCacheHeaderVersion + { + eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE + }; + + enum class PrimitiveTopology + { + ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, + eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, + eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, + eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, + eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, + eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, + eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, + eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, + ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST + }; + + enum class SharingMode + { + eExclusive = VK_SHARING_MODE_EXCLUSIVE, + eConcurrent = VK_SHARING_MODE_CONCURRENT + }; + + enum class IndexType + { + eUint16 = VK_INDEX_TYPE_UINT16, + eUint32 = VK_INDEX_TYPE_UINT32 + }; + + enum class Filter + { + eNearest = VK_FILTER_NEAREST, + eLinear = VK_FILTER_LINEAR, + eCubicIMG = VK_FILTER_CUBIC_IMG + }; + + enum class SamplerMipmapMode + { + eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST, + eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR + }; + + enum class SamplerAddressMode + { + eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, + eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, + eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, + eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE + }; + + enum class CompareOp + { + eNever = VK_COMPARE_OP_NEVER, + eLess = VK_COMPARE_OP_LESS, + eEqual = VK_COMPARE_OP_EQUAL, + eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, + eGreater = VK_COMPARE_OP_GREATER, + eNotEqual = VK_COMPARE_OP_NOT_EQUAL, + eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, + eAlways = VK_COMPARE_OP_ALWAYS + }; + + enum class PolygonMode + { + eFill = VK_POLYGON_MODE_FILL, + eLine = VK_POLYGON_MODE_LINE, + ePoint = VK_POLYGON_MODE_POINT, + eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV + }; + + enum class CullModeFlagBits + { + eNone = VK_CULL_MODE_NONE, + eFront = VK_CULL_MODE_FRONT_BIT, + eBack = VK_CULL_MODE_BACK_BIT, + eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK + }; + + using CullModeFlags = Flags; + + VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) + { + return CullModeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits ) + { + return ~( CullModeFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | 
VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack) + }; + }; + + enum class FrontFace + { + eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, + eClockwise = VK_FRONT_FACE_CLOCKWISE + }; + + enum class BlendFactor + { + eZero = VK_BLEND_FACTOR_ZERO, + eOne = VK_BLEND_FACTOR_ONE, + eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, + eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, + eDstColor = VK_BLEND_FACTOR_DST_COLOR, + eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, + eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, + eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, + eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, + eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, + eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, + eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, + eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, + eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, + eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, + eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, + eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, + eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, + eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA + }; + + enum class BlendOp + { + eAdd = VK_BLEND_OP_ADD, + eSubtract = VK_BLEND_OP_SUBTRACT, + eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, + eMin = VK_BLEND_OP_MIN, + eMax = VK_BLEND_OP_MAX, + eZeroEXT = VK_BLEND_OP_ZERO_EXT, + eSrcEXT = VK_BLEND_OP_SRC_EXT, + eDstEXT = VK_BLEND_OP_DST_EXT, + eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, + eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, + eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, + eDstInEXT = VK_BLEND_OP_DST_IN_EXT, + eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, + eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, + eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, + eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, + eXorEXT = VK_BLEND_OP_XOR_EXT, + eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, + eScreenEXT = VK_BLEND_OP_SCREEN_EXT, + eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, + eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, + eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, + eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, + eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, + eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, + eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, + eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, + eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, + eInvertEXT = VK_BLEND_OP_INVERT_EXT, + eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, + eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, + eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, + eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, + eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, + ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, + eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, + eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, + eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, + eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, + eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, + ePlusEXT = VK_BLEND_OP_PLUS_EXT, + ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, + ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, + ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, + eMinusEXT = VK_BLEND_OP_MINUS_EXT, + eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, + eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, + eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, + eRedEXT = VK_BLEND_OP_RED_EXT, + eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT + }; + + enum class StencilOp + { + eKeep = VK_STENCIL_OP_KEEP, + eZero = VK_STENCIL_OP_ZERO, + eReplace = VK_STENCIL_OP_REPLACE, + eIncrementAndClamp = 
VK_STENCIL_OP_INCREMENT_AND_CLAMP, + eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, + eInvert = VK_STENCIL_OP_INVERT, + eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, + eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP + }; + + struct StencilOpState + { + StencilOpState( StencilOp failOp_ = StencilOp::eKeep, StencilOp passOp_ = StencilOp::eKeep, StencilOp depthFailOp_ = StencilOp::eKeep, CompareOp compareOp_ = CompareOp::eNever, uint32_t compareMask_ = 0, uint32_t writeMask_ = 0, uint32_t reference_ = 0 ) + : failOp( failOp_ ) + , passOp( passOp_ ) + , depthFailOp( depthFailOp_ ) + , compareOp( compareOp_ ) + , compareMask( compareMask_ ) + , writeMask( writeMask_ ) + , reference( reference_ ) + { + } + + StencilOpState( VkStencilOpState const & rhs ) + { + memcpy( this, &rhs, sizeof( StencilOpState ) ); + } + + StencilOpState& operator=( VkStencilOpState const & rhs ) + { + memcpy( this, &rhs, sizeof( StencilOpState ) ); + return *this; + } + StencilOpState& setFailOp( StencilOp failOp_ ) + { + failOp = failOp_; + return *this; + } + + StencilOpState& setPassOp( StencilOp passOp_ ) + { + passOp = passOp_; + return *this; + } + + StencilOpState& setDepthFailOp( StencilOp depthFailOp_ ) + { + depthFailOp = depthFailOp_; + return *this; + } + + StencilOpState& setCompareOp( CompareOp compareOp_ ) + { + compareOp = compareOp_; + return *this; + } + + StencilOpState& setCompareMask( uint32_t compareMask_ ) + { + compareMask = compareMask_; + return *this; + } + + StencilOpState& setWriteMask( uint32_t writeMask_ ) + { + writeMask = writeMask_; + return *this; + } + + StencilOpState& setReference( uint32_t reference_ ) + { + reference = reference_; + return *this; + } + + operator const VkStencilOpState&() const + { + return *reinterpret_cast(this); + } + + bool operator==( StencilOpState const& rhs ) const + { + return ( failOp == rhs.failOp ) + && ( passOp == rhs.passOp ) + && ( depthFailOp == rhs.depthFailOp ) + && ( compareOp == rhs.compareOp ) + && ( compareMask == rhs.compareMask ) + && ( writeMask == rhs.writeMask ) + && ( reference == rhs.reference ); + } + + bool operator!=( StencilOpState const& rhs ) const + { + return !operator==( rhs ); + } + + StencilOp failOp; + StencilOp passOp; + StencilOp depthFailOp; + CompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; + }; + static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" 
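
// A minimal usage sketch, assuming <vulkan/vulkan.hpp> is included; the helper name is illustrative.
// It shows the operator| defined above composing CullModeFlagBits into CullModeFlags (here equal to
// eFrontAndBack) and the chained set*() members building a StencilOpState that increments the
// stencil value for every fragment that passes.
inline void exampleRasterStencilState()
{
  vk::CullModeFlags cullMode = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;
  vk::StencilOpState stencil = vk::StencilOpState()
                                 .setFailOp( vk::StencilOp::eKeep )
                                 .setPassOp( vk::StencilOp::eIncrementAndClamp )
                                 .setDepthFailOp( vk::StencilOp::eKeep )
                                 .setCompareOp( vk::CompareOp::eAlways )
                                 .setCompareMask( 0xFF )
                                 .setWriteMask( 0xFF )
                                 .setReference( 1 );
  (void)cullMode; (void)stencil;
}
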
); + + enum class LogicOp + { + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET + }; + + enum class InternalAllocationType + { + eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + }; + + enum class SystemAllocationScope + { + eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, + eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, + eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, + eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE + }; + + enum class PhysicalDeviceType + { + eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, + eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, + eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, + eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, + eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU + }; + + enum class VertexInputRate + { + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + }; + + struct VertexInputBindingDescription + { + VertexInputBindingDescription( uint32_t binding_ = 0, uint32_t stride_ = 0, VertexInputRate inputRate_ = VertexInputRate::eVertex ) + : binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + { + } + + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) ); + } + + VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputBindingDescription ) ); + return *this; + } + VertexInputBindingDescription& setBinding( uint32_t binding_ ) + { + binding = binding_; + return *this; + } + + VertexInputBindingDescription& setStride( uint32_t stride_ ) + { + stride = stride_; + return *this; + } + + VertexInputBindingDescription& setInputRate( VertexInputRate inputRate_ ) + { + inputRate = inputRate_; + return *this; + } + + operator const VkVertexInputBindingDescription&() const + { + return *reinterpret_cast(this); + } + + bool operator==( VertexInputBindingDescription const& rhs ) const + { + return ( binding == rhs.binding ) + && ( stride == rhs.stride ) + && ( inputRate == rhs.inputRate ); + } + + bool operator!=( VertexInputBindingDescription const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t binding; + uint32_t stride; + VertexInputRate inputRate; + }; + static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" 
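
// A minimal usage sketch; 'ExampleVertex' and the helper name are hypothetical. Binding 0 advances
// by one ExampleVertex per vertex, matching VertexInputRate::eVertex defined above.
struct ExampleVertex
{
  float position[3];
  float texcoord[2];
};

inline vk::VertexInputBindingDescription exampleBindingDescription()
{
  return vk::VertexInputBindingDescription( 0, sizeof( ExampleVertex ), vk::VertexInputRate::eVertex );
}
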
); + + enum class Format + { + eUndefined = VK_FORMAT_UNDEFINED, + eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, + eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16, + eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, + eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, + eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, + eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, + eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, + eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, + eR8Unorm = VK_FORMAT_R8_UNORM, + eR8Snorm = VK_FORMAT_R8_SNORM, + eR8Uscaled = VK_FORMAT_R8_USCALED, + eR8Sscaled = VK_FORMAT_R8_SSCALED, + eR8Uint = VK_FORMAT_R8_UINT, + eR8Sint = VK_FORMAT_R8_SINT, + eR8Srgb = VK_FORMAT_R8_SRGB, + eR8G8Unorm = VK_FORMAT_R8G8_UNORM, + eR8G8Snorm = VK_FORMAT_R8G8_SNORM, + eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, + eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, + eR8G8Uint = VK_FORMAT_R8G8_UINT, + eR8G8Sint = VK_FORMAT_R8G8_SINT, + eR8G8Srgb = VK_FORMAT_R8G8_SRGB, + eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, + eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, + eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, + eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, + eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, + eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, + eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, + eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, + eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, + eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, + eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, + eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, + eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, + eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, + eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, + eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, + eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, + eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, + eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, + eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, + eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, + eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, + eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, + eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, + eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, + eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, + eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, + eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, + eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, + eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, + eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, + eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, + eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, + eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, + eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, + eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, + eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, + eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, + eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, + eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, + eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, + eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, + eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, + eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, + eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, + eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, + eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, + eR16Unorm = VK_FORMAT_R16_UNORM, + eR16Snorm = VK_FORMAT_R16_SNORM, + eR16Uscaled = VK_FORMAT_R16_USCALED, + eR16Sscaled = VK_FORMAT_R16_SSCALED, + 
eR16Uint = VK_FORMAT_R16_UINT, + eR16Sint = VK_FORMAT_R16_SINT, + eR16Sfloat = VK_FORMAT_R16_SFLOAT, + eR16G16Unorm = VK_FORMAT_R16G16_UNORM, + eR16G16Snorm = VK_FORMAT_R16G16_SNORM, + eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, + eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, + eR16G16Uint = VK_FORMAT_R16G16_UINT, + eR16G16Sint = VK_FORMAT_R16G16_SINT, + eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, + eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, + eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, + eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, + eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, + eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, + eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, + eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, + eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, + eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, + eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, + eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, + eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, + eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, + eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, + eR32Uint = VK_FORMAT_R32_UINT, + eR32Sint = VK_FORMAT_R32_SINT, + eR32Sfloat = VK_FORMAT_R32_SFLOAT, + eR32G32Uint = VK_FORMAT_R32G32_UINT, + eR32G32Sint = VK_FORMAT_R32G32_SINT, + eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, + eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, + eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, + eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, + eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, + eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, + eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, + eR64Uint = VK_FORMAT_R64_UINT, + eR64Sint = VK_FORMAT_R64_SINT, + eR64Sfloat = VK_FORMAT_R64_SFLOAT, + eR64G64Uint = VK_FORMAT_R64G64_UINT, + eR64G64Sint = VK_FORMAT_R64G64_SINT, + eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, + eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, + eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, + eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, + eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, + eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, + eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, + eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, + eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + eD16Unorm = VK_FORMAT_D16_UNORM, + eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, + eD32Sfloat = VK_FORMAT_D32_SFLOAT, + eS8Uint = VK_FORMAT_S8_UINT, + eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, + eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, + eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, + eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, + eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, + eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, + eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, + eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, + eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK, + eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, + eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, + eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, + eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, + eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, + eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, + eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, + eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, + eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, + eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, + eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, + eEtc2R8G8B8A1SrgbBlock = 
VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, + eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, + eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, + eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, + eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, + eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, + eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, + eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM, + eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM, + eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = 
VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, + eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM, + eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM, + eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R162Plane422UnormKHR = 
VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, + ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, + ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, + ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, + ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, + ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, + ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, + ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG + }; + + struct VertexInputAttributeDescription + { + VertexInputAttributeDescription( uint32_t location_ = 0, uint32_t binding_ = 0, Format format_ = Format::eUndefined, uint32_t offset_ = 0 ) + : location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + { + } + + VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) ); + } + + VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( VertexInputAttributeDescription ) ); + return *this; + } + VertexInputAttributeDescription& setLocation( uint32_t location_ ) + { + location = location_; + return *this; + } + + VertexInputAttributeDescription& setBinding( uint32_t binding_ ) + { + binding = binding_; + return *this; + } + + VertexInputAttributeDescription& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + VertexInputAttributeDescription& setOffset( uint32_t offset_ ) + { + offset = offset_; + return *this; + } + + operator const VkVertexInputAttributeDescription&() const + { + return *reinterpret_cast(this); + } + + bool operator==( VertexInputAttributeDescription const& rhs ) const + { + return ( location == rhs.location ) + && ( binding == rhs.binding ) + && ( format == rhs.format ) + && ( offset == rhs.offset ); + } + + bool operator!=( VertexInputAttributeDescription const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t location; + uint32_t binding; + Format format; + uint32_t offset; + }; + static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" 
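
// A minimal usage sketch continuing the hypothetical 'ExampleVertex' layout from the binding sketch
// above (assumes <array> and <cstddef>): two attributes read from binding 0, with the Format values
// above describing three and two 32-bit floats respectively.
inline std::array<vk::VertexInputAttributeDescription, 2> exampleAttributeDescriptions()
{
  return {{ vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32B32Sfloat, offsetof( ExampleVertex, position ) ),
            vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32Sfloat, offsetof( ExampleVertex, texcoord ) ) }};
}
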
); + + enum class StructureType + { + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, + ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = 
VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + 
ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionCreateInfoKHR = 
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + eExternalSemaphoreProperties = 
VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, + eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, + eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, + eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, + eMirSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR, + eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, + eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, + eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, + eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, + eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, + eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, + eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, + eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, + eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, + eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, + eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, 
+ eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, + eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, + eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, + eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, + eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, + eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, + eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, + eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, + eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, + eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, + eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX, + eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX, + eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX, + eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX, + eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX, + eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX, + ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, + eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, + eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, + eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, + eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, + ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, + ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, + ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, + ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, + ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, + eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, + 
eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, + eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, + ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, + eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, + eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, + eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, + eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, + eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, + eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, + eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, + eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, + eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, + eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, + eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, + ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, + ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, + eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, + ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, + ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, + ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, + eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, + eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + 
eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, + ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT + }; + + struct ApplicationInfo + { + ApplicationInfo( const char* pApplicationName_ = nullptr, uint32_t applicationVersion_ = 0, const char* pEngineName_ = nullptr, uint32_t engineVersion_ = 0, uint32_t apiVersion_ = 0 ) + : pApplicationName( pApplicationName_ ) + , applicationVersion( applicationVersion_ ) + , pEngineName( pEngineName_ ) + , engineVersion( engineVersion_ ) + , apiVersion( apiVersion_ ) + { + } + + ApplicationInfo( VkApplicationInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ApplicationInfo ) ); + } + + ApplicationInfo& operator=( VkApplicationInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ApplicationInfo ) ); + return *this; + } + ApplicationInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ApplicationInfo& setPApplicationName( const char* pApplicationName_ ) + { + pApplicationName = pApplicationName_; + return *this; + } + + ApplicationInfo& setApplicationVersion( uint32_t applicationVersion_ ) + { + applicationVersion = applicationVersion_; + return *this; + } + + ApplicationInfo& setPEngineName( const char* pEngineName_ ) + { + pEngineName = pEngineName_; + return *this; + } + + ApplicationInfo& setEngineVersion( uint32_t engineVersion_ ) + { + engineVersion = engineVersion_; + return *this; + } + + ApplicationInfo& setApiVersion( uint32_t apiVersion_ ) + { + apiVersion = apiVersion_; + return *this; + } + + operator const VkApplicationInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ApplicationInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pApplicationName == rhs.pApplicationName ) + && ( applicationVersion == rhs.applicationVersion ) + && ( pEngineName == rhs.pEngineName ) + && ( engineVersion == rhs.engineVersion ) + && ( apiVersion == rhs.apiVersion ); + } + + bool operator!=( ApplicationInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eApplicationInfo; + + public: + const void* pNext = nullptr; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; + }; + static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" 
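
// A minimal usage sketch, assuming <vulkan/vulkan.hpp> is included; the application and engine names
// are placeholders. sType is fixed to eApplicationInfo by the private member above, so only the
// payload fields need to be provided.
inline vk::ApplicationInfo exampleApplicationInfo()
{
  return vk::ApplicationInfo()
           .setPApplicationName( "ExampleApp" )
           .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
           .setPEngineName( "ExampleEngine" )
           .setEngineVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
           .setApiVersion( VK_API_VERSION_1_1 );
}
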
); + + struct InstanceCreateInfo + { + InstanceCreateInfo( InstanceCreateFlags flags_ = InstanceCreateFlags(), const ApplicationInfo* pApplicationInfo_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr ) + : flags( flags_ ) + , pApplicationInfo( pApplicationInfo_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + { + } + + InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( InstanceCreateInfo ) ); + } + + InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( InstanceCreateInfo ) ); + return *this; + } + InstanceCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + InstanceCreateInfo& setFlags( InstanceCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + InstanceCreateInfo& setPApplicationInfo( const ApplicationInfo* pApplicationInfo_ ) + { + pApplicationInfo = pApplicationInfo_; + return *this; + } + + InstanceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ ) + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + InstanceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + + InstanceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + InstanceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + + operator const VkInstanceCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( InstanceCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pApplicationInfo == rhs.pApplicationInfo ) + && ( enabledLayerCount == rhs.enabledLayerCount ) + && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) + && ( enabledExtensionCount == rhs.enabledExtensionCount ) + && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ); + } + + bool operator!=( InstanceCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eInstanceCreateInfo; + + public: + const void* pNext = nullptr; + InstanceCreateFlags flags; + const ApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + }; + static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" 
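  // [Editorial sketch, not part of the generated header] Illustrative use of the
  // fluent setters on ApplicationInfo and InstanceCreateInfo above; the literal
  // application name and version are placeholder assumptions.
  //   vk::ApplicationInfo appInfo = vk::ApplicationInfo()
  //       .setPApplicationName( "example-app" )
  //       .setApiVersion( VK_MAKE_VERSION( 1, 1, 0 ) );
  //   vk::InstanceCreateInfo instanceInfo = vk::InstanceCreateInfo()
  //       .setPApplicationInfo( &appInfo );
  //   vk::Instance instance = vk::createInstance( instanceInfo );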
); + + struct MemoryAllocateInfo + { + MemoryAllocateInfo( DeviceSize allocationSize_ = 0, uint32_t memoryTypeIndex_ = 0 ) + : allocationSize( allocationSize_ ) + , memoryTypeIndex( memoryTypeIndex_ ) + { + } + + MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) ); + } + + MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryAllocateInfo ) ); + return *this; + } + MemoryAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryAllocateInfo& setAllocationSize( DeviceSize allocationSize_ ) + { + allocationSize = allocationSize_; + return *this; + } + + MemoryAllocateInfo& setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) + { + memoryTypeIndex = memoryTypeIndex_; + return *this; + } + + operator const VkMemoryAllocateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( allocationSize == rhs.allocationSize ) + && ( memoryTypeIndex == rhs.memoryTypeIndex ); + } + + bool operator!=( MemoryAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryAllocateInfo; + + public: + const void* pNext = nullptr; + DeviceSize allocationSize; + uint32_t memoryTypeIndex; + }; + static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); + + struct MappedMemoryRange + { + MappedMemoryRange( DeviceMemory memory_ = DeviceMemory(), DeviceSize offset_ = 0, DeviceSize size_ = 0 ) + : memory( memory_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + MappedMemoryRange( VkMappedMemoryRange const & rhs ) + { + memcpy( this, &rhs, sizeof( MappedMemoryRange ) ); + } + + MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) + { + memcpy( this, &rhs, sizeof( MappedMemoryRange ) ); + return *this; + } + MappedMemoryRange& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MappedMemoryRange& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + MappedMemoryRange& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + MappedMemoryRange& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + operator const VkMappedMemoryRange&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MappedMemoryRange const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( MappedMemoryRange const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMappedMemoryRange; + + public: + const void* pNext = nullptr; + DeviceMemory memory; + DeviceSize offset; + DeviceSize size; + }; + static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" 
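  // [Editorial sketch, not part of the generated header] MemoryAllocateInfo and
  // MappedMemoryRange in use; `device`, `bufferSize` and `memoryTypeIndex` are
  // assumed to be provided by the caller.
  //   vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
  //       .setAllocationSize( bufferSize )
  //       .setMemoryTypeIndex( memoryTypeIndex );
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
  //   void* data = device.mapMemory( memory, 0, bufferSize );
  //   vk::MappedMemoryRange range = vk::MappedMemoryRange()
  //       .setMemory( memory ).setOffset( 0 ).setSize( bufferSize );
  //   device.flushMappedMemoryRanges( range );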
); + + struct WriteDescriptorSet + { + WriteDescriptorSet( DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, const DescriptorImageInfo* pImageInfo_ = nullptr, const DescriptorBufferInfo* pBufferInfo_ = nullptr, const BufferView* pTexelBufferView_ = nullptr ) + : dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , pImageInfo( pImageInfo_ ) + , pBufferInfo( pBufferInfo_ ) + , pTexelBufferView( pTexelBufferView_ ) + { + } + + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) + { + memcpy( this, &rhs, sizeof( WriteDescriptorSet ) ); + } + + WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) + { + memcpy( this, &rhs, sizeof( WriteDescriptorSet ) ); + return *this; + } + WriteDescriptorSet& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + WriteDescriptorSet& setDstSet( DescriptorSet dstSet_ ) + { + dstSet = dstSet_; + return *this; + } + + WriteDescriptorSet& setDstBinding( uint32_t dstBinding_ ) + { + dstBinding = dstBinding_; + return *this; + } + + WriteDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ ) + { + dstArrayElement = dstArrayElement_; + return *this; + } + + WriteDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + WriteDescriptorSet& setDescriptorType( DescriptorType descriptorType_ ) + { + descriptorType = descriptorType_; + return *this; + } + + WriteDescriptorSet& setPImageInfo( const DescriptorImageInfo* pImageInfo_ ) + { + pImageInfo = pImageInfo_; + return *this; + } + + WriteDescriptorSet& setPBufferInfo( const DescriptorBufferInfo* pBufferInfo_ ) + { + pBufferInfo = pBufferInfo_; + return *this; + } + + WriteDescriptorSet& setPTexelBufferView( const BufferView* pTexelBufferView_ ) + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + + operator const VkWriteDescriptorSet&() const + { + return *reinterpret_cast(this); + } + + bool operator==( WriteDescriptorSet const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dstSet == rhs.dstSet ) + && ( dstBinding == rhs.dstBinding ) + && ( dstArrayElement == rhs.dstArrayElement ) + && ( descriptorCount == rhs.descriptorCount ) + && ( descriptorType == rhs.descriptorType ) + && ( pImageInfo == rhs.pImageInfo ) + && ( pBufferInfo == rhs.pBufferInfo ) + && ( pTexelBufferView == rhs.pTexelBufferView ); + } + + bool operator!=( WriteDescriptorSet const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWriteDescriptorSet; + + public: + const void* pNext = nullptr; + DescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + DescriptorType descriptorType; + const DescriptorImageInfo* pImageInfo; + const DescriptorBufferInfo* pBufferInfo; + const BufferView* pTexelBufferView; + }; + static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" 
); + + struct CopyDescriptorSet + { + CopyDescriptorSet( DescriptorSet srcSet_ = DescriptorSet(), uint32_t srcBinding_ = 0, uint32_t srcArrayElement_ = 0, DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0 ) + : srcSet( srcSet_ ) + , srcBinding( srcBinding_ ) + , srcArrayElement( srcArrayElement_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + { + } + + CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) + { + memcpy( this, &rhs, sizeof( CopyDescriptorSet ) ); + } + + CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) + { + memcpy( this, &rhs, sizeof( CopyDescriptorSet ) ); + return *this; + } + CopyDescriptorSet& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CopyDescriptorSet& setSrcSet( DescriptorSet srcSet_ ) + { + srcSet = srcSet_; + return *this; + } + + CopyDescriptorSet& setSrcBinding( uint32_t srcBinding_ ) + { + srcBinding = srcBinding_; + return *this; + } + + CopyDescriptorSet& setSrcArrayElement( uint32_t srcArrayElement_ ) + { + srcArrayElement = srcArrayElement_; + return *this; + } + + CopyDescriptorSet& setDstSet( DescriptorSet dstSet_ ) + { + dstSet = dstSet_; + return *this; + } + + CopyDescriptorSet& setDstBinding( uint32_t dstBinding_ ) + { + dstBinding = dstBinding_; + return *this; + } + + CopyDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ ) + { + dstArrayElement = dstArrayElement_; + return *this; + } + + CopyDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + operator const VkCopyDescriptorSet&() const + { + return *reinterpret_cast(this); + } + + bool operator==( CopyDescriptorSet const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcSet == rhs.srcSet ) + && ( srcBinding == rhs.srcBinding ) + && ( srcArrayElement == rhs.srcArrayElement ) + && ( dstSet == rhs.dstSet ) + && ( dstBinding == rhs.dstBinding ) + && ( dstArrayElement == rhs.dstArrayElement ) + && ( descriptorCount == rhs.descriptorCount ); + } + + bool operator!=( CopyDescriptorSet const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCopyDescriptorSet; + + public: + const void* pNext = nullptr; + DescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + DescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + }; + static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" 
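  // [Editorial sketch, not part of the generated header] Writing a uniform-buffer
  // binding with WriteDescriptorSet; `device`, `descriptorSet` and `bufferInfo`
  // (a vk::DescriptorBufferInfo) are assumed.
  //   vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
  //       .setDstSet( descriptorSet )
  //       .setDstBinding( 0 )
  //       .setDescriptorCount( 1 )
  //       .setDescriptorType( vk::DescriptorType::eUniformBuffer )
  //       .setPBufferInfo( &bufferInfo );
  //   device.updateDescriptorSets( write, nullptr );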
); + + struct BufferViewCreateInfo + { + BufferViewCreateInfo( BufferViewCreateFlags flags_ = BufferViewCreateFlags(), Buffer buffer_ = Buffer(), Format format_ = Format::eUndefined, DeviceSize offset_ = 0, DeviceSize range_ = 0 ) + : flags( flags_ ) + , buffer( buffer_ ) + , format( format_ ) + , offset( offset_ ) + , range( range_ ) + { + } + + BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) ); + } + + BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferViewCreateInfo ) ); + return *this; + } + BufferViewCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BufferViewCreateInfo& setFlags( BufferViewCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + BufferViewCreateInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + BufferViewCreateInfo& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + BufferViewCreateInfo& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + BufferViewCreateInfo& setRange( DeviceSize range_ ) + { + range = range_; + return *this; + } + + operator const VkBufferViewCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BufferViewCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( buffer == rhs.buffer ) + && ( format == rhs.format ) + && ( offset == rhs.offset ) + && ( range == rhs.range ); + } + + bool operator!=( BufferViewCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBufferViewCreateInfo; + + public: + const void* pNext = nullptr; + BufferViewCreateFlags flags; + Buffer buffer; + Format format; + DeviceSize offset; + DeviceSize range; + }; + static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" 
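  // [Editorial sketch, not part of the generated header] Creating a texel buffer
  // view over an existing buffer; `device`, `buffer` and `bufferSize` are assumed,
  // and the format choice is illustrative only.
  //   vk::BufferViewCreateInfo viewInfo = vk::BufferViewCreateInfo()
  //       .setBuffer( buffer )
  //       .setFormat( vk::Format::eR32Sfloat )
  //       .setOffset( 0 )
  //       .setRange( bufferSize );
  //   vk::BufferView bufferView = device.createBufferView( viewInfo );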
); + + struct ShaderModuleCreateInfo + { + ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_ = ShaderModuleCreateFlags(), size_t codeSize_ = 0, const uint32_t* pCode_ = nullptr ) + : flags( flags_ ) + , codeSize( codeSize_ ) + , pCode( pCode_ ) + { + } + + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) ); + } + + ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ShaderModuleCreateInfo ) ); + return *this; + } + ShaderModuleCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ShaderModuleCreateInfo& setFlags( ShaderModuleCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + ShaderModuleCreateInfo& setCodeSize( size_t codeSize_ ) + { + codeSize = codeSize_; + return *this; + } + + ShaderModuleCreateInfo& setPCode( const uint32_t* pCode_ ) + { + pCode = pCode_; + return *this; + } + + operator const VkShaderModuleCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ShaderModuleCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( codeSize == rhs.codeSize ) + && ( pCode == rhs.pCode ); + } + + bool operator!=( ShaderModuleCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eShaderModuleCreateInfo; + + public: + const void* pNext = nullptr; + ShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; + }; + static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" ); + + struct DescriptorSetAllocateInfo + { + DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = DescriptorPool(), uint32_t descriptorSetCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr ) + : descriptorPool( descriptorPool_ ) + , descriptorSetCount( descriptorSetCount_ ) + , pSetLayouts( pSetLayouts_ ) + { + } + + DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) ); + } + + DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetAllocateInfo ) ); + return *this; + } + DescriptorSetAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorSetAllocateInfo& setDescriptorPool( DescriptorPool descriptorPool_ ) + { + descriptorPool = descriptorPool_; + return *this; + } + + DescriptorSetAllocateInfo& setDescriptorSetCount( uint32_t descriptorSetCount_ ) + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + DescriptorSetAllocateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ ) + { + pSetLayouts = pSetLayouts_; + return *this; + } + + operator const VkDescriptorSetAllocateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( descriptorPool == rhs.descriptorPool ) + && ( descriptorSetCount == rhs.descriptorSetCount ) + && ( pSetLayouts == rhs.pSetLayouts ); + } + + bool operator!=( DescriptorSetAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetAllocateInfo; + + public: + const void* pNext = nullptr; + DescriptorPool descriptorPool; + uint32_t 
descriptorSetCount; + const DescriptorSetLayout* pSetLayouts; + }; + static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" ); + + struct PipelineVertexInputStateCreateInfo + { + PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = PipelineVertexInputStateCreateFlags(), uint32_t vertexBindingDescriptionCount_ = 0, const VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr, uint32_t vertexAttributeDescriptionCount_ = 0, const VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr ) + : flags( flags_ ) + , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) + , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) + , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ) + , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) + { + } + + PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) ); + } + + PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineVertexInputStateCreateInfo ) ); + return *this; + } + PipelineVertexInputStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setFlags( PipelineVertexInputStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) + { + vertexBindingDescriptionCount = vertexBindingDescriptionCount_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setPVertexBindingDescriptions( const VertexInputBindingDescription* pVertexBindingDescriptions_ ) + { + pVertexBindingDescriptions = pVertexBindingDescriptions_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) + { + vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + return *this; + } + + PipelineVertexInputStateCreateInfo& setPVertexAttributeDescriptions( const VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) + { + pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + return *this; + } + + operator const VkPipelineVertexInputStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) + && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) + && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) + && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); + } + + bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VertexInputAttributeDescription* pVertexAttributeDescriptions; + }; + static_assert( sizeof( 
PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" ); + + struct PipelineInputAssemblyStateCreateInfo + { + PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = PipelineInputAssemblyStateCreateFlags(), PrimitiveTopology topology_ = PrimitiveTopology::ePointList, Bool32 primitiveRestartEnable_ = 0 ) + : flags( flags_ ) + , topology( topology_ ) + , primitiveRestartEnable( primitiveRestartEnable_ ) + { + } + + PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) ); + } + + PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineInputAssemblyStateCreateInfo ) ); + return *this; + } + PipelineInputAssemblyStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo& setFlags( PipelineInputAssemblyStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo& setTopology( PrimitiveTopology topology_ ) + { + topology = topology_; + return *this; + } + + PipelineInputAssemblyStateCreateInfo& setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ ) + { + primitiveRestartEnable = primitiveRestartEnable_; + return *this; + } + + operator const VkPipelineInputAssemblyStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( topology == rhs.topology ) + && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); + } + + bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineInputAssemblyStateCreateFlags flags; + PrimitiveTopology topology; + Bool32 primitiveRestartEnable; + }; + static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" 
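  // [Editorial sketch, not part of the generated header] Typical graphics-pipeline
  // vertex-input and input-assembly state; `bindingDescription` and
  // `attributeDescriptions` (a std::vector) are assumed.
  //   vk::PipelineVertexInputStateCreateInfo vertexInput = vk::PipelineVertexInputStateCreateInfo()
  //       .setVertexBindingDescriptionCount( 1 )
  //       .setPVertexBindingDescriptions( &bindingDescription )
  //       .setVertexAttributeDescriptionCount( static_cast<uint32_t>( attributeDescriptions.size() ) )
  //       .setPVertexAttributeDescriptions( attributeDescriptions.data() );
  //   vk::PipelineInputAssemblyStateCreateInfo inputAssembly = vk::PipelineInputAssemblyStateCreateInfo()
  //       .setTopology( vk::PrimitiveTopology::eTriangleList )
  //       .setPrimitiveRestartEnable( VK_FALSE );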
); + + struct PipelineTessellationStateCreateInfo + { + PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = PipelineTessellationStateCreateFlags(), uint32_t patchControlPoints_ = 0 ) + : flags( flags_ ) + , patchControlPoints( patchControlPoints_ ) + { + } + + PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) ); + } + + PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineTessellationStateCreateInfo ) ); + return *this; + } + PipelineTessellationStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineTessellationStateCreateInfo& setFlags( PipelineTessellationStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineTessellationStateCreateInfo& setPatchControlPoints( uint32_t patchControlPoints_ ) + { + patchControlPoints = patchControlPoints_; + return *this; + } + + operator const VkPipelineTessellationStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( patchControlPoints == rhs.patchControlPoints ); + } + + bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; + }; + static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" 
); + + struct PipelineViewportStateCreateInfo + { + PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = PipelineViewportStateCreateFlags(), uint32_t viewportCount_ = 0, const Viewport* pViewports_ = nullptr, uint32_t scissorCount_ = 0, const Rect2D* pScissors_ = nullptr ) + : flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewports( pViewports_ ) + , scissorCount( scissorCount_ ) + , pScissors( pScissors_ ) + { + } + + PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) ); + } + + PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportStateCreateInfo ) ); + return *this; + } + PipelineViewportStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportStateCreateInfo& setFlags( PipelineViewportStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineViewportStateCreateInfo& setViewportCount( uint32_t viewportCount_ ) + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportStateCreateInfo& setPViewports( const Viewport* pViewports_ ) + { + pViewports = pViewports_; + return *this; + } + + PipelineViewportStateCreateInfo& setScissorCount( uint32_t scissorCount_ ) + { + scissorCount = scissorCount_; + return *this; + } + + PipelineViewportStateCreateInfo& setPScissors( const Rect2D* pScissors_ ) + { + pScissors = pScissors_; + return *this; + } + + operator const VkPipelineViewportStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( viewportCount == rhs.viewportCount ) + && ( pViewports == rhs.pViewports ) + && ( scissorCount == rhs.scissorCount ) + && ( pScissors == rhs.pScissors ); + } + + bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const Viewport* pViewports; + uint32_t scissorCount; + const Rect2D* pScissors; + }; + static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" 
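  // [Editorial sketch, not part of the generated header] A single viewport and
  // scissor covering a swapchain extent; `extent` (a vk::Extent2D) is assumed.
  //   vk::Viewport viewport( 0.0f, 0.0f, static_cast<float>( extent.width ), static_cast<float>( extent.height ), 0.0f, 1.0f );
  //   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), extent );
  //   vk::PipelineViewportStateCreateInfo viewportState = vk::PipelineViewportStateCreateInfo()
  //       .setViewportCount( 1 ).setPViewports( &viewport )
  //       .setScissorCount( 1 ).setPScissors( &scissor );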
); + + struct PipelineRasterizationStateCreateInfo + { + PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = PipelineRasterizationStateCreateFlags(), Bool32 depthClampEnable_ = 0, Bool32 rasterizerDiscardEnable_ = 0, PolygonMode polygonMode_ = PolygonMode::eFill, CullModeFlags cullMode_ = CullModeFlags(), FrontFace frontFace_ = FrontFace::eCounterClockwise, Bool32 depthBiasEnable_ = 0, float depthBiasConstantFactor_ = 0, float depthBiasClamp_ = 0, float depthBiasSlopeFactor_ = 0, float lineWidth_ = 0 ) + : flags( flags_ ) + , depthClampEnable( depthClampEnable_ ) + , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) + , polygonMode( polygonMode_ ) + , cullMode( cullMode_ ) + , frontFace( frontFace_ ) + , depthBiasEnable( depthBiasEnable_ ) + , depthBiasConstantFactor( depthBiasConstantFactor_ ) + , depthBiasClamp( depthBiasClamp_ ) + , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) + , lineWidth( lineWidth_ ) + { + } + + PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) ); + } + + PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationStateCreateInfo ) ); + return *this; + } + PipelineRasterizationStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setFlags( PipelineRasterizationStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthClampEnable( Bool32 depthClampEnable_ ) + { + depthClampEnable = depthClampEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ ) + { + rasterizerDiscardEnable = rasterizerDiscardEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setPolygonMode( PolygonMode polygonMode_ ) + { + polygonMode = polygonMode_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setCullMode( CullModeFlags cullMode_ ) + { + cullMode = cullMode_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setFrontFace( FrontFace frontFace_ ) + { + frontFace = frontFace_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthBiasEnable( Bool32 depthBiasEnable_ ) + { + depthBiasEnable = depthBiasEnable_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) + { + depthBiasConstantFactor = depthBiasConstantFactor_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthBiasClamp( float depthBiasClamp_ ) + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) + { + depthBiasSlopeFactor = depthBiasSlopeFactor_; + return *this; + } + + PipelineRasterizationStateCreateInfo& setLineWidth( float lineWidth_ ) + { + lineWidth = lineWidth_; + return *this; + } + + operator const VkPipelineRasterizationStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( depthClampEnable == rhs.depthClampEnable ) + && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) + && ( polygonMode == rhs.polygonMode ) + && ( cullMode == 
rhs.cullMode ) + && ( frontFace == rhs.frontFace ) + && ( depthBiasEnable == rhs.depthBiasEnable ) + && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) + && ( depthBiasClamp == rhs.depthBiasClamp ) + && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) + && ( lineWidth == rhs.lineWidth ); + } + + bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineRasterizationStateCreateFlags flags; + Bool32 depthClampEnable; + Bool32 rasterizerDiscardEnable; + PolygonMode polygonMode; + CullModeFlags cullMode; + FrontFace frontFace; + Bool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; + }; + static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" ); + + struct PipelineDepthStencilStateCreateInfo + { + PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = PipelineDepthStencilStateCreateFlags(), Bool32 depthTestEnable_ = 0, Bool32 depthWriteEnable_ = 0, CompareOp depthCompareOp_ = CompareOp::eNever, Bool32 depthBoundsTestEnable_ = 0, Bool32 stencilTestEnable_ = 0, StencilOpState front_ = StencilOpState(), StencilOpState back_ = StencilOpState(), float minDepthBounds_ = 0, float maxDepthBounds_ = 0 ) + : flags( flags_ ) + , depthTestEnable( depthTestEnable_ ) + , depthWriteEnable( depthWriteEnable_ ) + , depthCompareOp( depthCompareOp_ ) + , depthBoundsTestEnable( depthBoundsTestEnable_ ) + , stencilTestEnable( stencilTestEnable_ ) + , front( front_ ) + , back( back_ ) + , minDepthBounds( minDepthBounds_ ) + , maxDepthBounds( maxDepthBounds_ ) + { + } + + PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) ); + } + + PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDepthStencilStateCreateInfo ) ); + return *this; + } + PipelineDepthStencilStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setFlags( PipelineDepthStencilStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setDepthTestEnable( Bool32 depthTestEnable_ ) + { + depthTestEnable = depthTestEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setDepthWriteEnable( Bool32 depthWriteEnable_ ) + { + depthWriteEnable = depthWriteEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setDepthCompareOp( CompareOp depthCompareOp_ ) + { + depthCompareOp = depthCompareOp_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ ) + { + depthBoundsTestEnable = depthBoundsTestEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setStencilTestEnable( Bool32 stencilTestEnable_ ) + { + stencilTestEnable = stencilTestEnable_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setFront( StencilOpState front_ ) + { + front = front_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setBack( StencilOpState back_ ) + { + back = back_; + return *this; + } + + 
PipelineDepthStencilStateCreateInfo& setMinDepthBounds( float minDepthBounds_ ) + { + minDepthBounds = minDepthBounds_; + return *this; + } + + PipelineDepthStencilStateCreateInfo& setMaxDepthBounds( float maxDepthBounds_ ) + { + maxDepthBounds = maxDepthBounds_; + return *this; + } + + operator const VkPipelineDepthStencilStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( depthTestEnable == rhs.depthTestEnable ) + && ( depthWriteEnable == rhs.depthWriteEnable ) + && ( depthCompareOp == rhs.depthCompareOp ) + && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) + && ( stencilTestEnable == rhs.stencilTestEnable ) + && ( front == rhs.front ) + && ( back == rhs.back ) + && ( minDepthBounds == rhs.minDepthBounds ) + && ( maxDepthBounds == rhs.maxDepthBounds ); + } + + bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineDepthStencilStateCreateFlags flags; + Bool32 depthTestEnable; + Bool32 depthWriteEnable; + CompareOp depthCompareOp; + Bool32 depthBoundsTestEnable; + Bool32 stencilTestEnable; + StencilOpState front; + StencilOpState back; + float minDepthBounds; + float maxDepthBounds; + }; + static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" ); + + struct PipelineCacheCreateInfo + { + PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_ = PipelineCacheCreateFlags(), size_t initialDataSize_ = 0, const void* pInitialData_ = nullptr ) + : flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + { + } + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) ); + } + + PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCacheCreateInfo ) ); + return *this; + } + PipelineCacheCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineCacheCreateInfo& setFlags( PipelineCacheCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineCacheCreateInfo& setInitialDataSize( size_t initialDataSize_ ) + { + initialDataSize = initialDataSize_; + return *this; + } + + PipelineCacheCreateInfo& setPInitialData( const void* pInitialData_ ) + { + pInitialData = pInitialData_; + return *this; + } + + operator const VkPipelineCacheCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineCacheCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( initialDataSize == rhs.initialDataSize ) + && ( pInitialData == rhs.pInitialData ); + } + + bool operator!=( PipelineCacheCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineCacheCreateInfo; + + public: + const void* pNext = nullptr; + PipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; + }; + static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" 
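  // [Editorial sketch, not part of the generated header] Creating a pipeline cache,
  // optionally seeded with previously saved data; `device` and `cacheData`
  // (a std::vector<char>) are assumed.
  //   vk::PipelineCacheCreateInfo cacheInfo = vk::PipelineCacheCreateInfo()
  //       .setInitialDataSize( cacheData.size() )
  //       .setPInitialData( cacheData.data() );
  //   vk::PipelineCache pipelineCache = device.createPipelineCache( cacheInfo );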
); + + struct SamplerCreateInfo + { + SamplerCreateInfo( SamplerCreateFlags flags_ = SamplerCreateFlags(), Filter magFilter_ = Filter::eNearest, Filter minFilter_ = Filter::eNearest, SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, float mipLodBias_ = 0, Bool32 anisotropyEnable_ = 0, float maxAnisotropy_ = 0, Bool32 compareEnable_ = 0, CompareOp compareOp_ = CompareOp::eNever, float minLod_ = 0, float maxLod_ = 0, BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, Bool32 unnormalizedCoordinates_ = 0 ) + : flags( flags_ ) + , magFilter( magFilter_ ) + , minFilter( minFilter_ ) + , mipmapMode( mipmapMode_ ) + , addressModeU( addressModeU_ ) + , addressModeV( addressModeV_ ) + , addressModeW( addressModeW_ ) + , mipLodBias( mipLodBias_ ) + , anisotropyEnable( anisotropyEnable_ ) + , maxAnisotropy( maxAnisotropy_ ) + , compareEnable( compareEnable_ ) + , compareOp( compareOp_ ) + , minLod( minLod_ ) + , maxLod( maxLod_ ) + , borderColor( borderColor_ ) + , unnormalizedCoordinates( unnormalizedCoordinates_ ) + { + } + + SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerCreateInfo ) ); + } + + SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerCreateInfo ) ); + return *this; + } + SamplerCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SamplerCreateInfo& setFlags( SamplerCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + SamplerCreateInfo& setMagFilter( Filter magFilter_ ) + { + magFilter = magFilter_; + return *this; + } + + SamplerCreateInfo& setMinFilter( Filter minFilter_ ) + { + minFilter = minFilter_; + return *this; + } + + SamplerCreateInfo& setMipmapMode( SamplerMipmapMode mipmapMode_ ) + { + mipmapMode = mipmapMode_; + return *this; + } + + SamplerCreateInfo& setAddressModeU( SamplerAddressMode addressModeU_ ) + { + addressModeU = addressModeU_; + return *this; + } + + SamplerCreateInfo& setAddressModeV( SamplerAddressMode addressModeV_ ) + { + addressModeV = addressModeV_; + return *this; + } + + SamplerCreateInfo& setAddressModeW( SamplerAddressMode addressModeW_ ) + { + addressModeW = addressModeW_; + return *this; + } + + SamplerCreateInfo& setMipLodBias( float mipLodBias_ ) + { + mipLodBias = mipLodBias_; + return *this; + } + + SamplerCreateInfo& setAnisotropyEnable( Bool32 anisotropyEnable_ ) + { + anisotropyEnable = anisotropyEnable_; + return *this; + } + + SamplerCreateInfo& setMaxAnisotropy( float maxAnisotropy_ ) + { + maxAnisotropy = maxAnisotropy_; + return *this; + } + + SamplerCreateInfo& setCompareEnable( Bool32 compareEnable_ ) + { + compareEnable = compareEnable_; + return *this; + } + + SamplerCreateInfo& setCompareOp( CompareOp compareOp_ ) + { + compareOp = compareOp_; + return *this; + } + + SamplerCreateInfo& setMinLod( float minLod_ ) + { + minLod = minLod_; + return *this; + } + + SamplerCreateInfo& setMaxLod( float maxLod_ ) + { + maxLod = maxLod_; + return *this; + } + + SamplerCreateInfo& setBorderColor( BorderColor borderColor_ ) + { + borderColor = borderColor_; + return *this; + } + + SamplerCreateInfo& setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) + { + unnormalizedCoordinates = unnormalizedCoordinates_; + return *this; + } + + operator const VkSamplerCreateInfo&() const + 
{ + return *reinterpret_cast(this); + } + + bool operator==( SamplerCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( magFilter == rhs.magFilter ) + && ( minFilter == rhs.minFilter ) + && ( mipmapMode == rhs.mipmapMode ) + && ( addressModeU == rhs.addressModeU ) + && ( addressModeV == rhs.addressModeV ) + && ( addressModeW == rhs.addressModeW ) + && ( mipLodBias == rhs.mipLodBias ) + && ( anisotropyEnable == rhs.anisotropyEnable ) + && ( maxAnisotropy == rhs.maxAnisotropy ) + && ( compareEnable == rhs.compareEnable ) + && ( compareOp == rhs.compareOp ) + && ( minLod == rhs.minLod ) + && ( maxLod == rhs.maxLod ) + && ( borderColor == rhs.borderColor ) + && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); + } + + bool operator!=( SamplerCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerCreateInfo; + + public: + const void* pNext = nullptr; + SamplerCreateFlags flags; + Filter magFilter; + Filter minFilter; + SamplerMipmapMode mipmapMode; + SamplerAddressMode addressModeU; + SamplerAddressMode addressModeV; + SamplerAddressMode addressModeW; + float mipLodBias; + Bool32 anisotropyEnable; + float maxAnisotropy; + Bool32 compareEnable; + CompareOp compareOp; + float minLod; + float maxLod; + BorderColor borderColor; + Bool32 unnormalizedCoordinates; + }; + static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); + + struct CommandBufferAllocateInfo + { + CommandBufferAllocateInfo( CommandPool commandPool_ = CommandPool(), CommandBufferLevel level_ = CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = 0 ) + : commandPool( commandPool_ ) + , level( level_ ) + , commandBufferCount( commandBufferCount_ ) + { + } + + CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) ); + } + + CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferAllocateInfo ) ); + return *this; + } + CommandBufferAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandBufferAllocateInfo& setCommandPool( CommandPool commandPool_ ) + { + commandPool = commandPool_; + return *this; + } + + CommandBufferAllocateInfo& setLevel( CommandBufferLevel level_ ) + { + level = level_; + return *this; + } + + CommandBufferAllocateInfo& setCommandBufferCount( uint32_t commandBufferCount_ ) + { + commandBufferCount = commandBufferCount_; + return *this; + } + + operator const VkCommandBufferAllocateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( CommandBufferAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( commandPool == rhs.commandPool ) + && ( level == rhs.level ) + && ( commandBufferCount == rhs.commandBufferCount ); + } + + bool operator!=( CommandBufferAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandBufferAllocateInfo; + + public: + const void* pNext = nullptr; + CommandPool commandPool; + CommandBufferLevel level; + uint32_t commandBufferCount; + }; + static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" 
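  // [Editorial sketch, not part of the generated header] A linear-filtered sampler
  // and primary command-buffer allocation; `device` and `commandPool` are assumed.
  //   vk::SamplerCreateInfo samplerInfo = vk::SamplerCreateInfo()
  //       .setMagFilter( vk::Filter::eLinear )
  //       .setMinFilter( vk::Filter::eLinear )
  //       .setMipmapMode( vk::SamplerMipmapMode::eLinear );
  //   vk::Sampler sampler = device.createSampler( samplerInfo );
  //   vk::CommandBufferAllocateInfo cbInfo = vk::CommandBufferAllocateInfo()
  //       .setCommandPool( commandPool )
  //       .setLevel( vk::CommandBufferLevel::ePrimary )
  //       .setCommandBufferCount( 1 );
  //   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( cbInfo );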
); + + struct RenderPassBeginInfo + { + RenderPassBeginInfo( RenderPass renderPass_ = RenderPass(), Framebuffer framebuffer_ = Framebuffer(), Rect2D renderArea_ = Rect2D(), uint32_t clearValueCount_ = 0, const ClearValue* pClearValues_ = nullptr ) + : renderPass( renderPass_ ) + , framebuffer( framebuffer_ ) + , renderArea( renderArea_ ) + , clearValueCount( clearValueCount_ ) + , pClearValues( pClearValues_ ) + { + } + + RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) ); + } + + RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassBeginInfo ) ); + return *this; + } + RenderPassBeginInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassBeginInfo& setRenderPass( RenderPass renderPass_ ) + { + renderPass = renderPass_; + return *this; + } + + RenderPassBeginInfo& setFramebuffer( Framebuffer framebuffer_ ) + { + framebuffer = framebuffer_; + return *this; + } + + RenderPassBeginInfo& setRenderArea( Rect2D renderArea_ ) + { + renderArea = renderArea_; + return *this; + } + + RenderPassBeginInfo& setClearValueCount( uint32_t clearValueCount_ ) + { + clearValueCount = clearValueCount_; + return *this; + } + + RenderPassBeginInfo& setPClearValues( const ClearValue* pClearValues_ ) + { + pClearValues = pClearValues_; + return *this; + } + + operator const VkRenderPassBeginInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassBeginInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( renderPass == rhs.renderPass ) + && ( framebuffer == rhs.framebuffer ) + && ( renderArea == rhs.renderArea ) + && ( clearValueCount == rhs.clearValueCount ) + && ( pClearValues == rhs.pClearValues ); + } + + bool operator!=( RenderPassBeginInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassBeginInfo; + + public: + const void* pNext = nullptr; + RenderPass renderPass; + Framebuffer framebuffer; + Rect2D renderArea; + uint32_t clearValueCount; + const ClearValue* pClearValues; + }; + static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" ); + + struct EventCreateInfo + { + EventCreateInfo( EventCreateFlags flags_ = EventCreateFlags() ) + : flags( flags_ ) + { + } + + EventCreateInfo( VkEventCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( EventCreateInfo ) ); + } + + EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( EventCreateInfo ) ); + return *this; + } + EventCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + EventCreateInfo& setFlags( EventCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator const VkEventCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( EventCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( EventCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eEventCreateInfo; + + public: + const void* pNext = nullptr; + EventCreateFlags flags; + }; + static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" 
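  // [Editorial sketch, not part of the generated header] Beginning a render pass
  // with one clear value; `commandBuffer`, `renderPass`, `framebuffer`, `extent`
  // and `clearColor` (a vk::ClearValue) are assumed.
  //   vk::RenderPassBeginInfo beginInfo = vk::RenderPassBeginInfo()
  //       .setRenderPass( renderPass )
  //       .setFramebuffer( framebuffer )
  //       .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
  //       .setClearValueCount( 1 )
  //       .setPClearValues( &clearColor );
  //   commandBuffer.beginRenderPass( beginInfo, vk::SubpassContents::eInline );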
); + + struct SemaphoreCreateInfo + { + SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = SemaphoreCreateFlags() ) + : flags( flags_ ) + { + } + + SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) ); + } + + SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreCreateInfo ) ); + return *this; + } + SemaphoreCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SemaphoreCreateInfo& setFlags( SemaphoreCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator const VkSemaphoreCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SemaphoreCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( SemaphoreCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSemaphoreCreateInfo; + + public: + const void* pNext = nullptr; + SemaphoreCreateFlags flags; + }; + static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" ); + + struct FramebufferCreateInfo + { + FramebufferCreateInfo( FramebufferCreateFlags flags_ = FramebufferCreateFlags(), RenderPass renderPass_ = RenderPass(), uint32_t attachmentCount_ = 0, const ImageView* pAttachments_ = nullptr, uint32_t width_ = 0, uint32_t height_ = 0, uint32_t layers_ = 0 ) + : flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + { + } + + FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) ); + } + + FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( FramebufferCreateInfo ) ); + return *this; + } + FramebufferCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + FramebufferCreateInfo& setFlags( FramebufferCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + FramebufferCreateInfo& setRenderPass( RenderPass renderPass_ ) + { + renderPass = renderPass_; + return *this; + } + + FramebufferCreateInfo& setAttachmentCount( uint32_t attachmentCount_ ) + { + attachmentCount = attachmentCount_; + return *this; + } + + FramebufferCreateInfo& setPAttachments( const ImageView* pAttachments_ ) + { + pAttachments = pAttachments_; + return *this; + } + + FramebufferCreateInfo& setWidth( uint32_t width_ ) + { + width = width_; + return *this; + } + + FramebufferCreateInfo& setHeight( uint32_t height_ ) + { + height = height_; + return *this; + } + + FramebufferCreateInfo& setLayers( uint32_t layers_ ) + { + layers = layers_; + return *this; + } + + operator const VkFramebufferCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( FramebufferCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( renderPass == rhs.renderPass ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( width == rhs.width ) + && ( height == rhs.height ) + && ( layers == rhs.layers ); + } + + bool operator!=( FramebufferCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = 
StructureType::eFramebufferCreateInfo; + + public: + const void* pNext = nullptr; + FramebufferCreateFlags flags; + RenderPass renderPass; + uint32_t attachmentCount; + const ImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; + }; + static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" ); + + struct DisplayModeCreateInfoKHR + { + DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = DisplayModeCreateFlagsKHR(), DisplayModeParametersKHR parameters_ = DisplayModeParametersKHR() ) + : flags( flags_ ) + , parameters( parameters_ ) + { + } + + DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) ); + } + + DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayModeCreateInfoKHR ) ); + return *this; + } + DisplayModeCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayModeCreateInfoKHR& setFlags( DisplayModeCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + DisplayModeCreateInfoKHR& setParameters( DisplayModeParametersKHR parameters_ ) + { + parameters = parameters_; + return *this; + } + + operator const VkDisplayModeCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayModeCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( parameters == rhs.parameters ); + } + + bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + + public: + const void* pNext = nullptr; + DisplayModeCreateFlagsKHR flags; + DisplayModeParametersKHR parameters; + }; + static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" 
); + + struct DisplayPresentInfoKHR + { + DisplayPresentInfoKHR( Rect2D srcRect_ = Rect2D(), Rect2D dstRect_ = Rect2D(), Bool32 persistent_ = 0 ) + : srcRect( srcRect_ ) + , dstRect( dstRect_ ) + , persistent( persistent_ ) + { + } + + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) ); + } + + DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPresentInfoKHR ) ); + return *this; + } + DisplayPresentInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayPresentInfoKHR& setSrcRect( Rect2D srcRect_ ) + { + srcRect = srcRect_; + return *this; + } + + DisplayPresentInfoKHR& setDstRect( Rect2D dstRect_ ) + { + dstRect = dstRect_; + return *this; + } + + DisplayPresentInfoKHR& setPersistent( Bool32 persistent_ ) + { + persistent = persistent_; + return *this; + } + + operator const VkDisplayPresentInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPresentInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcRect == rhs.srcRect ) + && ( dstRect == rhs.dstRect ) + && ( persistent == rhs.persistent ); + } + + bool operator!=( DisplayPresentInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayPresentInfoKHR; + + public: + const void* pNext = nullptr; + Rect2D srcRect; + Rect2D dstRect; + Bool32 persistent; + }; + static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidSurfaceCreateInfoKHR + { + AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = AndroidSurfaceCreateFlagsKHR(), struct ANativeWindow* window_ = nullptr ) + : flags( flags_ ) + , window( window_ ) + { + } + + AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) ); + } + + AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AndroidSurfaceCreateInfoKHR ) ); + return *this; + } + AndroidSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AndroidSurfaceCreateInfoKHR& setFlags( AndroidSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + AndroidSurfaceCreateInfoKHR& setWindow( struct ANativeWindow* window_ ) + { + window = window_; + return *this; + } + + operator const VkAndroidSurfaceCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( window == rhs.window ); + } + + bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + AndroidSurfaceCreateFlagsKHR flags; + struct ANativeWindow* window; + }; + static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
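  // [Editorial sketch, not part of the generated header] Creating an Android
  // surface from a native window; `instance` and `nativeWindow` (ANativeWindow*)
  // are assumed, and this path only applies when VK_USE_PLATFORM_ANDROID_KHR is defined.
  //   vk::AndroidSurfaceCreateInfoKHR surfaceInfo = vk::AndroidSurfaceCreateInfoKHR()
  //       .setWindow( nativeWindow );
  //   vk::SurfaceKHR surface = instance.createAndroidSurfaceKHR( surfaceInfo );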
); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + struct MirSurfaceCreateInfoKHR + { + MirSurfaceCreateInfoKHR( MirSurfaceCreateFlagsKHR flags_ = MirSurfaceCreateFlagsKHR(), MirConnection* connection_ = nullptr, MirSurface* mirSurface_ = nullptr ) + : flags( flags_ ) + , connection( connection_ ) + , mirSurface( mirSurface_ ) + { + } + + MirSurfaceCreateInfoKHR( VkMirSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MirSurfaceCreateInfoKHR ) ); + } + + MirSurfaceCreateInfoKHR& operator=( VkMirSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MirSurfaceCreateInfoKHR ) ); + return *this; + } + MirSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MirSurfaceCreateInfoKHR& setFlags( MirSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + MirSurfaceCreateInfoKHR& setConnection( MirConnection* connection_ ) + { + connection = connection_; + return *this; + } + + MirSurfaceCreateInfoKHR& setMirSurface( MirSurface* mirSurface_ ) + { + mirSurface = mirSurface_; + return *this; + } + + operator const VkMirSurfaceCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MirSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( connection == rhs.connection ) + && ( mirSurface == rhs.mirSurface ); + } + + bool operator!=( MirSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMirSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + MirSurfaceCreateFlagsKHR flags; + MirConnection* connection; + MirSurface* mirSurface; + }; + static_assert( sizeof( MirSurfaceCreateInfoKHR ) == sizeof( VkMirSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + +#ifdef VK_USE_PLATFORM_VI_NN + struct ViSurfaceCreateInfoNN + { + ViSurfaceCreateInfoNN( ViSurfaceCreateFlagsNN flags_ = ViSurfaceCreateFlagsNN(), void* window_ = nullptr ) + : flags( flags_ ) + , window( window_ ) + { + } + + ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) + { + memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) ); + } + + ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) + { + memcpy( this, &rhs, sizeof( ViSurfaceCreateInfoNN ) ); + return *this; + } + ViSurfaceCreateInfoNN& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ViSurfaceCreateInfoNN& setFlags( ViSurfaceCreateFlagsNN flags_ ) + { + flags = flags_; + return *this; + } + + ViSurfaceCreateInfoNN& setWindow( void* window_ ) + { + window = window_; + return *this; + } + + operator const VkViSurfaceCreateInfoNN&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ViSurfaceCreateInfoNN const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( window == rhs.window ); + } + + bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eViSurfaceCreateInfoNN; + + public: + const void* pNext = nullptr; + ViSurfaceCreateFlagsNN flags; + void* window; + }; + static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + struct WaylandSurfaceCreateInfoKHR + { + WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_ = WaylandSurfaceCreateFlagsKHR(), struct wl_display* display_ = nullptr, struct wl_surface* surface_ = nullptr ) + : flags( flags_ ) + , display( display_ ) + , surface( surface_ ) + { + } + + WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) ); + } + + WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( WaylandSurfaceCreateInfoKHR ) ); + return *this; + } + WaylandSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + WaylandSurfaceCreateInfoKHR& setFlags( WaylandSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + WaylandSurfaceCreateInfoKHR& setDisplay( struct wl_display* display_ ) + { + display = display_; + return *this; + } + + WaylandSurfaceCreateInfoKHR& setSurface( struct wl_surface* surface_ ) + { + surface = surface_; + return *this; + } + + operator const VkWaylandSurfaceCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( display == rhs.display ) + && ( surface == rhs.surface ); + } + + bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + WaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; + }; + static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct Win32SurfaceCreateInfoKHR + { + Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_ = Win32SurfaceCreateFlagsKHR(), HINSTANCE hinstance_ = 0, HWND hwnd_ = 0 ) + : flags( flags_ ) + , hinstance( hinstance_ ) + , hwnd( hwnd_ ) + { + } + + Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) ); + } + + Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32SurfaceCreateInfoKHR ) ); + return *this; + } + Win32SurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + Win32SurfaceCreateInfoKHR& setFlags( Win32SurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + Win32SurfaceCreateInfoKHR& setHinstance( HINSTANCE hinstance_ ) + { + hinstance = hinstance_; + return *this; + } + + Win32SurfaceCreateInfoKHR& setHwnd( HWND hwnd_ ) + { + hwnd = hwnd_; + return *this; + } + + operator const VkWin32SurfaceCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( hinstance == rhs.hinstance ) + && ( hwnd == rhs.hwnd ); + } + + bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + Win32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; + }; + static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + struct XlibSurfaceCreateInfoKHR + { + XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_ = XlibSurfaceCreateFlagsKHR(), Display* dpy_ = nullptr, Window window_ = 0 ) + : flags( flags_ ) + , dpy( dpy_ ) + , window( window_ ) + { + } + + XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) ); + } + + XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( XlibSurfaceCreateInfoKHR ) ); + return *this; + } + XlibSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + XlibSurfaceCreateInfoKHR& setFlags( XlibSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + XlibSurfaceCreateInfoKHR& setDpy( Display* dpy_ ) + { + dpy = dpy_; + return *this; + } + + XlibSurfaceCreateInfoKHR& setWindow( Window window_ ) + { + window = window_; + return *this; + } + + operator const VkXlibSurfaceCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( dpy == rhs.dpy ) + && ( window == rhs.window ); + } + + bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + XlibSurfaceCreateFlagsKHR flags; + Display* dpy; + Window window; + }; + static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + struct XcbSurfaceCreateInfoKHR + { + XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_ = XcbSurfaceCreateFlagsKHR(), xcb_connection_t* connection_ = nullptr, xcb_window_t window_ = 0 ) + : flags( flags_ ) + , connection( connection_ ) + , window( window_ ) + { + } + + XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) ); + } + + XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( XcbSurfaceCreateInfoKHR ) ); + return *this; + } + XcbSurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + XcbSurfaceCreateInfoKHR& setFlags( XcbSurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + XcbSurfaceCreateInfoKHR& setConnection( xcb_connection_t* connection_ ) + { + connection = connection_; + return *this; + } + + XcbSurfaceCreateInfoKHR& setWindow( xcb_window_t window_ ) + { + window = window_; + return *this; + } + + operator const VkXcbSurfaceCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( connection == rhs.connection ) + && ( window == rhs.window ); + } + + bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + XcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; + }; + static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + + struct DebugMarkerMarkerInfoEXT + { + DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, std::array const& color_ = { { 0, 0, 0, 0 } } ) + : pMarkerName( pMarkerName_ ) + { + memcpy( &color, color_.data(), 4 * sizeof( float ) ); + } + + DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) ); + } + + DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerMarkerInfoEXT ) ); + return *this; + } + DebugMarkerMarkerInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugMarkerMarkerInfoEXT& setPMarkerName( const char* pMarkerName_ ) + { + pMarkerName = pMarkerName_; + return *this; + } + + DebugMarkerMarkerInfoEXT& setColor( std::array color_ ) + { + memcpy( &color, color_.data(), 4 * sizeof( float ) ); + return *this; + } + + operator const VkDebugMarkerMarkerInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pMarkerName == rhs.pMarkerName ) + && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 ); + } + + bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; + + public: + const void* pNext = nullptr; + const char* pMarkerName; + float color[4]; + }; + static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" ); + + struct DedicatedAllocationImageCreateInfoNV + { + DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = 0 ) + : dedicatedAllocation( dedicatedAllocation_ ) + { + } + + DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) ); + } + + DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationImageCreateInfoNV ) ); + return *this; + } + DedicatedAllocationImageCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationImageCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ ) + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } + + operator const VkDedicatedAllocationImageCreateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dedicatedAllocation == rhs.dedicatedAllocation ); + } + + bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; + + public: + const void* pNext = nullptr; + Bool32 dedicatedAllocation; + }; + static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" 
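+  // A minimal usage sketch of DedicatedAllocationImageCreateInfoNV, assuming the remaining fields
+  // of 'imageInfo' are filled in elsewhere: the NV extension struct is attached to the image
+  // create info through its pNext chain before the image is created.
+  //
+  //   vk::DedicatedAllocationImageCreateInfoNV dedicatedInfo( VK_TRUE );
+  //   vk::ImageCreateInfo imageInfo;
+  //   imageInfo.setPNext( &dedicatedInfo );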
); + + struct DedicatedAllocationBufferCreateInfoNV + { + DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = 0 ) + : dedicatedAllocation( dedicatedAllocation_ ) + { + } + + DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) ); + } + + DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationBufferCreateInfoNV ) ); + return *this; + } + DedicatedAllocationBufferCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationBufferCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ ) + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } + + operator const VkDedicatedAllocationBufferCreateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dedicatedAllocation == rhs.dedicatedAllocation ); + } + + bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + + public: + const void* pNext = nullptr; + Bool32 dedicatedAllocation; + }; + static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" ); + + struct DedicatedAllocationMemoryAllocateInfoNV + { + DedicatedAllocationMemoryAllocateInfoNV( Image image_ = Image(), Buffer buffer_ = Buffer() ) + : image( image_ ) + , buffer( buffer_ ) + { + } + + DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) ); + } + + DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) ); + return *this; + } + DedicatedAllocationMemoryAllocateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationMemoryAllocateInfoNV& setImage( Image image_ ) + { + image = image_; + return *this; + } + + DedicatedAllocationMemoryAllocateInfoNV& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator const VkDedicatedAllocationMemoryAllocateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + + public: + const void* pNext = nullptr; + Image image; + Buffer buffer; + }; + static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" 
); + +#ifdef VK_USE_PLATFORM_WIN32_NV + struct ExportMemoryWin32HandleInfoNV + { + ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0 ) + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + { + } + + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) ); + } + + ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoNV ) ); + return *this; + } + ExportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoNV& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoNV& setDwAccess( DWORD dwAccess_ ) + { + dwAccess = dwAccess_; + return *this; + } + + operator const VkExportMemoryWin32HandleInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ); + } + + bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + + public: + const void* pNext = nullptr; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + +#ifdef VK_USE_PLATFORM_WIN32_NV + struct Win32KeyedMutexAcquireReleaseInfoNV + { + Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr ) + : acquireCount( acquireCount_ ) + , pAcquireSyncs( pAcquireSyncs_ ) + , pAcquireKeys( pAcquireKeys_ ) + , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ) + , releaseCount( releaseCount_ ) + , pReleaseSyncs( pReleaseSyncs_ ) + , pReleaseKeys( pReleaseKeys_ ) + { + } + + Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) ); + } + + Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) ); + return *this; + } + Win32KeyedMutexAcquireReleaseInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setAcquireCount( uint32_t acquireCount_ ) + { + acquireCount = acquireCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ ) + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireKeys( const uint64_t* pAcquireKeys_ ) + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) + { + pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; + 
return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setReleaseCount( uint32_t releaseCount_ ) + { + releaseCount = releaseCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ ) + { + pReleaseSyncs = pReleaseSyncs_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseKeys( const uint64_t* pReleaseKeys_ ) + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + + operator const VkWin32KeyedMutexAcquireReleaseInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( acquireCount == rhs.acquireCount ) + && ( pAcquireSyncs == rhs.pAcquireSyncs ) + && ( pAcquireKeys == rhs.pAcquireKeys ) + && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) + && ( releaseCount == rhs.releaseCount ) + && ( pReleaseSyncs == rhs.pReleaseSyncs ) + && ( pReleaseKeys == rhs.pReleaseKeys ); + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; + + public: + const void* pNext = nullptr; + uint32_t acquireCount; + const DeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeoutMilliseconds; + uint32_t releaseCount; + const DeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; + }; + static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + + struct DeviceGeneratedCommandsFeaturesNVX + { + DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 ) + : computeBindingPointSupport( computeBindingPointSupport_ ) + { + } + + DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) ); + } + + DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsFeaturesNVX ) ); + return *this; + } + DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ ) + { + computeBindingPointSupport = computeBindingPointSupport_; + return *this; + } + + operator const VkDeviceGeneratedCommandsFeaturesNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( computeBindingPointSupport == rhs.computeBindingPointSupport ); + } + + bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX; + + public: + const void* pNext = nullptr; + Bool32 computeBindingPointSupport; + }; + static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" 
); + + struct DeviceGeneratedCommandsLimitsNVX + { + DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, uint32_t maxObjectEntryCounts_ = 0, uint32_t minSequenceCountBufferOffsetAlignment_ = 0, uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 ) + : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ ) + , maxObjectEntryCounts( maxObjectEntryCounts_ ) + , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ ) + , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ ) + , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ ) + { + } + + DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) ); + } + + DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGeneratedCommandsLimitsNVX ) ); + return *this; + } + DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) + { + maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) + { + maxObjectEntryCounts = maxObjectEntryCounts_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) + { + minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) + { + minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) + { + minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_; + return *this; + } + + operator const VkDeviceGeneratedCommandsLimitsNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount ) + && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts ) + && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment ) + && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment ) + && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment ); + } + + bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX; + + public: + const void* pNext = nullptr; + uint32_t maxIndirectCommandsLayoutTokenCount; + uint32_t maxObjectEntryCounts; + uint32_t minSequenceCountBufferOffsetAlignment; + uint32_t minSequenceIndexBufferOffsetAlignment; + uint32_t minCommandsTokenBufferOffsetAlignment; + }; + static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct 
and wrapper have different size!" ); + + struct CmdReserveSpaceForCommandsInfoNVX + { + CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t maxSequencesCount_ = 0 ) + : objectTable( objectTable_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , maxSequencesCount( maxSequencesCount_ ) + { + } + + CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) ); + } + + CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( CmdReserveSpaceForCommandsInfoNVX ) ); + return *this; + } + CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ ) + { + objectTable = objectTable_; + return *this; + } + + CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ ) + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ ) + { + maxSequencesCount = maxSequencesCount_; + return *this; + } + + operator const VkCmdReserveSpaceForCommandsInfoNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectTable == rhs.objectTable ) + && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) + && ( maxSequencesCount == rhs.maxSequencesCount ); + } + + bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX; + + public: + const void* pNext = nullptr; + ObjectTableNVX objectTable; + IndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t maxSequencesCount; + }; + static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceFeatures2 + { + PhysicalDeviceFeatures2( PhysicalDeviceFeatures features_ = PhysicalDeviceFeatures() ) + : features( features_ ) + { + } + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2 ) ); + } + + PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceFeatures2 ) ); + return *this; + } + PhysicalDeviceFeatures2& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceFeatures2& setFeatures( PhysicalDeviceFeatures features_ ) + { + features = features_; + return *this; + } + + operator const VkPhysicalDeviceFeatures2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceFeatures2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( features == rhs.features ); + } + + bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceFeatures2; + + public: + void* pNext = nullptr; + PhysicalDeviceFeatures features; + }; + static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" ); + + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + + struct PhysicalDevicePushDescriptorPropertiesKHR + { + PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = 0 ) + : maxPushDescriptors( maxPushDescriptors_ ) + { + } + + PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) ); + } + + PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) ); + return *this; + } + PhysicalDevicePushDescriptorPropertiesKHR& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDevicePushDescriptorPropertiesKHR& setMaxPushDescriptors( uint32_t maxPushDescriptors_ ) + { + maxPushDescriptors = maxPushDescriptors_; + return *this; + } + + operator const VkPhysicalDevicePushDescriptorPropertiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxPushDescriptors == rhs.maxPushDescriptors ); + } + + bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; + + public: + void* pNext = nullptr; + uint32_t maxPushDescriptors; + }; + static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" 
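+  // A minimal usage sketch of PhysicalDeviceFeatures2, assuming a valid VkPhysicalDevice handle
+  // named 'physicalDevice': extended feature structs (such as PhysicalDeviceMultiviewFeatures,
+  // defined further down) are chained behind it via pNext and filled in by a single query.
+  //
+  //   vk::PhysicalDeviceMultiviewFeatures multiviewFeatures;
+  //   vk::PhysicalDeviceFeatures2 features2;
+  //   features2.setPNext( &multiviewFeatures );
+  //   vkGetPhysicalDeviceFeatures2( physicalDevice,
+  //                                 reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features2 ) );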
); + + struct PresentRegionsKHR + { + PresentRegionsKHR( uint32_t swapchainCount_ = 0, const PresentRegionKHR* pRegions_ = nullptr ) + : swapchainCount( swapchainCount_ ) + , pRegions( pRegions_ ) + { + } + + PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentRegionsKHR ) ); + } + + PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentRegionsKHR ) ); + return *this; + } + PresentRegionsKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PresentRegionsKHR& setSwapchainCount( uint32_t swapchainCount_ ) + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentRegionsKHR& setPRegions( const PresentRegionKHR* pRegions_ ) + { + pRegions = pRegions_; + return *this; + } + + operator const VkPresentRegionsKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PresentRegionsKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pRegions == rhs.pRegions ); + } + + bool operator!=( PresentRegionsKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePresentRegionsKHR; + + public: + const void* pNext = nullptr; + uint32_t swapchainCount; + const PresentRegionKHR* pRegions; + }; + static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceVariablePointerFeatures + { + PhysicalDeviceVariablePointerFeatures( Bool32 variablePointersStorageBuffer_ = 0, Bool32 variablePointers_ = 0 ) + : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) + , variablePointers( variablePointers_ ) + { + } + + PhysicalDeviceVariablePointerFeatures( VkPhysicalDeviceVariablePointerFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeatures ) ); + } + + PhysicalDeviceVariablePointerFeatures& operator=( VkPhysicalDeviceVariablePointerFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVariablePointerFeatures ) ); + return *this; + } + PhysicalDeviceVariablePointerFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVariablePointerFeatures& setVariablePointersStorageBuffer( Bool32 variablePointersStorageBuffer_ ) + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + PhysicalDeviceVariablePointerFeatures& setVariablePointers( Bool32 variablePointers_ ) + { + variablePointers = variablePointers_; + return *this; + } + + operator const VkPhysicalDeviceVariablePointerFeatures&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceVariablePointerFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) + && ( variablePointers == rhs.variablePointers ); + } + + bool operator!=( PhysicalDeviceVariablePointerFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceVariablePointerFeatures; + + public: + void* pNext = nullptr; + Bool32 variablePointersStorageBuffer; + Bool32 variablePointers; + }; + static_assert( sizeof( PhysicalDeviceVariablePointerFeatures ) == sizeof( VkPhysicalDeviceVariablePointerFeatures ), "struct and wrapper have different size!" 
); + + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointerFeatures; + + struct PhysicalDeviceIDProperties + { + operator const VkPhysicalDeviceIDProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceIDProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( deviceNodeMask == rhs.deviceNodeMask ) + && ( deviceLUIDValid == rhs.deviceLUIDValid ); + } + + bool operator!=( PhysicalDeviceIDProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceIdProperties; + + public: + void* pNext = nullptr; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + Bool32 deviceLUIDValid; + }; + static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" ); + + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportMemoryWin32HandleInfoKHR + { + ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 ) + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + { + } + + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) ); + } + + ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryWin32HandleInfoKHR ) ); + return *this; + } + ExportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ ) + { + dwAccess = dwAccess_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator const VkExportMemoryWin32HandleInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryWin32HandlePropertiesKHR + { + operator const VkMemoryWin32HandlePropertiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + + public: + void* pNext = nullptr; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct MemoryFdPropertiesKHR + { + operator const VkMemoryFdPropertiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryFdPropertiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryFdPropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryFdPropertiesKHR; + + public: + void* pNext = nullptr; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct Win32KeyedMutexAcquireReleaseInfoKHR + { + Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeouts_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr ) + : acquireCount( acquireCount_ ) + , pAcquireSyncs( pAcquireSyncs_ ) + , pAcquireKeys( pAcquireKeys_ ) + , pAcquireTimeouts( pAcquireTimeouts_ ) + , releaseCount( releaseCount_ ) + , pReleaseSyncs( pReleaseSyncs_ ) + , pReleaseKeys( pReleaseKeys_ ) + { + } + + Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) ); + } + + Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) ); + return *this; + } + Win32KeyedMutexAcquireReleaseInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setAcquireCount( uint32_t acquireCount_ ) + { + acquireCount = acquireCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ ) + { + pAcquireSyncs = pAcquireSyncs_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireKeys( const uint64_t* pAcquireKeys_ ) + { + pAcquireKeys = pAcquireKeys_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) + { + pAcquireTimeouts = pAcquireTimeouts_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setReleaseCount( uint32_t releaseCount_ ) + { + releaseCount = releaseCount_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ ) + { + pReleaseSyncs = 
pReleaseSyncs_; + return *this; + } + + Win32KeyedMutexAcquireReleaseInfoKHR& setPReleaseKeys( const uint64_t* pReleaseKeys_ ) + { + pReleaseKeys = pReleaseKeys_; + return *this; + } + + operator const VkWin32KeyedMutexAcquireReleaseInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( acquireCount == rhs.acquireCount ) + && ( pAcquireSyncs == rhs.pAcquireSyncs ) + && ( pAcquireKeys == rhs.pAcquireKeys ) + && ( pAcquireTimeouts == rhs.pAcquireTimeouts ) + && ( releaseCount == rhs.releaseCount ) + && ( pReleaseSyncs == rhs.pReleaseSyncs ) + && ( pReleaseKeys == rhs.pReleaseKeys ); + } + + bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t acquireCount; + const DeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeouts; + uint32_t releaseCount; + const DeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; + }; + static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportSemaphoreWin32HandleInfoKHR + { + ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 ) + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + { + } + + ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) ); + } + + ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportSemaphoreWin32HandleInfoKHR ) ); + return *this; + } + ExportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) + { + pAttributes = pAttributes_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ ) + { + dwAccess = dwAccess_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator const VkExportSemaphoreWin32HandleInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; + }; + static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct D3D12FenceSubmitInfoKHR + { + D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0, const uint64_t* pWaitSemaphoreValues_ = nullptr, uint32_t signalSemaphoreValuesCount_ = 0, const uint64_t* pSignalSemaphoreValues_ = nullptr ) + : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) + , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) + , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) + , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + { + } + + D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) ); + } + + D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( D3D12FenceSubmitInfoKHR ) ); + return *this; + } + D3D12FenceSubmitInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + D3D12FenceSubmitInfoKHR& setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) + { + waitSemaphoreValuesCount = waitSemaphoreValuesCount_; + return *this; + } + + D3D12FenceSubmitInfoKHR& setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + + D3D12FenceSubmitInfoKHR& setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) + { + signalSemaphoreValuesCount = signalSemaphoreValuesCount_; + return *this; + } + + D3D12FenceSubmitInfoKHR& setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + + operator const VkD3D12FenceSubmitInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) + && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) + && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) + && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); + } + + bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreValuesCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValuesCount; + const uint64_t* pSignalSemaphoreValues; + }; + static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportFenceWin32HandleInfoKHR + { + ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0, LPCWSTR name_ = 0 ) + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + { + } + + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) ); + } + + ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportFenceWin32HandleInfoKHR ) ); + return *this; + } + ExportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportFenceWin32HandleInfoKHR& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) + { + pAttributes = pAttributes_; + return *this; + } + + ExportFenceWin32HandleInfoKHR& setDwAccess( DWORD dwAccess_ ) + { + dwAccess = dwAccess_; + return *this; + } + + ExportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator const VkExportFenceWin32HandleInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; + }; + static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct PhysicalDeviceMultiviewFeatures + { + PhysicalDeviceMultiviewFeatures( Bool32 multiview_ = 0, Bool32 multiviewGeometryShader_ = 0, Bool32 multiviewTessellationShader_ = 0 ) + : multiview( multiview_ ) + , multiviewGeometryShader( multiviewGeometryShader_ ) + , multiviewTessellationShader( multiviewTessellationShader_ ) + { + } + + PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) ); + } + + PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceMultiviewFeatures ) ); + return *this; + } + PhysicalDeviceMultiviewFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceMultiviewFeatures& setMultiview( Bool32 multiview_ ) + { + multiview = multiview_; + return *this; + } + + PhysicalDeviceMultiviewFeatures& setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ ) + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + PhysicalDeviceMultiviewFeatures& setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ ) + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } + + operator const VkPhysicalDeviceMultiviewFeatures&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( multiview == rhs.multiview ) + && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) + && ( multiviewTessellationShader == rhs.multiviewTessellationShader ); + } + + bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; + + public: + void* pNext = nullptr; + Bool32 multiview; + Bool32 multiviewGeometryShader; + Bool32 multiviewTessellationShader; + }; + static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" ); + + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + + struct PhysicalDeviceMultiviewProperties + { + operator const VkPhysicalDeviceMultiviewProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) + && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); + } + + bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; + + public: + void* pNext = nullptr; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; + }; + static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" 
); + + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + + struct RenderPassMultiviewCreateInfo + { + RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0, const uint32_t* pViewMasks_ = nullptr, uint32_t dependencyCount_ = 0, const int32_t* pViewOffsets_ = nullptr, uint32_t correlationMaskCount_ = 0, const uint32_t* pCorrelationMasks_ = nullptr ) + : subpassCount( subpassCount_ ) + , pViewMasks( pViewMasks_ ) + , dependencyCount( dependencyCount_ ) + , pViewOffsets( pViewOffsets_ ) + , correlationMaskCount( correlationMaskCount_ ) + , pCorrelationMasks( pCorrelationMasks_ ) + { + } + + RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfo ) ); + } + + RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassMultiviewCreateInfo ) ); + return *this; + } + RenderPassMultiviewCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassMultiviewCreateInfo& setSubpassCount( uint32_t subpassCount_ ) + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassMultiviewCreateInfo& setPViewMasks( const uint32_t* pViewMasks_ ) + { + pViewMasks = pViewMasks_; + return *this; + } + + RenderPassMultiviewCreateInfo& setDependencyCount( uint32_t dependencyCount_ ) + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassMultiviewCreateInfo& setPViewOffsets( const int32_t* pViewOffsets_ ) + { + pViewOffsets = pViewOffsets_; + return *this; + } + + RenderPassMultiviewCreateInfo& setCorrelationMaskCount( uint32_t correlationMaskCount_ ) + { + correlationMaskCount = correlationMaskCount_; + return *this; + } + + RenderPassMultiviewCreateInfo& setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) + { + pCorrelationMasks = pCorrelationMasks_; + return *this; + } + + operator const VkRenderPassMultiviewCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( subpassCount == rhs.subpassCount ) + && ( pViewMasks == rhs.pViewMasks ) + && ( dependencyCount == rhs.dependencyCount ) + && ( pViewOffsets == rhs.pViewOffsets ) + && ( correlationMaskCount == rhs.correlationMaskCount ) + && ( pCorrelationMasks == rhs.pCorrelationMasks ); + } + + bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; + + public: + const void* pNext = nullptr; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; + }; + static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" 
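+  // A minimal usage sketch of RenderPassMultiviewCreateInfo, assuming the rest of 'renderPassInfo'
+  // is configured elsewhere: the view mask below makes subpass 0 render to views 0 and 1, and the
+  // multiview struct is attached to the render pass create info through its pNext chain.
+  //
+  //   const uint32_t viewMasks[] = { 0x3 };
+  //   vk::RenderPassMultiviewCreateInfo multiviewInfo;
+  //   multiviewInfo.setSubpassCount( 1 ).setPViewMasks( viewMasks );
+  //   vk::RenderPassCreateInfo renderPassInfo;
+  //   renderPassInfo.setPNext( &multiviewInfo );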
); + + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + + struct BindBufferMemoryInfo + { + BindBufferMemoryInfo( Buffer buffer_ = Buffer(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0 ) + : buffer( buffer_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + { + } + + BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindBufferMemoryInfo ) ); + } + + BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindBufferMemoryInfo ) ); + return *this; + } + BindBufferMemoryInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindBufferMemoryInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + BindBufferMemoryInfo& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + BindBufferMemoryInfo& setMemoryOffset( DeviceSize memoryOffset_ ) + { + memoryOffset = memoryOffset_; + return *this; + } + + operator const VkBindBufferMemoryInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BindBufferMemoryInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ); + } + + bool operator!=( BindBufferMemoryInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindBufferMemoryInfo; + + public: + const void* pNext = nullptr; + Buffer buffer; + DeviceMemory memory; + DeviceSize memoryOffset; + }; + static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" ); + + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + + struct BindBufferMemoryDeviceGroupInfo + { + BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, const uint32_t* pDeviceIndices_ = nullptr ) + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + { + } + + BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) ); + } + + BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindBufferMemoryDeviceGroupInfo ) ); + return *this; + } + BindBufferMemoryDeviceGroupInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindBufferMemoryDeviceGroupInfo& setDeviceIndexCount( uint32_t deviceIndexCount_ ) + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindBufferMemoryDeviceGroupInfo& setPDeviceIndices( const uint32_t* pDeviceIndices_ ) + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + + operator const VkBindBufferMemoryDeviceGroupInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceIndexCount == rhs.deviceIndexCount ) + && ( pDeviceIndices == rhs.pDeviceIndices ); + } + + bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; + + public: + const void* pNext = nullptr; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + }; + static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( 
VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+
+  using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
+
+  struct BindImageMemoryInfo
+  {
+    BindImageMemoryInfo( Image image_ = Image(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0 )
+      : image( image_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+    {
+    }
+
+    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindImageMemoryInfo ) );
+    }
+
+    BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( BindImageMemoryInfo ) );
+      return *this;
+    }
+    BindImageMemoryInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindImageMemoryInfo& setImage( Image image_ )
+    {
+      image = image_;
+      return *this;
+    }
+
+    BindImageMemoryInfo& setMemory( DeviceMemory memory_ )
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    BindImageMemoryInfo& setMemoryOffset( DeviceSize memoryOffset_ )
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    operator const VkBindImageMemoryInfo&() const
+    {
+      return *reinterpret_cast<const VkBindImageMemoryInfo*>(this);
+    }
+
+    bool operator==( BindImageMemoryInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset );
+    }
+
+    bool operator!=( BindImageMemoryInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eBindImageMemoryInfo;
+
+  public:
+    const void* pNext = nullptr;
+    Image image;
+    DeviceMemory memory;
+    DeviceSize memoryOffset;
+  };
+  static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!"
); + + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + + struct BindImageMemoryDeviceGroupInfo + { + BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0, const uint32_t* pDeviceIndices_ = nullptr, uint32_t splitInstanceBindRegionCount_ = 0, const Rect2D* pSplitInstanceBindRegions_ = nullptr ) + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) + , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) + { + } + + BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) ); + } + + BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemoryDeviceGroupInfo ) ); + return *this; + } + BindImageMemoryDeviceGroupInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindImageMemoryDeviceGroupInfo& setDeviceIndexCount( uint32_t deviceIndexCount_ ) + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindImageMemoryDeviceGroupInfo& setPDeviceIndices( const uint32_t* pDeviceIndices_ ) + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + + BindImageMemoryDeviceGroupInfo& setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) + { + splitInstanceBindRegionCount = splitInstanceBindRegionCount_; + return *this; + } + + BindImageMemoryDeviceGroupInfo& setPSplitInstanceBindRegions( const Rect2D* pSplitInstanceBindRegions_ ) + { + pSplitInstanceBindRegions = pSplitInstanceBindRegions_; + return *this; + } + + operator const VkBindImageMemoryDeviceGroupInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceIndexCount == rhs.deviceIndexCount ) + && ( pDeviceIndices == rhs.pDeviceIndices ) + && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) + && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); + } + + bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; + + public: + const void* pNext = nullptr; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const Rect2D* pSplitInstanceBindRegions; + }; + static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" 
); + + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + + struct DeviceGroupRenderPassBeginInfo + { + DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0, uint32_t deviceRenderAreaCount_ = 0, const Rect2D* pDeviceRenderAreas_ = nullptr ) + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( deviceRenderAreaCount_ ) + , pDeviceRenderAreas( pDeviceRenderAreas_ ) + { + } + + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) ); + } + + DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupRenderPassBeginInfo ) ); + return *this; + } + DeviceGroupRenderPassBeginInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupRenderPassBeginInfo& setDeviceMask( uint32_t deviceMask_ ) + { + deviceMask = deviceMask_; + return *this; + } + + DeviceGroupRenderPassBeginInfo& setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) + { + deviceRenderAreaCount = deviceRenderAreaCount_; + return *this; + } + + DeviceGroupRenderPassBeginInfo& setPDeviceRenderAreas( const Rect2D* pDeviceRenderAreas_ ) + { + pDeviceRenderAreas = pDeviceRenderAreas_; + return *this; + } + + operator const VkDeviceGroupRenderPassBeginInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceMask == rhs.deviceMask ) + && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) + && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); + } + + bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + + public: + const void* pNext = nullptr; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const Rect2D* pDeviceRenderAreas; + }; + static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" 
); + + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + + struct DeviceGroupCommandBufferBeginInfo + { + DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 ) + : deviceMask( deviceMask_ ) + { + } + + DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) ); + } + + DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupCommandBufferBeginInfo ) ); + return *this; + } + DeviceGroupCommandBufferBeginInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupCommandBufferBeginInfo& setDeviceMask( uint32_t deviceMask_ ) + { + deviceMask = deviceMask_; + return *this; + } + + operator const VkDeviceGroupCommandBufferBeginInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + + public: + const void* pNext = nullptr; + uint32_t deviceMask; + }; + static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" ); + + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + + struct DeviceGroupSubmitInfo + { + DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0, const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr, uint32_t commandBufferCount_ = 0, const uint32_t* pCommandBufferDeviceMasks_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr ) + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) + { + } + + DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfo ) ); + } + + DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupSubmitInfo ) ); + return *this; + } + DeviceGroupSubmitInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupSubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo& setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) + { + pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; + return *this; + } + + DeviceGroupSubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ ) + { + commandBufferCount = commandBufferCount_; + return *this; + } + + DeviceGroupSubmitInfo& setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) + { + pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; + return *this; + } + + DeviceGroupSubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + DeviceGroupSubmitInfo& 
setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) + { + pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + return *this; + } + + operator const VkDeviceGroupSubmitInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupSubmitInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) + && ( commandBufferCount == rhs.commandBufferCount ) + && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) + && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) + && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); + } + + bool operator!=( DeviceGroupSubmitInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupSubmitInfo; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t signalSemaphoreCount; + const uint32_t* pSignalSemaphoreDeviceIndices; + }; + static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" ); + + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + + struct DeviceGroupBindSparseInfo + { + DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0, uint32_t memoryDeviceIndex_ = 0 ) + : resourceDeviceIndex( resourceDeviceIndex_ ) + , memoryDeviceIndex( memoryDeviceIndex_ ) + { + } + + DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfo ) ); + } + + DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupBindSparseInfo ) ); + return *this; + } + DeviceGroupBindSparseInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupBindSparseInfo& setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) + { + resourceDeviceIndex = resourceDeviceIndex_; + return *this; + } + + DeviceGroupBindSparseInfo& setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) + { + memoryDeviceIndex = memoryDeviceIndex_; + return *this; + } + + operator const VkDeviceGroupBindSparseInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupBindSparseInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) + && ( memoryDeviceIndex == rhs.memoryDeviceIndex ); + } + + bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + + public: + const void* pNext = nullptr; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; + }; + static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" 
); + + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + + struct ImageSwapchainCreateInfoKHR + { + ImageSwapchainCreateInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR() ) + : swapchain( swapchain_ ) + { + } + + ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHR ) ); + } + + ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSwapchainCreateInfoKHR ) ); + return *this; + } + ImageSwapchainCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageSwapchainCreateInfoKHR& setSwapchain( SwapchainKHR swapchain_ ) + { + swapchain = swapchain_; + return *this; + } + + operator const VkImageSwapchainCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchain == rhs.swapchain ); + } + + bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + + public: + const void* pNext = nullptr; + SwapchainKHR swapchain; + }; + static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" ); + + struct BindImageMemorySwapchainInfoKHR + { + BindImageMemorySwapchainInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR(), uint32_t imageIndex_ = 0 ) + : swapchain( swapchain_ ) + , imageIndex( imageIndex_ ) + { + } + + BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) ); + } + + BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImageMemorySwapchainInfoKHR ) ); + return *this; + } + BindImageMemorySwapchainInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindImageMemorySwapchainInfoKHR& setSwapchain( SwapchainKHR swapchain_ ) + { + swapchain = swapchain_; + return *this; + } + + BindImageMemorySwapchainInfoKHR& setImageIndex( uint32_t imageIndex_ ) + { + imageIndex = imageIndex_; + return *this; + } + + operator const VkBindImageMemorySwapchainInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchain == rhs.swapchain ) + && ( imageIndex == rhs.imageIndex ); + } + + bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + + public: + const void* pNext = nullptr; + SwapchainKHR swapchain; + uint32_t imageIndex; + }; + static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" 
); + + struct AcquireNextImageInfoKHR + { + AcquireNextImageInfoKHR( SwapchainKHR swapchain_ = SwapchainKHR(), uint64_t timeout_ = 0, Semaphore semaphore_ = Semaphore(), Fence fence_ = Fence(), uint32_t deviceMask_ = 0 ) + : swapchain( swapchain_ ) + , timeout( timeout_ ) + , semaphore( semaphore_ ) + , fence( fence_ ) + , deviceMask( deviceMask_ ) + { + } + + AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHR ) ); + } + + AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( AcquireNextImageInfoKHR ) ); + return *this; + } + AcquireNextImageInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + AcquireNextImageInfoKHR& setSwapchain( SwapchainKHR swapchain_ ) + { + swapchain = swapchain_; + return *this; + } + + AcquireNextImageInfoKHR& setTimeout( uint64_t timeout_ ) + { + timeout = timeout_; + return *this; + } + + AcquireNextImageInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + AcquireNextImageInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + AcquireNextImageInfoKHR& setDeviceMask( uint32_t deviceMask_ ) + { + deviceMask = deviceMask_; + return *this; + } + + operator const VkAcquireNextImageInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AcquireNextImageInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchain == rhs.swapchain ) + && ( timeout == rhs.timeout ) + && ( semaphore == rhs.semaphore ) + && ( fence == rhs.fence ) + && ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( AcquireNextImageInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAcquireNextImageInfoKHR; + + public: + const void* pNext = nullptr; + SwapchainKHR swapchain; + uint64_t timeout; + Semaphore semaphore; + Fence fence; + uint32_t deviceMask; + }; + static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" 
); + + struct HdrMetadataEXT + { + HdrMetadataEXT( XYColorEXT displayPrimaryRed_ = XYColorEXT(), XYColorEXT displayPrimaryGreen_ = XYColorEXT(), XYColorEXT displayPrimaryBlue_ = XYColorEXT(), XYColorEXT whitePoint_ = XYColorEXT(), float maxLuminance_ = 0, float minLuminance_ = 0, float maxContentLightLevel_ = 0, float maxFrameAverageLightLevel_ = 0 ) + : displayPrimaryRed( displayPrimaryRed_ ) + , displayPrimaryGreen( displayPrimaryGreen_ ) + , displayPrimaryBlue( displayPrimaryBlue_ ) + , whitePoint( whitePoint_ ) + , maxLuminance( maxLuminance_ ) + , minLuminance( minLuminance_ ) + , maxContentLightLevel( maxContentLightLevel_ ) + , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) + { + } + + HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( HdrMetadataEXT ) ); + } + + HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( HdrMetadataEXT ) ); + return *this; + } + HdrMetadataEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + HdrMetadataEXT& setDisplayPrimaryRed( XYColorEXT displayPrimaryRed_ ) + { + displayPrimaryRed = displayPrimaryRed_; + return *this; + } + + HdrMetadataEXT& setDisplayPrimaryGreen( XYColorEXT displayPrimaryGreen_ ) + { + displayPrimaryGreen = displayPrimaryGreen_; + return *this; + } + + HdrMetadataEXT& setDisplayPrimaryBlue( XYColorEXT displayPrimaryBlue_ ) + { + displayPrimaryBlue = displayPrimaryBlue_; + return *this; + } + + HdrMetadataEXT& setWhitePoint( XYColorEXT whitePoint_ ) + { + whitePoint = whitePoint_; + return *this; + } + + HdrMetadataEXT& setMaxLuminance( float maxLuminance_ ) + { + maxLuminance = maxLuminance_; + return *this; + } + + HdrMetadataEXT& setMinLuminance( float minLuminance_ ) + { + minLuminance = minLuminance_; + return *this; + } + + HdrMetadataEXT& setMaxContentLightLevel( float maxContentLightLevel_ ) + { + maxContentLightLevel = maxContentLightLevel_; + return *this; + } + + HdrMetadataEXT& setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) + { + maxFrameAverageLightLevel = maxFrameAverageLightLevel_; + return *this; + } + + operator const VkHdrMetadataEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( HdrMetadataEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayPrimaryRed == rhs.displayPrimaryRed ) + && ( displayPrimaryGreen == rhs.displayPrimaryGreen ) + && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) + && ( whitePoint == rhs.whitePoint ) + && ( maxLuminance == rhs.maxLuminance ) + && ( minLuminance == rhs.minLuminance ) + && ( maxContentLightLevel == rhs.maxContentLightLevel ) + && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); + } + + bool operator!=( HdrMetadataEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eHdrMetadataEXT; + + public: + const void* pNext = nullptr; + XYColorEXT displayPrimaryRed; + XYColorEXT displayPrimaryGreen; + XYColorEXT displayPrimaryBlue; + XYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; + }; + static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" 
); + + struct PresentTimesInfoGOOGLE + { + PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0, const PresentTimeGOOGLE* pTimes_ = nullptr ) + : swapchainCount( swapchainCount_ ) + , pTimes( pTimes_ ) + { + } + + PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) ); + } + + PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentTimesInfoGOOGLE ) ); + return *this; + } + PresentTimesInfoGOOGLE& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PresentTimesInfoGOOGLE& setSwapchainCount( uint32_t swapchainCount_ ) + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentTimesInfoGOOGLE& setPTimes( const PresentTimeGOOGLE* pTimes_ ) + { + pTimes = pTimes_; + return *this; + } + + operator const VkPresentTimesInfoGOOGLE&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PresentTimesInfoGOOGLE const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pTimes == rhs.pTimes ); + } + + bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePresentTimesInfoGOOGLE; + + public: + const void* pNext = nullptr; + uint32_t swapchainCount; + const PresentTimeGOOGLE* pTimes; + }; + static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_IOS_MVK + struct IOSSurfaceCreateInfoMVK + { + IOSSurfaceCreateInfoMVK( IOSSurfaceCreateFlagsMVK flags_ = IOSSurfaceCreateFlagsMVK(), const void* pView_ = nullptr ) + : flags( flags_ ) + , pView( pView_ ) + { + } + + IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) + { + memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) ); + } + + IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) + { + memcpy( this, &rhs, sizeof( IOSSurfaceCreateInfoMVK ) ); + return *this; + } + IOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + IOSSurfaceCreateInfoMVK& setFlags( IOSSurfaceCreateFlagsMVK flags_ ) + { + flags = flags_; + return *this; + } + + IOSSurfaceCreateInfoMVK& setPView( const void* pView_ ) + { + pView = pView_; + return *this; + } + + operator const VkIOSSurfaceCreateInfoMVK&() const + { + return *reinterpret_cast(this); + } + + bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pView == rhs.pView ); + } + + bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + + public: + const void* pNext = nullptr; + IOSSurfaceCreateFlagsMVK flags; + const void* pView; + }; + static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + struct MacOSSurfaceCreateInfoMVK + { + MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateFlagsMVK flags_ = MacOSSurfaceCreateFlagsMVK(), const void* pView_ = nullptr ) + : flags( flags_ ) + , pView( pView_ ) + { + } + + MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) + { + memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) ); + } + + MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) + { + memcpy( this, &rhs, sizeof( MacOSSurfaceCreateInfoMVK ) ); + return *this; + } + MacOSSurfaceCreateInfoMVK& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MacOSSurfaceCreateInfoMVK& setFlags( MacOSSurfaceCreateFlagsMVK flags_ ) + { + flags = flags_; + return *this; + } + + MacOSSurfaceCreateInfoMVK& setPView( const void* pView_ ) + { + pView = pView_; + return *this; + } + + operator const VkMacOSSurfaceCreateInfoMVK&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pView == rhs.pView ); + } + + bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; + + public: + const void* pNext = nullptr; + MacOSSurfaceCreateFlagsMVK flags; + const void* pView; + }; + static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + struct PipelineViewportWScalingStateCreateInfoNV + { + PipelineViewportWScalingStateCreateInfoNV( Bool32 viewportWScalingEnable_ = 0, uint32_t viewportCount_ = 0, const ViewportWScalingNV* pViewportWScalings_ = nullptr ) + : viewportWScalingEnable( viewportWScalingEnable_ ) + , viewportCount( viewportCount_ ) + , pViewportWScalings( pViewportWScalings_ ) + { + } + + PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) ); + } + + PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportWScalingStateCreateInfoNV ) ); + return *this; + } + PipelineViewportWScalingStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV& setViewportWScalingEnable( Bool32 viewportWScalingEnable_ ) + { + viewportWScalingEnable = viewportWScalingEnable_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ ) + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportWScalingStateCreateInfoNV& setPViewportWScalings( const ViewportWScalingNV* pViewportWScalings_ ) + { + pViewportWScalings = pViewportWScalings_; + return *this; + } + + operator const VkPipelineViewportWScalingStateCreateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) + && ( viewportCount == rhs.viewportCount ) + && ( pViewportWScalings == rhs.pViewportWScalings ); + } + + bool operator!=( 
PipelineViewportWScalingStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + + public: + const void* pNext = nullptr; + Bool32 viewportWScalingEnable; + uint32_t viewportCount; + const ViewportWScalingNV* pViewportWScalings; + }; + static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = 0 ) + : maxDiscardRectangles( maxDiscardRectangles_ ) + { + } + + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) ); + } + + PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) ); + return *this; + } + PhysicalDeviceDiscardRectanglePropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDiscardRectanglePropertiesEXT& setMaxDiscardRectangles( uint32_t maxDiscardRectangles_ ) + { + maxDiscardRectangles = maxDiscardRectangles_; + return *this; + } + + operator const VkPhysicalDeviceDiscardRectanglePropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); + } + + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t maxDiscardRectangles; + }; + static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX + { + operator const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); + } + + bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + public: + void* pNext = nullptr; + Bool32 perViewPositionAllComponents; + }; + static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceSurfaceInfo2KHR + { + PhysicalDeviceSurfaceInfo2KHR( SurfaceKHR surface_ = SurfaceKHR() ) + : surface( surface_ ) + { + } + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) ); + } + + PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSurfaceInfo2KHR ) ); + return *this; + } + PhysicalDeviceSurfaceInfo2KHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR& setSurface( SurfaceKHR surface_ ) + { + surface = surface_; + return *this; + } + + operator const VkPhysicalDeviceSurfaceInfo2KHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surface == rhs.surface ); + } + + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + + public: + const void* pNext = nullptr; + SurfaceKHR surface; + }; + static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" ); + + struct PhysicalDevice16BitStorageFeatures + { + PhysicalDevice16BitStorageFeatures( Bool32 storageBuffer16BitAccess_ = 0, Bool32 uniformAndStorageBuffer16BitAccess_ = 0, Bool32 storagePushConstant16_ = 0, Bool32 storageInputOutput16_ = 0 ) + : storageBuffer16BitAccess( storageBuffer16BitAccess_ ) + , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) + , storagePushConstant16( storagePushConstant16_ ) + , storageInputOutput16( storageInputOutput16_ ) + { + } + + PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) ); + } + + PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDevice16BitStorageFeatures ) ); + return *this; + } + PhysicalDevice16BitStorageFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDevice16BitStorageFeatures& setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ ) + { + storageBuffer16BitAccess = storageBuffer16BitAccess_; + return *this; + } + + PhysicalDevice16BitStorageFeatures& setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ ) + { + uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + return *this; + } + + PhysicalDevice16BitStorageFeatures& setStoragePushConstant16( Bool32 storagePushConstant16_ ) + { + storagePushConstant16 = storagePushConstant16_; + return *this; + } + + PhysicalDevice16BitStorageFeatures& setStorageInputOutput16( Bool32 storageInputOutput16_ ) + { + storageInputOutput16 = storageInputOutput16_; + return *this; + } + + operator const VkPhysicalDevice16BitStorageFeatures&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) + && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) + && ( storagePushConstant16 == rhs.storagePushConstant16 ) 
+ && ( storageInputOutput16 == rhs.storageInputOutput16 ); + } + + bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; + + public: + void* pNext = nullptr; + Bool32 storageBuffer16BitAccess; + Bool32 uniformAndStorageBuffer16BitAccess; + Bool32 storagePushConstant16; + Bool32 storageInputOutput16; + }; + static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" ); + + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + + struct BufferMemoryRequirementsInfo2 + { + BufferMemoryRequirementsInfo2( Buffer buffer_ = Buffer() ) + : buffer( buffer_ ) + { + } + + BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2 ) ); + } + + BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferMemoryRequirementsInfo2 ) ); + return *this; + } + BufferMemoryRequirementsInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BufferMemoryRequirementsInfo2& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator const VkBufferMemoryRequirementsInfo2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; + + public: + const void* pNext = nullptr; + Buffer buffer; + }; + static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); + + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + + struct ImageMemoryRequirementsInfo2 + { + ImageMemoryRequirementsInfo2( Image image_ = Image() ) + : image( image_ ) + { + } + + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2 ) ); + } + + ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageMemoryRequirementsInfo2 ) ); + return *this; + } + ImageMemoryRequirementsInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageMemoryRequirementsInfo2& setImage( Image image_ ) + { + image = image_; + return *this; + } + + operator const VkImageMemoryRequirementsInfo2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ); + } + + bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + + public: + const void* pNext = nullptr; + Image image; + }; + static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" 
); + + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + + struct ImageSparseMemoryRequirementsInfo2 + { + ImageSparseMemoryRequirementsInfo2( Image image_ = Image() ) + : image( image_ ) + { + } + + ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) ); + } + + ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSparseMemoryRequirementsInfo2 ) ); + return *this; + } + ImageSparseMemoryRequirementsInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageSparseMemoryRequirementsInfo2& setImage( Image image_ ) + { + image = image_; + return *this; + } + + operator const VkImageSparseMemoryRequirementsInfo2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ); + } + + bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; + + public: + const void* pNext = nullptr; + Image image; + }; + static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" ); + + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + + struct MemoryRequirements2 + { + operator const VkMemoryRequirements2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryRequirements2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryRequirements == rhs.memoryRequirements ); + } + + bool operator!=( MemoryRequirements2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryRequirements2; + + public: + void* pNext = nullptr; + MemoryRequirements memoryRequirements; + }; + static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" ); + + using MemoryRequirements2KHR = MemoryRequirements2; + + struct MemoryDedicatedRequirements + { + operator const VkMemoryDedicatedRequirements&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryDedicatedRequirements const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) + && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); + } + + bool operator!=( MemoryDedicatedRequirements const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryDedicatedRequirements; + + public: + void* pNext = nullptr; + Bool32 prefersDedicatedAllocation; + Bool32 requiresDedicatedAllocation; + }; + static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" 
); + + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + + struct MemoryDedicatedAllocateInfo + { + MemoryDedicatedAllocateInfo( Image image_ = Image(), Buffer buffer_ = Buffer() ) + : image( image_ ) + , buffer( buffer_ ) + { + } + + MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfo ) ); + } + + MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryDedicatedAllocateInfo ) ); + return *this; + } + MemoryDedicatedAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryDedicatedAllocateInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + MemoryDedicatedAllocateInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator const VkMemoryDedicatedAllocateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + + public: + const void* pNext = nullptr; + Image image; + Buffer buffer; + }; + static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" ); + + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + + struct SamplerYcbcrConversionInfo + { + SamplerYcbcrConversionInfo( SamplerYcbcrConversion conversion_ = SamplerYcbcrConversion() ) + : conversion( conversion_ ) + { + } + + SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerYcbcrConversionInfo ) ); + } + + SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerYcbcrConversionInfo ) ); + return *this; + } + SamplerYcbcrConversionInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SamplerYcbcrConversionInfo& setConversion( SamplerYcbcrConversion conversion_ ) + { + conversion = conversion_; + return *this; + } + + operator const VkSamplerYcbcrConversionInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SamplerYcbcrConversionInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( conversion == rhs.conversion ); + } + + bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerYcbcrConversionInfo; + + public: + const void* pNext = nullptr; + SamplerYcbcrConversion conversion; + }; + static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" 
);
+
+  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
+
+  struct PhysicalDeviceSamplerYcbcrConversionFeatures
+  {
+    PhysicalDeviceSamplerYcbcrConversionFeatures( Bool32 samplerYcbcrConversion_ = 0 )
+      : samplerYcbcrConversion( samplerYcbcrConversion_ )
+    {
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) );
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) );
+      return *this;
+    }
+    PhysicalDeviceSamplerYcbcrConversionFeatures& setPNext( void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures& setSamplerYcbcrConversion( Bool32 samplerYcbcrConversion_ )
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+
+    operator const VkPhysicalDeviceSamplerYcbcrConversionFeatures&() const
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>(this);
+    }
+
+    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
+    }
+
+    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+
+  public:
+    void* pNext = nullptr;
+    Bool32 samplerYcbcrConversion;
+  };
+  static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
+
+  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
+
+  struct SamplerYcbcrConversionImageFormatProperties
+  {
+    operator const VkSamplerYcbcrConversionImageFormatProperties&() const
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>(this);
+    }
+
+    bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
+    }
+
+    bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+
+  public:
+    void* pNext = nullptr;
+    uint32_t combinedImageSamplerDescriptorCount;
+  };
+  static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
+
+  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+
+  struct TextureLODGatherFormatPropertiesAMD
+  {
+    operator const VkTextureLODGatherFormatPropertiesAMD&() const
+    {
+      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>(this);
+    }
+
+    bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+    }
+
+    bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+
+  public:
+    void* pNext = nullptr;
+    Bool32 supportsTextureGatherLODBiasAMD;
+  };
+  static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
+
+  struct ProtectedSubmitInfo
+  {
+    ProtectedSubmitInfo( Bool32 protectedSubmit_ = 0 )
+      : protectedSubmit( protectedSubmit_ )
+    {
+    }
+
+    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ProtectedSubmitInfo ) );
+    }
+
+    ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( ProtectedSubmitInfo ) );
+      return *this;
+    }
+    ProtectedSubmitInfo& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ProtectedSubmitInfo& setProtectedSubmit( Bool32 protectedSubmit_ )
+    {
+      protectedSubmit = protectedSubmit_;
+      return *this;
+    }
+
+    operator const VkProtectedSubmitInfo&() const
+    {
+      return *reinterpret_cast<const VkProtectedSubmitInfo*>(this);
+    }
+
+    bool operator==( ProtectedSubmitInfo const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedSubmit == rhs.protectedSubmit );
+    }
+
+    bool operator!=( ProtectedSubmitInfo const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eProtectedSubmitInfo;
+
+  public:
+    const void* pNext = nullptr;
+    Bool32 protectedSubmit;
+  };
+  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!"
); + + struct PhysicalDeviceProtectedMemoryFeatures + { + PhysicalDeviceProtectedMemoryFeatures( Bool32 protectedMemory_ = 0 ) + : protectedMemory( protectedMemory_ ) + { + } + + PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) ); + } + + PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryFeatures ) ); + return *this; + } + PhysicalDeviceProtectedMemoryFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceProtectedMemoryFeatures& setProtectedMemory( Bool32 protectedMemory_ ) + { + protectedMemory = protectedMemory_; + return *this; + } + + operator const VkPhysicalDeviceProtectedMemoryFeatures&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( protectedMemory == rhs.protectedMemory ); + } + + bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + + public: + void* pNext = nullptr; + Bool32 protectedMemory; + }; + static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceProtectedMemoryProperties + { + PhysicalDeviceProtectedMemoryProperties( Bool32 protectedNoFault_ = 0 ) + : protectedNoFault( protectedNoFault_ ) + { + } + + PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) ); + } + + PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceProtectedMemoryProperties ) ); + return *this; + } + PhysicalDeviceProtectedMemoryProperties& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceProtectedMemoryProperties& setProtectedNoFault( Bool32 protectedNoFault_ ) + { + protectedNoFault = protectedNoFault_; + return *this; + } + + operator const VkPhysicalDeviceProtectedMemoryProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( protectedNoFault == rhs.protectedNoFault ); + } + + bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + + public: + void* pNext = nullptr; + Bool32 protectedNoFault; + }; + static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" 
); + + struct PipelineCoverageToColorStateCreateInfoNV + { + PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateFlagsNV flags_ = PipelineCoverageToColorStateCreateFlagsNV(), Bool32 coverageToColorEnable_ = 0, uint32_t coverageToColorLocation_ = 0 ) + : flags( flags_ ) + , coverageToColorEnable( coverageToColorEnable_ ) + , coverageToColorLocation( coverageToColorLocation_ ) + { + } + + PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) ); + } + + PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCoverageToColorStateCreateInfoNV ) ); + return *this; + } + PipelineCoverageToColorStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV& setFlags( PipelineCoverageToColorStateCreateFlagsNV flags_ ) + { + flags = flags_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorEnable( Bool32 coverageToColorEnable_ ) + { + coverageToColorEnable = coverageToColorEnable_; + return *this; + } + + PipelineCoverageToColorStateCreateInfoNV& setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) + { + coverageToColorLocation = coverageToColorLocation_; + return *this; + } + + operator const VkPipelineCoverageToColorStateCreateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( coverageToColorEnable == rhs.coverageToColorEnable ) + && ( coverageToColorLocation == rhs.coverageToColorLocation ); + } + + bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + + public: + const void* pNext = nullptr; + PipelineCoverageToColorStateCreateFlagsNV flags; + Bool32 coverageToColorEnable; + uint32_t coverageToColorLocation; + }; + static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT + { + operator const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) + && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); + } + + bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + public: + void* pNext = nullptr; + Bool32 filterMinmaxSingleComponentFormats; + Bool32 filterMinmaxImageComponentMapping; + }; + static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" 
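+
+  /* A minimal sketch of the fluent setter style, wiring PipelineCoverageToColorStateCreateInfoNV
+     into the multisample state of a graphics pipeline; the rest of the pipeline setup is elided:
+
+       #include <vulkan/vulkan.hpp>
+
+       void setupCoverageToColor()
+       {
+         vk::PipelineCoverageToColorStateCreateInfoNV coverageToColor;
+         coverageToColor.setCoverageToColorEnable( VK_TRUE )
+                        .setCoverageToColorLocation( 1 );  // color attachment that receives the coverage mask
+
+         vk::PipelineMultisampleStateCreateInfo multisampleState;
+         multisampleState.setRasterizationSamples( vk::SampleCountFlagBits::e4 )
+                         .setPNext( &coverageToColor );    // extension struct rides on the pNext chain
+         // multisampleState would then be referenced from GraphicsPipelineCreateInfo
+       }
+  */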
); + + struct MultisamplePropertiesEXT + { + operator const VkMultisamplePropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MultisamplePropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); + } + + bool operator!=( MultisamplePropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMultisamplePropertiesEXT; + + public: + void* pNext = nullptr; + Extent2D maxSampleLocationGridSize; + }; + static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT + { + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( Bool32 advancedBlendCoherentOperations_ = 0 ) + : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) + { + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ); + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ); + return *this; + } + PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT& setAdvancedBlendCoherentOperations( Bool32 advancedBlendCoherentOperations_ ) + { + advancedBlendCoherentOperations = advancedBlendCoherentOperations_; + return *this; + } + + operator const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 advancedBlendCoherentOperations; + }; + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT + { + operator const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) + && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) + && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) + && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) + && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) + && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t advancedBlendMaxColorAttachments; + Bool32 advancedBlendIndependentBlend; + Bool32 advancedBlendNonPremultipliedSrcColor; + Bool32 advancedBlendNonPremultipliedDstColor; + Bool32 advancedBlendCorrelatedOverlap; + Bool32 advancedBlendAllOperations; + }; + static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" ); + + struct ImageFormatListCreateInfoKHR + { + ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0, const Format* pViewFormats_ = nullptr ) + : viewFormatCount( viewFormatCount_ ) + , pViewFormats( pViewFormats_ ) + { + } + + ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageFormatListCreateInfoKHR ) ); + } + + ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageFormatListCreateInfoKHR ) ); + return *this; + } + ImageFormatListCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageFormatListCreateInfoKHR& setViewFormatCount( uint32_t viewFormatCount_ ) + { + viewFormatCount = viewFormatCount_; + return *this; + } + + ImageFormatListCreateInfoKHR& setPViewFormats( const Format* pViewFormats_ ) + { + pViewFormats = pViewFormats_; + return *this; + } + + operator const VkImageFormatListCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageFormatListCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( viewFormatCount == rhs.viewFormatCount ) + && ( pViewFormats == rhs.pViewFormats ); + } + + bool operator!=( ImageFormatListCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageFormatListCreateInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t viewFormatCount; + const Format* pViewFormats; + }; + static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "struct and wrapper have different size!" 
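+
+  /* A minimal sketch of ImageFormatListCreateInfoKHR: declaring up front which formats a
+     mutable-format image may be viewed with. Assumes device (illustrative name) is a valid
+     vk::Device; tiling, layout and sharing mode are left at their defaults:
+
+       #include <vulkan/vulkan.hpp>
+
+       vk::Image createMutableImage( vk::Device device )
+       {
+         const vk::Format viewFormats[] = { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb };
+
+         vk::ImageFormatListCreateInfoKHR formatList;
+         formatList.setViewFormatCount( 2 )
+                   .setPViewFormats( viewFormats );
+
+         vk::ImageCreateInfo imageInfo;
+         imageInfo.setPNext( &formatList )                 // attach the format list to image creation
+                  .setFlags( vk::ImageCreateFlagBits::eMutableFormat )
+                  .setImageType( vk::ImageType::e2D )
+                  .setFormat( vk::Format::eR8G8B8A8Unorm )
+                  .setExtent( vk::Extent3D( 256, 256, 1 ) )
+                  .setMipLevels( 1 )
+                  .setArrayLayers( 1 )
+                  .setSamples( vk::SampleCountFlagBits::e1 )
+                  .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eColorAttachment );
+
+         return device.createImage( imageInfo );
+       }
+  */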
); + + struct ValidationCacheCreateInfoEXT + { + ValidationCacheCreateInfoEXT( ValidationCacheCreateFlagsEXT flags_ = ValidationCacheCreateFlagsEXT(), size_t initialDataSize_ = 0, const void* pInitialData_ = nullptr ) + : flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + { + } + + ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ValidationCacheCreateInfoEXT ) ); + } + + ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ValidationCacheCreateInfoEXT ) ); + return *this; + } + ValidationCacheCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ValidationCacheCreateInfoEXT& setFlags( ValidationCacheCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + ValidationCacheCreateInfoEXT& setInitialDataSize( size_t initialDataSize_ ) + { + initialDataSize = initialDataSize_; + return *this; + } + + ValidationCacheCreateInfoEXT& setPInitialData( const void* pInitialData_ ) + { + pInitialData = pInitialData_; + return *this; + } + + operator const VkValidationCacheCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( initialDataSize == rhs.initialDataSize ) + && ( pInitialData == rhs.pInitialData ); + } + + bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eValidationCacheCreateInfoEXT; + + public: + const void* pNext = nullptr; + ValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; + }; + static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" 
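+
+  /* A minimal sketch of re-seeding a validation cache from blob data saved on a previous run.
+     Only the create info is built here; creating the cache itself needs the
+     VK_EXT_validation_cache entry points loaded:
+
+       #include <vulkan/vulkan.hpp>
+       #include <vector>
+
+       vk::ValidationCacheCreateInfoEXT makeValidationCacheInfo( const std::vector<uint8_t>& savedBlob )
+       {
+         vk::ValidationCacheCreateInfoEXT createInfo;
+         createInfo.setInitialDataSize( savedBlob.size() )
+                   .setPInitialData( savedBlob.empty() ? nullptr : savedBlob.data() );
+         return createInfo;
+       }
+  */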
); + + struct ShaderModuleValidationCacheCreateInfoEXT + { + ShaderModuleValidationCacheCreateInfoEXT( ValidationCacheEXT validationCache_ = ValidationCacheEXT() ) + : validationCache( validationCache_ ) + { + } + + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) ); + } + + ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) ); + return *this; + } + ShaderModuleValidationCacheCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ShaderModuleValidationCacheCreateInfoEXT& setValidationCache( ValidationCacheEXT validationCache_ ) + { + validationCache = validationCache_; + return *this; + } + + operator const VkShaderModuleValidationCacheCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( validationCache == rhs.validationCache ); + } + + bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + + public: + const void* pNext = nullptr; + ValidationCacheEXT validationCache; + }; + static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceMaintenance3Properties + { + operator const VkPhysicalDeviceMaintenance3Properties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) + && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + } + + bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; + + public: + void* pNext = nullptr; + uint32_t maxPerSetDescriptors; + DeviceSize maxMemoryAllocationSize; + }; + static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" ); + + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + + struct DescriptorSetLayoutSupport + { + operator const VkDescriptorSetLayoutSupport&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetLayoutSupport const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( supported == rhs.supported ); + } + + bool operator!=( DescriptorSetLayoutSupport const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetLayoutSupport; + + public: + void* pNext = nullptr; + Bool32 supported; + }; + static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" 
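+
+  /* A minimal sketch of reading the maintenance3 limits through the properties2 chain.
+     Returned-only wrappers such as PhysicalDeviceMaintenance3Properties have no setters,
+     so pNext is assigned directly; physicalDevice (illustrative name) must support Vulkan 1.1:
+
+       #include <vulkan/vulkan.hpp>
+
+       vk::DeviceSize maxMemoryAllocationSize( vk::PhysicalDevice physicalDevice )
+       {
+         vk::PhysicalDeviceMaintenance3Properties maintenance3;
+         vk::PhysicalDeviceProperties2 properties2;
+         properties2.pNext = &maintenance3;                // output structs expose pNext as a public member
+
+         physicalDevice.getProperties2( &properties2 );
+         return maintenance3.maxMemoryAllocationSize;
+       }
+  */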
); + + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + + struct PhysicalDeviceShaderDrawParameterFeatures + { + PhysicalDeviceShaderDrawParameterFeatures( Bool32 shaderDrawParameters_ = 0 ) + : shaderDrawParameters( shaderDrawParameters_ ) + { + } + + PhysicalDeviceShaderDrawParameterFeatures( VkPhysicalDeviceShaderDrawParameterFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShaderDrawParameterFeatures ) ); + } + + PhysicalDeviceShaderDrawParameterFeatures& operator=( VkPhysicalDeviceShaderDrawParameterFeatures const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceShaderDrawParameterFeatures ) ); + return *this; + } + PhysicalDeviceShaderDrawParameterFeatures& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderDrawParameterFeatures& setShaderDrawParameters( Bool32 shaderDrawParameters_ ) + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } + + operator const VkPhysicalDeviceShaderDrawParameterFeatures&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceShaderDrawParameterFeatures const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderDrawParameters == rhs.shaderDrawParameters ); + } + + bool operator!=( PhysicalDeviceShaderDrawParameterFeatures const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceShaderDrawParameterFeatures; + + public: + void* pNext = nullptr; + Bool32 shaderDrawParameters; + }; + static_assert( sizeof( PhysicalDeviceShaderDrawParameterFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParameterFeatures ), "struct and wrapper have different size!" ); + + struct DebugUtilsLabelEXT + { + DebugUtilsLabelEXT( const char* pLabelName_ = nullptr, std::array const& color_ = { { 0, 0, 0, 0 } } ) + : pLabelName( pLabelName_ ) + { + memcpy( &color, color_.data(), 4 * sizeof( float ) ); + } + + DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsLabelEXT ) ); + } + + DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsLabelEXT ) ); + return *this; + } + DebugUtilsLabelEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugUtilsLabelEXT& setPLabelName( const char* pLabelName_ ) + { + pLabelName = pLabelName_; + return *this; + } + + DebugUtilsLabelEXT& setColor( std::array color_ ) + { + memcpy( &color, color_.data(), 4 * sizeof( float ) ); + return *this; + } + + operator const VkDebugUtilsLabelEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsLabelEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pLabelName == rhs.pLabelName ) + && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 ); + } + + bool operator!=( DebugUtilsLabelEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugUtilsLabelEXT; + + public: + const void* pNext = nullptr; + const char* pLabelName; + float color[4]; + }; + static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" 
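+
+  /* A minimal sketch of building a DebugUtilsLabelEXT for a debugging tool, assuming the color
+     parameter is the usual std::array<float, 4> RGBA value. Inserting the label into a command
+     buffer additionally requires the VK_EXT_debug_utils function pointers, which are elided here:
+
+       #include <vulkan/vulkan.hpp>
+
+       vk::DebugUtilsLabelEXT makeLabel( const char* name )
+       {
+         vk::DebugUtilsLabelEXT label( name, { { 1.0f, 0.5f, 0.0f, 1.0f } } );  // RGBA tint shown by the tool
+         return label;
+       }
+  */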
); + + struct MemoryHostPointerPropertiesEXT + { + MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = 0 ) + : memoryTypeBits( memoryTypeBits_ ) + { + } + + MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryHostPointerPropertiesEXT ) ); + } + + MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryHostPointerPropertiesEXT ) ); + return *this; + } + MemoryHostPointerPropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryHostPointerPropertiesEXT& setMemoryTypeBits( uint32_t memoryTypeBits_ ) + { + memoryTypeBits = memoryTypeBits_; + return *this; + } + + operator const VkMemoryHostPointerPropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceExternalMemoryHostPropertiesEXT + { + PhysicalDeviceExternalMemoryHostPropertiesEXT( DeviceSize minImportedHostPointerAlignment_ = 0 ) + : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) + { + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) ); + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) ); + return *this; + } + PhysicalDeviceExternalMemoryHostPropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT& setMinImportedHostPointerAlignment( DeviceSize minImportedHostPointerAlignment_ ) + { + minImportedHostPointerAlignment = minImportedHostPointerAlignment_; + return *this; + } + + operator const VkPhysicalDeviceExternalMemoryHostPropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); + } + + bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + + public: + void* pNext = nullptr; + DeviceSize minImportedHostPointerAlignment; + }; + static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceConservativeRasterizationPropertiesEXT + { + PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = 0, float maxExtraPrimitiveOverestimationSize_ = 0, float extraPrimitiveOverestimationSizeGranularity_ = 0, Bool32 primitiveUnderestimation_ = 0, Bool32 conservativePointAndLineRasterization_ = 0, Bool32 degenerateTrianglesRasterized_ = 0, Bool32 degenerateLinesRasterized_ = 0, Bool32 fullyCoveredFragmentShaderInputVariable_ = 0, Bool32 conservativeRasterizationPostDepthCoverage_ = 0 ) + : primitiveOverestimationSize( primitiveOverestimationSize_ ) + , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) + , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) + , primitiveUnderestimation( primitiveUnderestimation_ ) + , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) + , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) + , degenerateLinesRasterized( degenerateLinesRasterized_ ) + , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) + , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) + { + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) ); + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) ); + return *this; + } + PhysicalDeviceConservativeRasterizationPropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setPrimitiveOverestimationSize( float primitiveOverestimationSize_ ) + { + primitiveOverestimationSize = primitiveOverestimationSize_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setMaxExtraPrimitiveOverestimationSize( float maxExtraPrimitiveOverestimationSize_ ) + { + maxExtraPrimitiveOverestimationSize = maxExtraPrimitiveOverestimationSize_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setExtraPrimitiveOverestimationSizeGranularity( float extraPrimitiveOverestimationSizeGranularity_ ) + { + extraPrimitiveOverestimationSizeGranularity = extraPrimitiveOverestimationSizeGranularity_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setPrimitiveUnderestimation( Bool32 primitiveUnderestimation_ ) + { + primitiveUnderestimation = primitiveUnderestimation_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setConservativePointAndLineRasterization( Bool32 conservativePointAndLineRasterization_ ) + { + conservativePointAndLineRasterization = conservativePointAndLineRasterization_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setDegenerateTrianglesRasterized( Bool32 degenerateTrianglesRasterized_ ) + { + degenerateTrianglesRasterized = degenerateTrianglesRasterized_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setDegenerateLinesRasterized( Bool32 degenerateLinesRasterized_ ) + { + degenerateLinesRasterized = degenerateLinesRasterized_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& 
setFullyCoveredFragmentShaderInputVariable( Bool32 fullyCoveredFragmentShaderInputVariable_ ) + { + fullyCoveredFragmentShaderInputVariable = fullyCoveredFragmentShaderInputVariable_; + return *this; + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT& setConservativeRasterizationPostDepthCoverage( Bool32 conservativeRasterizationPostDepthCoverage_ ) + { + conservativeRasterizationPostDepthCoverage = conservativeRasterizationPostDepthCoverage_; + return *this; + } + + operator const VkPhysicalDeviceConservativeRasterizationPropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) + && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) + && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) + && ( primitiveUnderestimation == rhs.primitiveUnderestimation ) + && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) + && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) + && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) + && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) + && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); + } + + bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + + public: + void* pNext = nullptr; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + Bool32 primitiveUnderestimation; + Bool32 conservativePointAndLineRasterization; + Bool32 degenerateTrianglesRasterized; + Bool32 degenerateLinesRasterized; + Bool32 fullyCoveredFragmentShaderInputVariable; + Bool32 conservativeRasterizationPostDepthCoverage; + }; + static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceShaderCorePropertiesAMD + { + operator const VkPhysicalDeviceShaderCorePropertiesAMD&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderEngineCount == rhs.shaderEngineCount ) + && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) + && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) + && ( simdPerComputeUnit == rhs.simdPerComputeUnit ) + && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) + && ( wavefrontSize == rhs.wavefrontSize ) + && ( sgprsPerSimd == rhs.sgprsPerSimd ) + && ( minSgprAllocation == rhs.minSgprAllocation ) + && ( maxSgprAllocation == rhs.maxSgprAllocation ) + && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) + && ( vgprsPerSimd == rhs.vgprsPerSimd ) + && ( minVgprAllocation == rhs.minVgprAllocation ) + && ( maxVgprAllocation == rhs.maxVgprAllocation ) + && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + + public: + void* pNext = nullptr; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; + }; + static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" 
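+
+  /* A minimal sketch showing that several property structs can hang off a single
+     getProperties2 call by linking their pNext members; assumes both extensions are
+     reported by the device, and variable names are illustrative:
+
+       #include <vulkan/vulkan.hpp>
+
+       void queryVendorLimits( vk::PhysicalDevice physicalDevice )
+       {
+         vk::PhysicalDeviceShaderCorePropertiesAMD shaderCore;
+         vk::PhysicalDeviceConservativeRasterizationPropertiesEXT conservative;
+         shaderCore.pNext = &conservative;                 // second link of the chain
+
+         vk::PhysicalDeviceProperties2 properties2;
+         properties2.pNext = &shaderCore;                  // first link of the chain
+         physicalDevice.getProperties2( &properties2 );
+
+         // shaderCore.wavefrontSize and conservative.primitiveOverestimationSize are now filled in
+       }
+  */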
); + + struct PhysicalDeviceDescriptorIndexingFeaturesEXT + { + PhysicalDeviceDescriptorIndexingFeaturesEXT( Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0, Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0, Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0, Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0, Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0, Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0, Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0, Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0, Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0, Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0, Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0, Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0, Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0, Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0, Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0, Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0, Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0, Bool32 descriptorBindingPartiallyBound_ = 0, Bool32 descriptorBindingVariableDescriptorCount_ = 0, Bool32 runtimeDescriptorArray_ = 0 ) + : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) + , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) + , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) + , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) + , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) + , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) + , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) + , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) + , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) + , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) + , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) + , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) + , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) + , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) + , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) + , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) + , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) + , runtimeDescriptorArray( runtimeDescriptorArray_ ) + { + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) ); + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) ); + return *this; + } + 
PhysicalDeviceDescriptorIndexingFeaturesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderInputAttachmentArrayDynamicIndexing( Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) + { + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformTexelBufferArrayDynamicIndexing( Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageTexelBufferArrayDynamicIndexing( Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformBufferArrayNonUniformIndexing( Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderSampledImageArrayNonUniformIndexing( Bool32 shaderSampledImageArrayNonUniformIndexing_ ) + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageBufferArrayNonUniformIndexing( Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageImageArrayNonUniformIndexing( Bool32 shaderStorageImageArrayNonUniformIndexing_ ) + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderInputAttachmentArrayNonUniformIndexing( Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderUniformTexelBufferArrayNonUniformIndexing( Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setShaderStorageTexelBufferArrayNonUniformIndexing( Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUniformBufferUpdateAfterBind( Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingSampledImageUpdateAfterBind( Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageImageUpdateAfterBind( Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) + { + 
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageBufferUpdateAfterBind( Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUniformTexelBufferUpdateAfterBind( Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingStorageTexelBufferUpdateAfterBind( Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingUpdateUnusedWhilePending( Bool32 descriptorBindingUpdateUnusedWhilePending_ ) + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingPartiallyBound( Bool32 descriptorBindingPartiallyBound_ ) + { + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setDescriptorBindingVariableDescriptorCount( Bool32 descriptorBindingVariableDescriptorCount_ ) + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + PhysicalDeviceDescriptorIndexingFeaturesEXT& setRuntimeDescriptorArray( Bool32 runtimeDescriptorArray_ ) + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } + + operator const VkPhysicalDeviceDescriptorIndexingFeaturesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) + && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) + && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) + && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) + && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) + && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) + && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) + && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) + && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) + && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) + && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) + && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) + && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) + && ( 
descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) + && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) + && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) + && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) + && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) + && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) + && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); + } + + bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT; + + public: + void* pNext = nullptr; + Bool32 shaderInputAttachmentArrayDynamicIndexing; + Bool32 shaderUniformTexelBufferArrayDynamicIndexing; + Bool32 shaderStorageTexelBufferArrayDynamicIndexing; + Bool32 shaderUniformBufferArrayNonUniformIndexing; + Bool32 shaderSampledImageArrayNonUniformIndexing; + Bool32 shaderStorageBufferArrayNonUniformIndexing; + Bool32 shaderStorageImageArrayNonUniformIndexing; + Bool32 shaderInputAttachmentArrayNonUniformIndexing; + Bool32 shaderUniformTexelBufferArrayNonUniformIndexing; + Bool32 shaderStorageTexelBufferArrayNonUniformIndexing; + Bool32 descriptorBindingUniformBufferUpdateAfterBind; + Bool32 descriptorBindingSampledImageUpdateAfterBind; + Bool32 descriptorBindingStorageImageUpdateAfterBind; + Bool32 descriptorBindingStorageBufferUpdateAfterBind; + Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + Bool32 descriptorBindingUpdateUnusedWhilePending; + Bool32 descriptorBindingPartiallyBound; + Bool32 descriptorBindingVariableDescriptorCount; + Bool32 runtimeDescriptorArray; + }; + static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" 
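+
+  /* A minimal sketch of enabling a couple of descriptor-indexing features at device creation
+     by chaining the feature struct into DeviceCreateInfo; queue setup and feature querying
+     are elided, and parameter names are illustrative:
+
+       #include <vulkan/vulkan.hpp>
+
+       vk::Device createDeviceWithDescriptorIndexing( vk::PhysicalDevice physicalDevice,
+                                                      const vk::DeviceQueueCreateInfo& queueInfo )
+       {
+         vk::PhysicalDeviceDescriptorIndexingFeaturesEXT indexingFeatures;
+         indexingFeatures.setRuntimeDescriptorArray( VK_TRUE )
+                         .setDescriptorBindingPartiallyBound( VK_TRUE );
+
+         static const char* extensions[] = { VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME };
+
+         vk::DeviceCreateInfo deviceInfo;
+         deviceInfo.setPNext( &indexingFeatures )          // requested features ride on the pNext chain
+                   .setQueueCreateInfoCount( 1 )
+                   .setPQueueCreateInfos( &queueInfo )
+                   .setEnabledExtensionCount( 1 )
+                   .setPpEnabledExtensionNames( extensions );
+
+         return physicalDevice.createDevice( deviceInfo );
+       }
+  */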
); + + struct PhysicalDeviceDescriptorIndexingPropertiesEXT + { + operator const VkPhysicalDeviceDescriptorIndexingPropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) + && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) + && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) + && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) + && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) + && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) + && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) + && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) + && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) + && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) + && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) + && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) + && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) + && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) + && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) + && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) + && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) + && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) + && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) + && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) + && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) + && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) + && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); + } + + bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + Bool32 shaderUniformBufferArrayNonUniformIndexingNative; + Bool32 shaderSampledImageArrayNonUniformIndexingNative; + Bool32 shaderStorageBufferArrayNonUniformIndexingNative; + Bool32 shaderStorageImageArrayNonUniformIndexingNative; + Bool32 shaderInputAttachmentArrayNonUniformIndexingNative; + Bool32 robustBufferAccessUpdateAfterBind; + Bool32 quadDivergentImplicitLod; + uint32_t 
maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; + }; + static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" ); + + struct DescriptorSetVariableDescriptorCountAllocateInfoEXT + { + DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0, const uint32_t* pDescriptorCounts_ = nullptr ) + : descriptorSetCount( descriptorSetCount_ ) + , pDescriptorCounts( pDescriptorCounts_ ) + { + } + + DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) ); + } + + DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) ); + return *this; + } + DescriptorSetVariableDescriptorCountAllocateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorSetVariableDescriptorCountAllocateInfoEXT& setDescriptorSetCount( uint32_t descriptorSetCount_ ) + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + DescriptorSetVariableDescriptorCountAllocateInfoEXT& setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) + { + pDescriptorCounts = pDescriptorCounts_; + return *this; + } + + operator const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( descriptorSetCount == rhs.descriptorSetCount ) + && ( pDescriptorCounts == rhs.pDescriptorCounts ); + } + + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; + }; + static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "struct and wrapper have different size!" 
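+
+  /* A minimal sketch of requesting a specific size for a variable-count binding when allocating
+     a descriptor set; assumes pool and layout (illustrative names) were created with the
+     corresponding VK_EXT_descriptor_indexing binding flag:
+
+       #include <vulkan/vulkan.hpp>
+
+       vk::DescriptorSet allocateVariableSet( vk::Device device, vk::DescriptorPool pool,
+                                              vk::DescriptorSetLayout layout, uint32_t count )
+       {
+         vk::DescriptorSetVariableDescriptorCountAllocateInfoEXT variableCounts;
+         variableCounts.setDescriptorSetCount( 1 )
+                       .setPDescriptorCounts( &count );
+
+         vk::DescriptorSetAllocateInfo allocInfo;
+         allocInfo.setPNext( &variableCounts )
+                  .setDescriptorPool( pool )
+                  .setDescriptorSetCount( 1 )
+                  .setPSetLayouts( &layout );
+
+         return device.allocateDescriptorSets( allocInfo )[0];  // enhanced call returns a std::vector
+       }
+  */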
); + + struct DescriptorSetVariableDescriptorCountLayoutSupportEXT + { + operator const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); + } + + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + public: + void* pNext = nullptr; + uint32_t maxVariableDescriptorCount; + }; + static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "struct and wrapper have different size!" ); + + struct PipelineVertexInputDivisorStateCreateInfoEXT + { + PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = 0, const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = nullptr ) + : vertexBindingDivisorCount( vertexBindingDivisorCount_ ) + , pVertexBindingDivisors( pVertexBindingDivisors_ ) + { + } + + PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) ); + } + + PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) ); + return *this; + } + PipelineVertexInputDivisorStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineVertexInputDivisorStateCreateInfoEXT& setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) + { + vertexBindingDivisorCount = vertexBindingDivisorCount_; + return *this; + } + + PipelineVertexInputDivisorStateCreateInfoEXT& setPVertexBindingDivisors( const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) + { + pVertexBindingDivisors = pVertexBindingDivisors_; + return *this; + } + + operator const VkPipelineVertexInputDivisorStateCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) + && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); + } + + bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t vertexBindingDivisorCount; + const VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; + }; + static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" 
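+
+  /* A minimal sketch of making a vertex binding advance once every four instances via
+     PipelineVertexInputDivisorStateCreateInfoEXT; bindings, attributes and the rest of the
+     graphics pipeline are elided:
+
+       #include <vulkan/vulkan.hpp>
+
+       void describeInstancedBinding()
+       {
+         vk::VertexInputBindingDivisorDescriptionEXT divisor( 1, 4 );  // binding 1, divisor 4
+
+         vk::PipelineVertexInputDivisorStateCreateInfoEXT divisorState;
+         divisorState.setVertexBindingDivisorCount( 1 )
+                     .setPVertexBindingDivisors( &divisor );
+
+         vk::PipelineVertexInputStateCreateInfo vertexInput;
+         vertexInput.setPNext( &divisorState );            // per-instance divisors ride on the pNext chain
+         // vertex bindings / attributes and pipeline creation would follow here
+       }
+  */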
); + + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT + { + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = 0 ) + : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) + { + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ); + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ); + return *this; + } + PhysicalDeviceVertexAttributeDivisorPropertiesEXT& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT& setMaxVertexAttribDivisor( uint32_t maxVertexAttribDivisor_ ) + { + maxVertexAttribDivisor = maxVertexAttribDivisor_; + return *this; + } + + operator const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + + public: + void* pNext = nullptr; + uint32_t maxVertexAttribDivisor; + }; + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct ImportAndroidHardwareBufferInfoANDROID + { + ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr ) + : buffer( buffer_ ) + { + } + + ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) ); + } + + ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportAndroidHardwareBufferInfoANDROID ) ); + return *this; + } + ImportAndroidHardwareBufferInfoANDROID& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID& setBuffer( struct AHardwareBuffer* buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator const VkImportAndroidHardwareBufferInfoANDROID&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + + public: + const void* pNext = nullptr; + struct AHardwareBuffer* buffer; + }; + static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct AndroidHardwareBufferUsageANDROID + { + operator const VkAndroidHardwareBufferUsageANDROID&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); + } + + bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; + + public: + void* pNext = nullptr; + uint64_t androidHardwareBufferUsage; + }; + static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct AndroidHardwareBufferPropertiesANDROID + { + operator const VkAndroidHardwareBufferPropertiesANDROID&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( allocationSize == rhs.allocationSize ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + + public: + void* pNext = nullptr; + DeviceSize allocationSize; + uint32_t memoryTypeBits; + }; + static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct MemoryGetAndroidHardwareBufferInfoANDROID + { + MemoryGetAndroidHardwareBufferInfoANDROID( DeviceMemory memory_ = DeviceMemory() ) + : memory( memory_ ) + { + } + + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) ); + } + + MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) ); + return *this; + } + MemoryGetAndroidHardwareBufferInfoANDROID& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryGetAndroidHardwareBufferInfoANDROID& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + operator const VkMemoryGetAndroidHardwareBufferInfoANDROID&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ); + } + + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + + public: + const void* pNext = nullptr; + DeviceMemory memory; + }; + static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct ExternalFormatANDROID + { + ExternalFormatANDROID( uint64_t externalFormat_ = 0 ) + : externalFormat( externalFormat_ ) + { + } + + ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalFormatANDROID ) ); + } + + ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalFormatANDROID ) ); + return *this; + } + ExternalFormatANDROID& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExternalFormatANDROID& setExternalFormat( uint64_t externalFormat_ ) + { + externalFormat = externalFormat_; + return *this; + } + + operator const VkExternalFormatANDROID&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalFormatANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalFormat == rhs.externalFormat ); + } + + bool operator!=( ExternalFormatANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalFormatANDROID; + + public: + void* pNext = nullptr; + uint64_t externalFormat; + }; + static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + + enum class SubpassContents + { + eInline = VK_SUBPASS_CONTENTS_INLINE, + eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS + }; + + struct PresentInfoKHR + { + PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr ) + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , swapchainCount( swapchainCount_ ) + , pSwapchains( pSwapchains_ ) + , pImageIndices( pImageIndices_ ) + , pResults( pResults_ ) + { + } + + PresentInfoKHR( VkPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentInfoKHR ) ); + } + + PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( PresentInfoKHR ) ); + return *this; + } + PresentInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ ) + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + + PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ ) + { + swapchainCount = swapchainCount_; + return *this; + } + + PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ ) + { + pSwapchains = pSwapchains_; + return *this; + } + + PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ ) + { + pImageIndices = pImageIndices_; + return *this; + } + + PresentInfoKHR& setPResults( Result* pResults_ ) + { + pResults = pResults_; + return *this; + } + + operator const VkPresentInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PresentInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphores == rhs.pWaitSemaphores ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pSwapchains 
== rhs.pSwapchains ) + && ( pImageIndices == rhs.pImageIndices ) + && ( pResults == rhs.pResults ); + } + + bool operator!=( PresentInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePresentInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreCount; + const Semaphore* pWaitSemaphores; + uint32_t swapchainCount; + const SwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + Result* pResults; + }; + static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); + + enum class DynamicState + { + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT + }; + + struct PipelineDynamicStateCreateInfo + { + PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr ) + : flags( flags_ ) + , dynamicStateCount( dynamicStateCount_ ) + , pDynamicStates( pDynamicStates_ ) + { + } + + PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) ); + } + + PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDynamicStateCreateInfo ) ); + return *this; + } + PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ ) + { + dynamicStateCount = dynamicStateCount_; + return *this; + } + + PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ ) + { + pDynamicStates = pDynamicStates_; + return *this; + } + + operator const VkPipelineDynamicStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( dynamicStateCount == rhs.dynamicStateCount ) + && ( pDynamicStates == rhs.pDynamicStates ); + } + + bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const DynamicState* pDynamicStates; + }; + static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" 
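+  // Usage sketch (illustrative comment, not part of the generated interface): the
+  // chained setters above allow the create info to be built fluently, for example
+  // to mark viewport and scissor as dynamic pipeline state:
+  //   vk::DynamicState dynamicStates[] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
+  //   auto dynamicStateInfo = vk::PipelineDynamicStateCreateInfo()
+  //                             .setDynamicStateCount( 2 )
+  //                             .setPDynamicStates( dynamicStates );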
); + + enum class DescriptorUpdateTemplateType + { + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + eDescriptorSetKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + }; + + struct DescriptorUpdateTemplateCreateInfo + { + DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateFlags flags_ = DescriptorUpdateTemplateCreateFlags(), uint32_t descriptorUpdateEntryCount_ = 0, const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr, DescriptorUpdateTemplateType templateType_ = DescriptorUpdateTemplateType::eDescriptorSet, DescriptorSetLayout descriptorSetLayout_ = DescriptorSetLayout(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, PipelineLayout pipelineLayout_ = PipelineLayout(), uint32_t set_ = 0 ) + : flags( flags_ ) + , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) + , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + { + } + + DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) ); + } + + DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorUpdateTemplateCreateInfo ) ); + return *this; + } + DescriptorUpdateTemplateCreateInfo& setPNext( void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setFlags( DescriptorUpdateTemplateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) + { + descriptorUpdateEntryCount = descriptorUpdateEntryCount_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setPDescriptorUpdateEntries( const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) + { + pDescriptorUpdateEntries = pDescriptorUpdateEntries_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setTemplateType( DescriptorUpdateTemplateType templateType_ ) + { + templateType = templateType_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ ) + { + descriptorSetLayout = descriptorSetLayout_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setPipelineLayout( PipelineLayout pipelineLayout_ ) + { + pipelineLayout = pipelineLayout_; + return *this; + } + + DescriptorUpdateTemplateCreateInfo& setSet( uint32_t set_ ) + { + set = set_; + return *this; + } + + operator const VkDescriptorUpdateTemplateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) + && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) + && ( templateType == rhs.templateType ) + && ( descriptorSetLayout == rhs.descriptorSetLayout ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( pipelineLayout == 
rhs.pipelineLayout ) + && ( set == rhs.set ); + } + + bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; + + public: + void* pNext = nullptr; + DescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + DescriptorUpdateTemplateType templateType; + DescriptorSetLayout descriptorSetLayout; + PipelineBindPoint pipelineBindPoint; + PipelineLayout pipelineLayout; + uint32_t set; + }; + static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" ); + + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + + enum class ObjectType + { + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, + eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, + eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX, + eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, + eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT + }; + + struct DebugUtilsObjectNameInfoEXT + { + DebugUtilsObjectNameInfoEXT( ObjectType objectType_ = ObjectType::eUnknown, uint64_t objectHandle_ = 0, const char* pObjectName_ = nullptr ) + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , pObjectName( pObjectName_ ) + { + } + + DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) ); + } + + DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsObjectNameInfoEXT ) ); + return *this; + } + DebugUtilsObjectNameInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + 
DebugUtilsObjectNameInfoEXT& setObjectType( ObjectType objectType_ ) + { + objectType = objectType_; + return *this; + } + + DebugUtilsObjectNameInfoEXT& setObjectHandle( uint64_t objectHandle_ ) + { + objectHandle = objectHandle_; + return *this; + } + + DebugUtilsObjectNameInfoEXT& setPObjectName( const char* pObjectName_ ) + { + pObjectName = pObjectName_; + return *this; + } + + operator const VkDebugUtilsObjectNameInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectType == rhs.objectType ) + && ( objectHandle == rhs.objectHandle ) + && ( pObjectName == rhs.pObjectName ); + } + + bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; + + public: + const void* pNext = nullptr; + ObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; + }; + static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" ); + + struct DebugUtilsObjectTagInfoEXT + { + DebugUtilsObjectTagInfoEXT( ObjectType objectType_ = ObjectType::eUnknown, uint64_t objectHandle_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr ) + : objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + { + } + + DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) ); + } + + DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsObjectTagInfoEXT ) ); + return *this; + } + DebugUtilsObjectTagInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setObjectType( ObjectType objectType_ ) + { + objectType = objectType_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setObjectHandle( uint64_t objectHandle_ ) + { + objectHandle = objectHandle_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setTagName( uint64_t tagName_ ) + { + tagName = tagName_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setTagSize( size_t tagSize_ ) + { + tagSize = tagSize_; + return *this; + } + + DebugUtilsObjectTagInfoEXT& setPTag( const void* pTag_ ) + { + pTag = pTag_; + return *this; + } + + operator const VkDebugUtilsObjectTagInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectType == rhs.objectType ) + && ( objectHandle == rhs.objectHandle ) + && ( tagName == rhs.tagName ) + && ( tagSize == rhs.tagSize ) + && ( pTag == rhs.pTag ); + } + + bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; + + public: + const void* pNext = nullptr; + ObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; + }; + static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" 
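+  // Usage sketch (illustrative comment, not part of the generated interface): a
+  // readable debug name is attached to a handle by filling the
+  // DebugUtilsObjectNameInfoEXT above with the handle's ObjectType and raw 64-bit
+  // value and passing it to vkSetDebugUtilsObjectNameEXT. "buffer" is an assumed
+  // vk::Buffer and the VK_EXT_debug_utils entry points are assumed to be loaded:
+  //   auto nameInfo = vk::DebugUtilsObjectNameInfoEXT()
+  //                     .setObjectType( vk::ObjectType::eBuffer )
+  //                     .setObjectHandle( uint64_t( static_cast<VkBuffer>( buffer ) ) )
+  //                     .setPObjectName( "vertex buffer" );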
); + + struct DebugUtilsMessengerCallbackDataEXT + { + DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataFlagsEXT flags_ = DebugUtilsMessengerCallbackDataFlagsEXT(), const char* pMessageIdName_ = nullptr, int32_t messageIdNumber_ = 0, const char* pMessage_ = nullptr, uint32_t queueLabelCount_ = 0, DebugUtilsLabelEXT* pQueueLabels_ = nullptr, uint32_t cmdBufLabelCount_ = 0, DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr, uint32_t objectCount_ = 0, DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr ) + : flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ ) + , queueLabelCount( queueLabelCount_ ) + , pQueueLabels( pQueueLabels_ ) + , cmdBufLabelCount( cmdBufLabelCount_ ) + , pCmdBufLabels( pCmdBufLabels_ ) + , objectCount( objectCount_ ) + , pObjects( pObjects_ ) + { + } + + DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) ); + } + + DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsMessengerCallbackDataEXT ) ); + return *this; + } + DebugUtilsMessengerCallbackDataEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setFlags( DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPMessageIdName( const char* pMessageIdName_ ) + { + pMessageIdName = pMessageIdName_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setMessageIdNumber( int32_t messageIdNumber_ ) + { + messageIdNumber = messageIdNumber_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPMessage( const char* pMessage_ ) + { + pMessage = pMessage_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setQueueLabelCount( uint32_t queueLabelCount_ ) + { + queueLabelCount = queueLabelCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPQueueLabels( DebugUtilsLabelEXT* pQueueLabels_ ) + { + pQueueLabels = pQueueLabels_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) + { + cmdBufLabelCount = cmdBufLabelCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPCmdBufLabels( DebugUtilsLabelEXT* pCmdBufLabels_ ) + { + pCmdBufLabels = pCmdBufLabels_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setObjectCount( uint32_t objectCount_ ) + { + objectCount = objectCount_; + return *this; + } + + DebugUtilsMessengerCallbackDataEXT& setPObjects( DebugUtilsObjectNameInfoEXT* pObjects_ ) + { + pObjects = pObjects_; + return *this; + } + + operator const VkDebugUtilsMessengerCallbackDataEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pMessageIdName == rhs.pMessageIdName ) + && ( messageIdNumber == rhs.messageIdNumber ) + && ( pMessage == rhs.pMessage ) + && ( queueLabelCount == rhs.queueLabelCount ) + && ( pQueueLabels == rhs.pQueueLabels ) + && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) + && ( pCmdBufLabels == rhs.pCmdBufLabels ) + && ( objectCount == rhs.objectCount ) + && ( pObjects == rhs.pObjects ); + } + + bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const + { + return !operator==( rhs ); 
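+  // Usage sketch (illustrative comment, not part of the generated interface): this
+  // is the struct handed to a VK_EXT_debug_utils messenger callback; a minimal
+  // callback usually just logs pMessage (pMessageIdName may be null):
+  //   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(
+  //       VkDebugUtilsMessageSeverityFlagBitsEXT /*severity*/,
+  //       VkDebugUtilsMessageTypeFlagsEXT /*types*/,
+  //       const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
+  //       void* /*pUserData*/ )
+  //   {
+  //     printf( "%s\n", pCallbackData->pMessage );
+  //     return VK_FALSE;  // never abort the call that triggered the message
+  //   }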
+ } + + private: + StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + + public: + const void* pNext = nullptr; + DebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + DebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + DebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + DebugUtilsObjectNameInfoEXT* pObjects; + }; + static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" ); + + enum class QueueFlagBits + { + eGraphics = VK_QUEUE_GRAPHICS_BIT, + eCompute = VK_QUEUE_COMPUTE_BIT, + eTransfer = VK_QUEUE_TRANSFER_BIT, + eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, + eProtected = VK_QUEUE_PROTECTED_BIT + }; + + using QueueFlags = Flags; + + VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) + { + return QueueFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits ) + { + return ~( QueueFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected) + }; + }; + + struct QueueFamilyProperties + { + operator const VkQueueFamilyProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( QueueFamilyProperties const& rhs ) const + { + return ( queueFlags == rhs.queueFlags ) + && ( queueCount == rhs.queueCount ) + && ( timestampValidBits == rhs.timestampValidBits ) + && ( minImageTransferGranularity == rhs.minImageTransferGranularity ); + } + + bool operator!=( QueueFamilyProperties const& rhs ) const + { + return !operator==( rhs ); + } + + QueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + Extent3D minImageTransferGranularity; + }; + static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" ); + + struct QueueFamilyProperties2 + { + operator const VkQueueFamilyProperties2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( QueueFamilyProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( queueFamilyProperties == rhs.queueFamilyProperties ); + } + + bool operator!=( QueueFamilyProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eQueueFamilyProperties2; + + public: + void* pNext = nullptr; + QueueFamilyProperties queueFamilyProperties; + }; + static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" 
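+  // Usage sketch (illustrative comment, not part of the generated interface): queue
+  // family selection typically tests queueFlags against the bits above.
+  // "physicalDevice" is an assumed vk::PhysicalDevice and getQueueFamilyProperties
+  // is the wrapper generated elsewhere in this header:
+  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
+  //   uint32_t graphicsFamily = ~0u;
+  //   for ( uint32_t i = 0; i < families.size(); ++i )
+  //     if ( families[i].queueCount > 0 && ( families[i].queueFlags & vk::QueueFlagBits::eGraphics ) )
+  //       { graphicsFamily = i; break; }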
); + + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + + enum class DeviceQueueCreateFlagBits + { + eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + }; + + using DeviceQueueCreateFlags = Flags; + + VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) + { + return DeviceQueueCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) + { + return ~( DeviceQueueCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected) + }; + }; + + struct DeviceQueueCreateInfo + { + DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), uint32_t queueFamilyIndex_ = 0, uint32_t queueCount_ = 0, const float* pQueuePriorities_ = nullptr ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( queueCount_ ) + , pQueuePriorities( pQueuePriorities_ ) + { + } + + DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) ); + } + + DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueCreateInfo ) ); + return *this; + } + DeviceQueueCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceQueueCreateInfo& setFlags( DeviceQueueCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DeviceQueueCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + DeviceQueueCreateInfo& setQueueCount( uint32_t queueCount_ ) + { + queueCount = queueCount_; + return *this; + } + + DeviceQueueCreateInfo& setPQueuePriorities( const float* pQueuePriorities_ ) + { + pQueuePriorities = pQueuePriorities_; + return *this; + } + + operator const VkDeviceQueueCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceQueueCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queueFamilyIndex == rhs.queueFamilyIndex ) + && ( queueCount == rhs.queueCount ) + && ( pQueuePriorities == rhs.pQueuePriorities ); + } + + bool operator!=( DeviceQueueCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceQueueCreateInfo; + + public: + const void* pNext = nullptr; + DeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; + }; + static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" 
); + + struct DeviceCreateInfo + { + DeviceCreateInfo( DeviceCreateFlags flags_ = DeviceCreateFlags(), uint32_t queueCreateInfoCount_ = 0, const DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr, const PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr ) + : flags( flags_ ) + , queueCreateInfoCount( queueCreateInfoCount_ ) + , pQueueCreateInfos( pQueueCreateInfos_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + , pEnabledFeatures( pEnabledFeatures_ ) + { + } + + DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceCreateInfo ) ); + } + + DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceCreateInfo ) ); + return *this; + } + DeviceCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceCreateInfo& setFlags( DeviceCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DeviceCreateInfo& setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) + { + queueCreateInfoCount = queueCreateInfoCount_; + return *this; + } + + DeviceCreateInfo& setPQueueCreateInfos( const DeviceQueueCreateInfo* pQueueCreateInfos_ ) + { + pQueueCreateInfos = pQueueCreateInfos_; + return *this; + } + + DeviceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ ) + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + DeviceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + + DeviceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + DeviceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + + DeviceCreateInfo& setPEnabledFeatures( const PhysicalDeviceFeatures* pEnabledFeatures_ ) + { + pEnabledFeatures = pEnabledFeatures_; + return *this; + } + + operator const VkDeviceCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) + && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) + && ( enabledLayerCount == rhs.enabledLayerCount ) + && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) + && ( enabledExtensionCount == rhs.enabledExtensionCount ) + && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) + && ( pEnabledFeatures == rhs.pEnabledFeatures ); + } + + bool operator!=( DeviceCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceCreateInfo; + + public: + const void* pNext = nullptr; + DeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const DeviceQueueCreateInfo* pQueueCreateInfos; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const PhysicalDeviceFeatures* pEnabledFeatures; + }; + static_assert( sizeof( DeviceCreateInfo ) == 
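+  // Usage sketch (illustrative comment, not part of the generated interface): a
+  // logical device is created from one or more DeviceQueueCreateInfo entries fed to
+  // the DeviceCreateInfo above. "physicalDevice" and "graphicsFamily" are assumed
+  // from earlier queue family selection, and createDevice is the wrapper generated
+  // elsewhere in this header:
+  //   float priority = 1.0f;
+  //   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsFamily, 1, &priority );
+  //   vk::DeviceCreateInfo deviceInfo( {}, 1, &queueInfo );
+  //   vk::Device device = physicalDevice.createDevice( deviceInfo );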
sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); + + struct DeviceQueueInfo2 + { + DeviceQueueInfo2( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), uint32_t queueFamilyIndex_ = 0, uint32_t queueIndex_ = 0 ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueIndex( queueIndex_ ) + { + } + + DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueInfo2 ) ); + } + + DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueInfo2 ) ); + return *this; + } + DeviceQueueInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceQueueInfo2& setFlags( DeviceQueueCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DeviceQueueInfo2& setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + DeviceQueueInfo2& setQueueIndex( uint32_t queueIndex_ ) + { + queueIndex = queueIndex_; + return *this; + } + + operator const VkDeviceQueueInfo2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceQueueInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queueFamilyIndex == rhs.queueFamilyIndex ) + && ( queueIndex == rhs.queueIndex ); + } + + bool operator!=( DeviceQueueInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceQueueInfo2; + + public: + const void* pNext = nullptr; + DeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueIndex; + }; + static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" ); + + enum class MemoryPropertyFlagBits + { + eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, + eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, + eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, + eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT + }; + + using MemoryPropertyFlags = Flags; + + VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) + { + return MemoryPropertyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) + { + return ~( MemoryPropertyFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) + }; + }; + + struct MemoryType + { + operator const VkMemoryType&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryType const& rhs ) const + { + return ( propertyFlags == rhs.propertyFlags ) + && ( heapIndex == rhs.heapIndex ); + } + + bool operator!=( MemoryType const& rhs ) const + { + return !operator==( rhs ); + } + + MemoryPropertyFlags propertyFlags; + uint32_t heapIndex; + }; + static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" 
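+  // Usage sketch (illustrative comment, not part of the generated interface): the
+  // DeviceQueueInfo2 above identifies a queue by create flags, family and index for
+  // vkGetDeviceQueue2; "device" and "graphicsFamily" are assumed, and the getQueue2
+  // wrapper is assumed to be generated elsewhere in this header:
+  //   vk::DeviceQueueInfo2 queueInfo( {}, graphicsFamily, 0 );
+  //   vk::Queue queue = device.getQueue2( queueInfo );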
); + + enum class MemoryHeapFlagBits + { + eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, + eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT + }; + + using MemoryHeapFlags = Flags; + + VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) + { + return MemoryHeapFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) + { + return ~( MemoryHeapFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance) + }; + }; + + struct MemoryHeap + { + operator const VkMemoryHeap&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryHeap const& rhs ) const + { + return ( size == rhs.size ) + && ( flags == rhs.flags ); + } + + bool operator!=( MemoryHeap const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize size; + MemoryHeapFlags flags; + }; + static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceMemoryProperties + { + operator const VkPhysicalDeviceMemoryProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const + { + return ( memoryTypeCount == rhs.memoryTypeCount ) + && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( MemoryType ) ) == 0 ) + && ( memoryHeapCount == rhs.memoryHeapCount ) + && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( MemoryHeap ) ) == 0 ); + } + + bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t memoryTypeCount; + MemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; + }; + static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceMemoryProperties2 + { + operator const VkPhysicalDeviceMemoryProperties2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryProperties == rhs.memoryProperties ); + } + + bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; + + public: + void* pNext = nullptr; + PhysicalDeviceMemoryProperties memoryProperties; + }; + static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" 
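+  // Usage sketch (illustrative comment, not part of the generated interface): a
+  // common helper scans PhysicalDeviceMemoryProperties::memoryTypes above for an
+  // index allowed by a requirements bitmask ("typeBits", from a memory requirements
+  // query) that also has the requested properties ("required", e.g.
+  // eHostVisible | eHostCoherent); "physicalDevice" is an assumed vk::PhysicalDevice:
+  //   vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
+  //   uint32_t memoryTypeIndex = ~0u;
+  //   for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
+  //     if ( ( typeBits & ( 1u << i ) ) && ( ( memProps.memoryTypes[i].propertyFlags & required ) == required ) )
+  //       { memoryTypeIndex = i; break; }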
); + + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + + enum class AccessFlagBits + { + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX, + eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT + }; + + using AccessFlags = Flags; + + VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) + { + return AccessFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits ) + { + return ~( AccessFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) + }; + }; + + struct MemoryBarrier + { + MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags() ) + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + { + } + + MemoryBarrier( VkMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryBarrier ) ); + } + + MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryBarrier ) ); + return *this; + } + MemoryBarrier& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + MemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + operator const VkMemoryBarrier&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryBarrier const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( 
dstAccessMask == rhs.dstAccessMask ); + } + + bool operator!=( MemoryBarrier const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryBarrier; + + public: + const void* pNext = nullptr; + AccessFlags srcAccessMask; + AccessFlags dstAccessMask; + }; + static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); + + struct BufferMemoryBarrier + { + BufferMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize size_ = 0 ) + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) ); + } + + BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferMemoryBarrier ) ); + return *this; + } + BufferMemoryBarrier& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BufferMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + BufferMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + BufferMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + BufferMemoryBarrier& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + BufferMemoryBarrier& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + operator const VkBufferMemoryBarrier&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BufferMemoryBarrier const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) + && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) + && ( buffer == rhs.buffer ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( BufferMemoryBarrier const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBufferMemoryBarrier; + + public: + const void* pNext = nullptr; + AccessFlags srcAccessMask; + AccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + Buffer buffer; + DeviceSize offset; + DeviceSize size; + }; + static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" 
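+  // Usage sketch (illustrative comment, not part of the generated interface): a
+  // typical BufferMemoryBarrier (above) makes a transfer write visible to later
+  // shader reads over the whole of an assumed vk::Buffer "buffer"; it would then be
+  // recorded with the CommandBuffer::pipelineBarrier wrapper defined elsewhere in
+  // this header:
+  //   auto barrier = vk::BufferMemoryBarrier()
+  //                    .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
+  //                    .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
+  //                    .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //                    .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //                    .setBuffer( buffer )
+  //                    .setSize( VK_WHOLE_SIZE );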
); + + enum class BufferUsageFlagBits + { + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT + }; + + using BufferUsageFlags = Flags; + + VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) + { + return BufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits ) + { + return ~( BufferUsageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) + }; + }; + + enum class BufferCreateFlagBits + { + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT + }; + + using BufferCreateFlags = Flags; + + VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) + { + return BufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits ) + { + return ~( BufferCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) + }; + }; + + struct BufferCreateInfo + { + BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), DeviceSize size_ = 0, BufferUsageFlags usage_ = BufferUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr ) + : flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + { + } + + BufferCreateInfo( VkBufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferCreateInfo ) ); + } + + BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferCreateInfo ) ); + return *this; + } + BufferCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BufferCreateInfo& setFlags( BufferCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + BufferCreateInfo& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + BufferCreateInfo& setUsage( BufferUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + BufferCreateInfo& setSharingMode( SharingMode sharingMode_ ) + { + sharingMode = sharingMode_; + return *this; + } + + BufferCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) + 
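+  // Usage sketch (illustrative comment, not part of the generated interface):
+  // BufferCreateInfo's usage bits combine with the | operator defined above, e.g. a
+  // 64 KiB vertex buffer that is also a transfer destination; "device" is an assumed
+  // vk::Device and createBuffer is the wrapper generated elsewhere in this header:
+  //   vk::BufferCreateInfo bufferInfo( {}, 65536,
+  //                                    vk::BufferUsageFlagBits::eVertexBuffer | vk::BufferUsageFlagBits::eTransferDst,
+  //                                    vk::SharingMode::eExclusive );
+  //   vk::Buffer buffer = device.createBuffer( bufferInfo );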
{ + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + BufferCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + + operator const VkBufferCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BufferCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( size == rhs.size ) + && ( usage == rhs.usage ) + && ( sharingMode == rhs.sharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + } + + bool operator!=( BufferCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBufferCreateInfo; + + public: + const void* pNext = nullptr; + BufferCreateFlags flags; + DeviceSize size; + BufferUsageFlags usage; + SharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + }; + static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" ); + + enum class ShaderStageFlagBits + { + eVertex = VK_SHADER_STAGE_VERTEX_BIT, + eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, + eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, + eCompute = VK_SHADER_STAGE_COMPUTE_BIT, + eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, + eAll = VK_SHADER_STAGE_ALL + }; + + using ShaderStageFlags = Flags; + + VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) + { + return ShaderStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits ) + { + return ~( ShaderStageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) + }; + }; + + struct DescriptorSetLayoutBinding + { + DescriptorSetLayoutBinding( uint32_t binding_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0, ShaderStageFlags stageFlags_ = ShaderStageFlags(), const Sampler* pImmutableSamplers_ = nullptr ) + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( descriptorCount_ ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( pImmutableSamplers_ ) + { + } + + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) ); + } + + DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutBinding ) ); + return *this; + } + DescriptorSetLayoutBinding& setBinding( uint32_t binding_ ) + { + binding = binding_; + return *this; + } + + DescriptorSetLayoutBinding& setDescriptorType( DescriptorType descriptorType_ ) + { + descriptorType = descriptorType_; + return *this; + } + + DescriptorSetLayoutBinding& setDescriptorCount( uint32_t descriptorCount_ ) + { + descriptorCount = descriptorCount_; + return *this; + } + + 
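+  // Usage sketch (illustrative comment, not part of the generated interface): a
+  // DescriptorSetLayoutBinding declaring binding 0 as a single uniform buffer
+  // visible to the vertex stage:
+  //   vk::DescriptorSetLayoutBinding uboBinding( 0, vk::DescriptorType::eUniformBuffer, 1,
+  //                                              vk::ShaderStageFlagBits::eVertex );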
DescriptorSetLayoutBinding& setStageFlags( ShaderStageFlags stageFlags_ ) + { + stageFlags = stageFlags_; + return *this; + } + + DescriptorSetLayoutBinding& setPImmutableSamplers( const Sampler* pImmutableSamplers_ ) + { + pImmutableSamplers = pImmutableSamplers_; + return *this; + } + + operator const VkDescriptorSetLayoutBinding&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetLayoutBinding const& rhs ) const + { + return ( binding == rhs.binding ) + && ( descriptorType == rhs.descriptorType ) + && ( descriptorCount == rhs.descriptorCount ) + && ( stageFlags == rhs.stageFlags ) + && ( pImmutableSamplers == rhs.pImmutableSamplers ); + } + + bool operator!=( DescriptorSetLayoutBinding const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t binding; + DescriptorType descriptorType; + uint32_t descriptorCount; + ShaderStageFlags stageFlags; + const Sampler* pImmutableSamplers; + }; + static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" ); + + struct PipelineShaderStageCreateInfo + { + PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = PipelineShaderStageCreateFlags(), ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex, ShaderModule module_ = ShaderModule(), const char* pName_ = nullptr, const SpecializationInfo* pSpecializationInfo_ = nullptr ) + : flags( flags_ ) + , stage( stage_ ) + , module( module_ ) + , pName( pName_ ) + , pSpecializationInfo( pSpecializationInfo_ ) + { + } + + PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) ); + } + + PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineShaderStageCreateInfo ) ); + return *this; + } + PipelineShaderStageCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineShaderStageCreateInfo& setFlags( PipelineShaderStageCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineShaderStageCreateInfo& setStage( ShaderStageFlagBits stage_ ) + { + stage = stage_; + return *this; + } + + PipelineShaderStageCreateInfo& setModule( ShaderModule module_ ) + { + module = module_; + return *this; + } + + PipelineShaderStageCreateInfo& setPName( const char* pName_ ) + { + pName = pName_; + return *this; + } + + PipelineShaderStageCreateInfo& setPSpecializationInfo( const SpecializationInfo* pSpecializationInfo_ ) + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } + + operator const VkPipelineShaderStageCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineShaderStageCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stage == rhs.stage ) + && ( module == rhs.module ) + && ( pName == rhs.pName ) + && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + + public: + const void* pNext = nullptr; + PipelineShaderStageCreateFlags flags; + ShaderStageFlagBits stage; + ShaderModule module; + const char* pName; + const SpecializationInfo* pSpecializationInfo; + }; + static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( 
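+  // Usage sketch (illustrative comment, not part of the generated interface): a
+  // PipelineShaderStageCreateInfo for the vertex stage, referencing an assumed,
+  // previously created vk::ShaderModule "vertModule" and the conventional "main"
+  // entry point:
+  //   vk::PipelineShaderStageCreateInfo vertStage( {}, vk::ShaderStageFlagBits::eVertex,
+  //                                                vertModule, "main" );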
VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" ); + + struct PushConstantRange + { + PushConstantRange( ShaderStageFlags stageFlags_ = ShaderStageFlags(), uint32_t offset_ = 0, uint32_t size_ = 0 ) + : stageFlags( stageFlags_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + PushConstantRange( VkPushConstantRange const & rhs ) + { + memcpy( this, &rhs, sizeof( PushConstantRange ) ); + } + + PushConstantRange& operator=( VkPushConstantRange const & rhs ) + { + memcpy( this, &rhs, sizeof( PushConstantRange ) ); + return *this; + } + PushConstantRange& setStageFlags( ShaderStageFlags stageFlags_ ) + { + stageFlags = stageFlags_; + return *this; + } + + PushConstantRange& setOffset( uint32_t offset_ ) + { + offset = offset_; + return *this; + } + + PushConstantRange& setSize( uint32_t size_ ) + { + size = size_; + return *this; + } + + operator const VkPushConstantRange&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PushConstantRange const& rhs ) const + { + return ( stageFlags == rhs.stageFlags ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( PushConstantRange const& rhs ) const + { + return !operator==( rhs ); + } + + ShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; + }; + static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" ); + + struct PipelineLayoutCreateInfo + { + PipelineLayoutCreateInfo( PipelineLayoutCreateFlags flags_ = PipelineLayoutCreateFlags(), uint32_t setLayoutCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr, uint32_t pushConstantRangeCount_ = 0, const PushConstantRange* pPushConstantRanges_ = nullptr ) + : flags( flags_ ) + , setLayoutCount( setLayoutCount_ ) + , pSetLayouts( pSetLayouts_ ) + , pushConstantRangeCount( pushConstantRangeCount_ ) + , pPushConstantRanges( pPushConstantRanges_ ) + { + } + + PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) ); + } + + PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineLayoutCreateInfo ) ); + return *this; + } + PipelineLayoutCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineLayoutCreateInfo& setFlags( PipelineLayoutCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineLayoutCreateInfo& setSetLayoutCount( uint32_t setLayoutCount_ ) + { + setLayoutCount = setLayoutCount_; + return *this; + } + + PipelineLayoutCreateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ ) + { + pSetLayouts = pSetLayouts_; + return *this; + } + + PipelineLayoutCreateInfo& setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + PipelineLayoutCreateInfo& setPPushConstantRanges( const PushConstantRange* pPushConstantRanges_ ) + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + + operator const VkPipelineLayoutCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineLayoutCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( setLayoutCount == rhs.setLayoutCount ) + && ( pSetLayouts == rhs.pSetLayouts ) + && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) + && ( pPushConstantRanges == rhs.pPushConstantRanges ); + } + + bool 
operator!=( PipelineLayoutCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineLayoutCreateInfo; + + public: + const void* pNext = nullptr; + PipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const DescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const PushConstantRange* pPushConstantRanges; + }; + static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" ); + + struct ShaderStatisticsInfoAMD + { + operator const VkShaderStatisticsInfoAMD&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ShaderStatisticsInfoAMD const& rhs ) const + { + return ( shaderStageMask == rhs.shaderStageMask ) + && ( resourceUsage == rhs.resourceUsage ) + && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) + && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) + && ( numAvailableVgprs == rhs.numAvailableVgprs ) + && ( numAvailableSgprs == rhs.numAvailableSgprs ) + && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ); + } + + bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const + { + return !operator==( rhs ); + } + + ShaderStageFlags shaderStageMask; + ShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; + }; + static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" ); + + enum class ImageUsageFlagBits + { + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT + }; + + using ImageUsageFlags = Flags; + + VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) + { + return ImageUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits ) + { + return ~( ImageUsageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) + }; + }; + + struct SharedPresentSurfaceCapabilitiesKHR + { + operator const VkSharedPresentSurfaceCapabilitiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); + } + + bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + + public: + void* pNext = nullptr; + ImageUsageFlags sharedPresentSupportedUsageFlags; + }; + static_assert( sizeof( 
SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); + + struct ImageViewUsageCreateInfo + { + ImageViewUsageCreateInfo( ImageUsageFlags usage_ = ImageUsageFlags() ) + : usage( usage_ ) + { + } + + ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewUsageCreateInfo ) ); + } + + ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewUsageCreateInfo ) ); + return *this; + } + ImageViewUsageCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageViewUsageCreateInfo& setUsage( ImageUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + operator const VkImageViewUsageCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageViewUsageCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( usage == rhs.usage ); + } + + bool operator!=( ImageViewUsageCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageViewUsageCreateInfo; + + public: + const void* pNext = nullptr; + ImageUsageFlags usage; + }; + static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" ); + + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + + enum class ImageCreateFlagBits + { + eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, + eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, + eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, + eAlias = VK_IMAGE_CREATE_ALIAS_BIT, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT, + eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, + eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, + eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT, + eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT + }; + + using ImageCreateFlags = Flags; + + VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) + { + return ImageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits ) + { + return ~( ImageCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::eAlias) | VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) | VkFlags(ImageCreateFlagBits::e2DArrayCompatible) | VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) | 
VkFlags(ImageCreateFlagBits::eExtendedUsage) | VkFlags(ImageCreateFlagBits::eProtected) | VkFlags(ImageCreateFlagBits::eDisjoint) | VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) + }; + }; + + struct PhysicalDeviceImageFormatInfo2 + { + PhysicalDeviceImageFormatInfo2( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), ImageCreateFlags flags_ = ImageCreateFlags() ) + : format( format_ ) + , type( type_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , flags( flags_ ) + { + } + + PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) ); + } + + PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceImageFormatInfo2 ) ); + return *this; + } + PhysicalDeviceImageFormatInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setType( ImageType type_ ) + { + type = type_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setTiling( ImageTiling tiling_ ) + { + tiling = tiling_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setUsage( ImageUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + PhysicalDeviceImageFormatInfo2& setFlags( ImageCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator const VkPhysicalDeviceImageFormatInfo2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( type == rhs.type ) + && ( tiling == rhs.tiling ) + && ( usage == rhs.usage ) + && ( flags == rhs.flags ); + } + + bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + + public: + const void* pNext = nullptr; + Format format; + ImageType type; + ImageTiling tiling; + ImageUsageFlags usage; + ImageCreateFlags flags; + }; + static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" 
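+  // Illustrative usage sketch (assumes the enclosing vk namespace): the chainable setters above,
+  // together with the ImageUsageFlagBits and ImageCreateFlagBits operator| overloads, let a
+  // format query be filled in a single expression, e.g.:
+  //
+  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo = vk::PhysicalDeviceImageFormatInfo2()
+  //       .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //       .setType( vk::ImageType::e2D )
+  //       .setTiling( vk::ImageTiling::eOptimal )
+  //       .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst );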
); + + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + + enum class PipelineCreateFlagBits + { + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE + }; + + using PipelineCreateFlags = Flags; + + VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) + { + return PipelineCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) + { + return ~( PipelineCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) + }; + }; + + struct ComputePipelineCreateInfo + { + ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), PipelineLayout layout_ = PipelineLayout(), Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 ) + : flags( flags_ ) + , stage( stage_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) ); + } + + ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ComputePipelineCreateInfo ) ); + return *this; + } + ComputePipelineCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ComputePipelineCreateInfo& setFlags( PipelineCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + ComputePipelineCreateInfo& setStage( PipelineShaderStageCreateInfo stage_ ) + { + stage = stage_; + return *this; + } + + ComputePipelineCreateInfo& setLayout( PipelineLayout layout_ ) + { + layout = layout_; + return *this; + } + + ComputePipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ ) + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + ComputePipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ ) + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator const VkComputePipelineCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ComputePipelineCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stage == rhs.stage ) + && ( layout == rhs.layout ) + && ( basePipelineHandle == rhs.basePipelineHandle ) + && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( ComputePipelineCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eComputePipelineCreateInfo; + + public: + const void* pNext = nullptr; + PipelineCreateFlags flags; + PipelineShaderStageCreateInfo stage; + PipelineLayout layout; + Pipeline 
basePipelineHandle; + int32_t basePipelineIndex; + }; + static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" ); + + enum class ColorComponentFlagBits + { + eR = VK_COLOR_COMPONENT_R_BIT, + eG = VK_COLOR_COMPONENT_G_BIT, + eB = VK_COLOR_COMPONENT_B_BIT, + eA = VK_COLOR_COMPONENT_A_BIT + }; + + using ColorComponentFlags = Flags; + + VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) + { + return ColorComponentFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits ) + { + return ~( ColorComponentFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA) + }; + }; + + struct PipelineColorBlendAttachmentState + { + PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd, ColorComponentFlags colorWriteMask_ = ColorComponentFlags() ) + : blendEnable( blendEnable_ ) + , srcColorBlendFactor( srcColorBlendFactor_ ) + , dstColorBlendFactor( dstColorBlendFactor_ ) + , colorBlendOp( colorBlendOp_ ) + , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) + , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) + , alphaBlendOp( alphaBlendOp_ ) + , colorWriteMask( colorWriteMask_ ) + { + } + + PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) ); + } + + PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendAttachmentState ) ); + return *this; + } + PipelineColorBlendAttachmentState& setBlendEnable( Bool32 blendEnable_ ) + { + blendEnable = blendEnable_; + return *this; + } + + PipelineColorBlendAttachmentState& setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ ) + { + srcColorBlendFactor = srcColorBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState& setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ ) + { + dstColorBlendFactor = dstColorBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState& setColorBlendOp( BlendOp colorBlendOp_ ) + { + colorBlendOp = colorBlendOp_; + return *this; + } + + PipelineColorBlendAttachmentState& setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ ) + { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState& setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ ) + { + dstAlphaBlendFactor = dstAlphaBlendFactor_; + return *this; + } + + PipelineColorBlendAttachmentState& setAlphaBlendOp( BlendOp alphaBlendOp_ ) + { + alphaBlendOp = alphaBlendOp_; + return *this; + } + + PipelineColorBlendAttachmentState& setColorWriteMask( ColorComponentFlags colorWriteMask_ ) + { + colorWriteMask = colorWriteMask_; + return *this; + } + + operator const VkPipelineColorBlendAttachmentState&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineColorBlendAttachmentState const& rhs ) const + { + return ( blendEnable == rhs.blendEnable 
) + && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) + && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) + && ( colorBlendOp == rhs.colorBlendOp ) + && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) + && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) + && ( alphaBlendOp == rhs.alphaBlendOp ) + && ( colorWriteMask == rhs.colorWriteMask ); + } + + bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const + { + return !operator==( rhs ); + } + + Bool32 blendEnable; + BlendFactor srcColorBlendFactor; + BlendFactor dstColorBlendFactor; + BlendOp colorBlendOp; + BlendFactor srcAlphaBlendFactor; + BlendFactor dstAlphaBlendFactor; + BlendOp alphaBlendOp; + ColorComponentFlags colorWriteMask; + }; + static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" ); + + struct PipelineColorBlendStateCreateInfo + { + PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_ = PipelineColorBlendStateCreateFlags(), Bool32 logicOpEnable_ = 0, LogicOp logicOp_ = LogicOp::eClear, uint32_t attachmentCount_ = 0, const PipelineColorBlendAttachmentState* pAttachments_ = nullptr, std::array const& blendConstants_ = { { 0, 0, 0, 0 } } ) + : flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + { + memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) ); + } + + PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) ); + } + + PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendStateCreateInfo ) ); + return *this; + } + PipelineColorBlendStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setFlags( PipelineColorBlendStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setLogicOpEnable( Bool32 logicOpEnable_ ) + { + logicOpEnable = logicOpEnable_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setLogicOp( LogicOp logicOp_ ) + { + logicOp = logicOp_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setAttachmentCount( uint32_t attachmentCount_ ) + { + attachmentCount = attachmentCount_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setPAttachments( const PipelineColorBlendAttachmentState* pAttachments_ ) + { + pAttachments = pAttachments_; + return *this; + } + + PipelineColorBlendStateCreateInfo& setBlendConstants( std::array blendConstants_ ) + { + memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) ); + return *this; + } + + operator const VkPipelineColorBlendStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( logicOpEnable == rhs.logicOpEnable ) + && ( logicOp == rhs.logicOp ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 ); + } + + bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = 
StructureType::ePipelineColorBlendStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineColorBlendStateCreateFlags flags; + Bool32 logicOpEnable; + LogicOp logicOp; + uint32_t attachmentCount; + const PipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; + }; + static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" ); + + enum class FenceCreateFlagBits + { + eSignaled = VK_FENCE_CREATE_SIGNALED_BIT + }; + + using FenceCreateFlags = Flags; + + VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) + { + return FenceCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits ) + { + return ~( FenceCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(FenceCreateFlagBits::eSignaled) + }; + }; + + struct FenceCreateInfo + { + FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() ) + : flags( flags_ ) + { + } + + FenceCreateInfo( VkFenceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceCreateInfo ) ); + } + + FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceCreateInfo ) ); + return *this; + } + FenceCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + FenceCreateInfo& setFlags( FenceCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator const VkFenceCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( FenceCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( FenceCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eFenceCreateInfo; + + public: + const void* pNext = nullptr; + FenceCreateFlags flags; + }; + static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" 
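+  // Illustrative usage sketch (assumes the enclosing vk namespace and a valid vk::Device named
+  // device): FenceCreateFlagBits::eSignaled requests a fence that starts out signaled, which
+  // avoids special-casing the first wait in a typical render loop:
+  //
+  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );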
); + + enum class FormatFeatureFlagBits + { + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, + eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT + }; + + using FormatFeatureFlags = Flags; + + VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) + { + return FormatFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) + { + return ~( FormatFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | 
VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) + }; + }; + + struct FormatProperties + { + operator const VkFormatProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( FormatProperties const& rhs ) const + { + return ( linearTilingFeatures == rhs.linearTilingFeatures ) + && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) + && ( bufferFeatures == rhs.bufferFeatures ); + } + + bool operator!=( FormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + FormatFeatureFlags linearTilingFeatures; + FormatFeatureFlags optimalTilingFeatures; + FormatFeatureFlags bufferFeatures; + }; + static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" ); + + struct FormatProperties2 + { + operator const VkFormatProperties2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( FormatProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( formatProperties == rhs.formatProperties ); + } + + bool operator!=( FormatProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eFormatProperties2; + + public: + void* pNext = nullptr; + FormatProperties formatProperties; + }; + static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" 
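+  // Illustrative usage sketch (assumes the enclosing vk namespace and a valid vk::PhysicalDevice
+  // named physicalDevice): FormatProperties is normally inspected by masking individual
+  // FormatFeatureFlagBits out of the returned flags:
+  //
+  //   vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eR8G8B8A8Unorm );
+  //   bool linearFilter = static_cast<bool>( props.optimalTilingFeatures &
+  //                                          vk::FormatFeatureFlagBits::eSampledImageFilterLinear );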
); + + using FormatProperties2KHR = FormatProperties2; + + enum class QueryControlFlagBits + { + ePrecise = VK_QUERY_CONTROL_PRECISE_BIT + }; + + using QueryControlFlags = Flags; + + VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) + { + return QueryControlFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits ) + { + return ~( QueryControlFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(QueryControlFlagBits::ePrecise) + }; + }; + + enum class QueryResultFlagBits + { + e64 = VK_QUERY_RESULT_64_BIT, + eWait = VK_QUERY_RESULT_WAIT_BIT, + eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, + ePartial = VK_QUERY_RESULT_PARTIAL_BIT + }; + + using QueryResultFlags = Flags; + + VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) + { + return QueryResultFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits ) + { + return ~( QueryResultFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial) + }; + }; + + enum class CommandBufferUsageFlagBits + { + eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, + eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, + eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT + }; + + using CommandBufferUsageFlags = Flags; + + VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) + { + return CommandBufferUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) + { + return ~( CommandBufferUsageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse) + }; + }; + + enum class QueryPipelineStatisticFlagBits + { + eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, + eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT, + eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT, + eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT, + eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT, + eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT, + eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT, + eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT, + eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT, + eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, + eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT + }; + + using QueryPipelineStatisticFlags = Flags; + + VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) + { + return QueryPipelineStatisticFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE 
QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) + { + return ~( QueryPipelineStatisticFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations) + }; + }; + + struct CommandBufferInheritanceInfo + { + CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Framebuffer framebuffer_ = Framebuffer(), Bool32 occlusionQueryEnable_ = 0, QueryControlFlags queryFlags_ = QueryControlFlags(), QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() ) + : renderPass( renderPass_ ) + , subpass( subpass_ ) + , framebuffer( framebuffer_ ) + , occlusionQueryEnable( occlusionQueryEnable_ ) + , queryFlags( queryFlags_ ) + , pipelineStatistics( pipelineStatistics_ ) + { + } + + CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) ); + } + + CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferInheritanceInfo ) ); + return *this; + } + CommandBufferInheritanceInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandBufferInheritanceInfo& setRenderPass( RenderPass renderPass_ ) + { + renderPass = renderPass_; + return *this; + } + + CommandBufferInheritanceInfo& setSubpass( uint32_t subpass_ ) + { + subpass = subpass_; + return *this; + } + + CommandBufferInheritanceInfo& setFramebuffer( Framebuffer framebuffer_ ) + { + framebuffer = framebuffer_; + return *this; + } + + CommandBufferInheritanceInfo& setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ ) + { + occlusionQueryEnable = occlusionQueryEnable_; + return *this; + } + + CommandBufferInheritanceInfo& setQueryFlags( QueryControlFlags queryFlags_ ) + { + queryFlags = queryFlags_; + return *this; + } + + CommandBufferInheritanceInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ ) + { + pipelineStatistics = pipelineStatistics_; + return *this; + } + + operator const VkCommandBufferInheritanceInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( CommandBufferInheritanceInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( renderPass == rhs.renderPass ) + && ( subpass == rhs.subpass ) + && ( framebuffer == rhs.framebuffer ) + && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) + && ( queryFlags == rhs.queryFlags ) + && ( pipelineStatistics == rhs.pipelineStatistics ); + } + + bool operator!=( CommandBufferInheritanceInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandBufferInheritanceInfo; + + public: + const void* pNext = nullptr; + 
RenderPass renderPass; + uint32_t subpass; + Framebuffer framebuffer; + Bool32 occlusionQueryEnable; + QueryControlFlags queryFlags; + QueryPipelineStatisticFlags pipelineStatistics; + }; + static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" ); + + struct CommandBufferBeginInfo + { + CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = CommandBufferUsageFlags(), const CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr ) + : flags( flags_ ) + , pInheritanceInfo( pInheritanceInfo_ ) + { + } + + CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) ); + } + + CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandBufferBeginInfo ) ); + return *this; + } + CommandBufferBeginInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandBufferBeginInfo& setFlags( CommandBufferUsageFlags flags_ ) + { + flags = flags_; + return *this; + } + + CommandBufferBeginInfo& setPInheritanceInfo( const CommandBufferInheritanceInfo* pInheritanceInfo_ ) + { + pInheritanceInfo = pInheritanceInfo_; + return *this; + } + + operator const VkCommandBufferBeginInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( CommandBufferBeginInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pInheritanceInfo == rhs.pInheritanceInfo ); + } + + bool operator!=( CommandBufferBeginInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandBufferBeginInfo; + + public: + const void* pNext = nullptr; + CommandBufferUsageFlags flags; + const CommandBufferInheritanceInfo* pInheritanceInfo; + }; + static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" 
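+  // Illustrative usage sketch (assumes the enclosing vk namespace and a valid vk::CommandBuffer
+  // named cmd): a one-shot command buffer is begun with the eOneTimeSubmit usage flag;
+  // pInheritanceInfo is only needed for secondary command buffers:
+  //
+  //   cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
+  //   // ... record commands ...
+  //   cmd.end();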
); + + struct QueryPoolCreateInfo + { + QueryPoolCreateInfo( QueryPoolCreateFlags flags_ = QueryPoolCreateFlags(), QueryType queryType_ = QueryType::eOcclusion, uint32_t queryCount_ = 0, QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() ) + : flags( flags_ ) + , queryType( queryType_ ) + , queryCount( queryCount_ ) + , pipelineStatistics( pipelineStatistics_ ) + { + } + + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) ); + } + + QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( QueryPoolCreateInfo ) ); + return *this; + } + QueryPoolCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + QueryPoolCreateInfo& setFlags( QueryPoolCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + QueryPoolCreateInfo& setQueryType( QueryType queryType_ ) + { + queryType = queryType_; + return *this; + } + + QueryPoolCreateInfo& setQueryCount( uint32_t queryCount_ ) + { + queryCount = queryCount_; + return *this; + } + + QueryPoolCreateInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ ) + { + pipelineStatistics = pipelineStatistics_; + return *this; + } + + operator const VkQueryPoolCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( QueryPoolCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queryType == rhs.queryType ) + && ( queryCount == rhs.queryCount ) + && ( pipelineStatistics == rhs.pipelineStatistics ); + } + + bool operator!=( QueryPoolCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eQueryPoolCreateInfo; + + public: + const void* pNext = nullptr; + QueryPoolCreateFlags flags; + QueryType queryType; + uint32_t queryCount; + QueryPipelineStatisticFlags pipelineStatistics; + }; + static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" 
); + + enum class ImageAspectFlagBits + { + eColor = VK_IMAGE_ASPECT_COLOR_BIT, + eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, + eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, + eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, + ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, + ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT + }; + + using ImageAspectFlags = Flags; + + VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) + { + return ImageAspectFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits ) + { + return ~( ImageAspectFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) + }; + }; + + struct ImageSubresource + { + ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t arrayLayer_ = 0 ) + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , arrayLayer( arrayLayer_ ) + { + } + + ImageSubresource( VkImageSubresource const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresource ) ); + } + + ImageSubresource& operator=( VkImageSubresource const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresource ) ); + return *this; + } + ImageSubresource& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresource& setMipLevel( uint32_t mipLevel_ ) + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresource& setArrayLayer( uint32_t arrayLayer_ ) + { + arrayLayer = arrayLayer_; + return *this; + } + + operator const VkImageSubresource&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSubresource const& rhs ) const + { + return ( aspectMask == rhs.aspectMask ) + && ( mipLevel == rhs.mipLevel ) + && ( arrayLayer == rhs.arrayLayer ); + } + + bool operator!=( ImageSubresource const& rhs ) const + { + return !operator==( rhs ); + } + + ImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; + }; + static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" 
); + + struct ImageSubresourceLayers + { + ImageSubresourceLayers( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 ) + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + { + } + + ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) ); + } + + ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresourceLayers ) ); + return *this; + } + ImageSubresourceLayers& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceLayers& setMipLevel( uint32_t mipLevel_ ) + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresourceLayers& setBaseArrayLayer( uint32_t baseArrayLayer_ ) + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceLayers& setLayerCount( uint32_t layerCount_ ) + { + layerCount = layerCount_; + return *this; + } + + operator const VkImageSubresourceLayers&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSubresourceLayers const& rhs ) const + { + return ( aspectMask == rhs.aspectMask ) + && ( mipLevel == rhs.mipLevel ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceLayers const& rhs ) const + { + return !operator==( rhs ); + } + + ImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; + }; + static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" ); + + struct ImageSubresourceRange + { + ImageSubresourceRange( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t baseMipLevel_ = 0, uint32_t levelCount_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 ) + : aspectMask( aspectMask_ ) + , baseMipLevel( baseMipLevel_ ) + , levelCount( levelCount_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + { + } + + ImageSubresourceRange( VkImageSubresourceRange const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresourceRange ) ); + } + + ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageSubresourceRange ) ); + return *this; + } + ImageSubresourceRange& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresourceRange& setBaseMipLevel( uint32_t baseMipLevel_ ) + { + baseMipLevel = baseMipLevel_; + return *this; + } + + ImageSubresourceRange& setLevelCount( uint32_t levelCount_ ) + { + levelCount = levelCount_; + return *this; + } + + ImageSubresourceRange& setBaseArrayLayer( uint32_t baseArrayLayer_ ) + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ImageSubresourceRange& setLayerCount( uint32_t layerCount_ ) + { + layerCount = layerCount_; + return *this; + } + + operator const VkImageSubresourceRange&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageSubresourceRange const& rhs ) const + { + return ( aspectMask == rhs.aspectMask ) + && ( baseMipLevel == rhs.baseMipLevel ) + && ( levelCount == rhs.levelCount ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ImageSubresourceRange const& rhs ) const + { + return !operator==( rhs ); + } + + 
ImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; + }; + static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" ); + + struct ImageMemoryBarrier + { + ImageMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Image image_ = Image(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() ) + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , image( image_ ) + , subresourceRange( subresourceRange_ ) + { + } + + ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) ); + } + + ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageMemoryBarrier ) ); + return *this; + } + ImageMemoryBarrier& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + ImageMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + ImageMemoryBarrier& setOldLayout( ImageLayout oldLayout_ ) + { + oldLayout = oldLayout_; + return *this; + } + + ImageMemoryBarrier& setNewLayout( ImageLayout newLayout_ ) + { + newLayout = newLayout_; + return *this; + } + + ImageMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + ImageMemoryBarrier& setImage( Image image_ ) + { + image = image_; + return *this; + } + + ImageMemoryBarrier& setSubresourceRange( ImageSubresourceRange subresourceRange_ ) + { + subresourceRange = subresourceRange_; + return *this; + } + + operator const VkImageMemoryBarrier&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageMemoryBarrier const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( oldLayout == rhs.oldLayout ) + && ( newLayout == rhs.newLayout ) + && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) + && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) + && ( image == rhs.image ) + && ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageMemoryBarrier const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageMemoryBarrier; + + public: + const void* pNext = nullptr; + AccessFlags srcAccessMask; + AccessFlags dstAccessMask; + ImageLayout oldLayout; + ImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + Image image; + ImageSubresourceRange subresourceRange; + }; + static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" 
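+  // Illustrative usage sketch (assumes the enclosing vk namespace and a valid vk::Image named
+  // image): ImageSubresourceRange and ImageMemoryBarrier are typically combined to transition an
+  // image layout, here from eUndefined to eTransferDstOptimal:
+  //
+  //   vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
+  //       .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
+  //       .setOldLayout( vk::ImageLayout::eUndefined )
+  //       .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
+  //       .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //       .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
+  //       .setImage( image )
+  //       .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );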
); + + struct ImageViewCreateInfo + { + ImageViewCreateInfo( ImageViewCreateFlags flags_ = ImageViewCreateFlags(), Image image_ = Image(), ImageViewType viewType_ = ImageViewType::e1D, Format format_ = Format::eUndefined, ComponentMapping components_ = ComponentMapping(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() ) + : flags( flags_ ) + , image( image_ ) + , viewType( viewType_ ) + , format( format_ ) + , components( components_ ) + , subresourceRange( subresourceRange_ ) + { + } + + ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) ); + } + + ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageViewCreateInfo ) ); + return *this; + } + ImageViewCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageViewCreateInfo& setFlags( ImageViewCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImageViewCreateInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + ImageViewCreateInfo& setViewType( ImageViewType viewType_ ) + { + viewType = viewType_; + return *this; + } + + ImageViewCreateInfo& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + ImageViewCreateInfo& setComponents( ComponentMapping components_ ) + { + components = components_; + return *this; + } + + ImageViewCreateInfo& setSubresourceRange( ImageSubresourceRange subresourceRange_ ) + { + subresourceRange = subresourceRange_; + return *this; + } + + operator const VkImageViewCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageViewCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( image == rhs.image ) + && ( viewType == rhs.viewType ) + && ( format == rhs.format ) + && ( components == rhs.components ) + && ( subresourceRange == rhs.subresourceRange ); + } + + bool operator!=( ImageViewCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageViewCreateInfo; + + public: + const void* pNext = nullptr; + ImageViewCreateFlags flags; + Image image; + ImageViewType viewType; + Format format; + ComponentMapping components; + ImageSubresourceRange subresourceRange; + }; + static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" 
); + + struct ImageCopy + { + ImageCopy( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() ) + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + { + } + + ImageCopy( VkImageCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageCopy ) ); + } + + ImageCopy& operator=( VkImageCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageCopy ) ); + return *this; + } + ImageCopy& setSrcSubresource( ImageSubresourceLayers srcSubresource_ ) + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageCopy& setSrcOffset( Offset3D srcOffset_ ) + { + srcOffset = srcOffset_; + return *this; + } + + ImageCopy& setDstSubresource( ImageSubresourceLayers dstSubresource_ ) + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageCopy& setDstOffset( Offset3D dstOffset_ ) + { + dstOffset = dstOffset_; + return *this; + } + + ImageCopy& setExtent( Extent3D extent_ ) + { + extent = extent_; + return *this; + } + + operator const VkImageCopy&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageCopy const& rhs ) const + { + return ( srcSubresource == rhs.srcSubresource ) + && ( srcOffset == rhs.srcOffset ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffset == rhs.dstOffset ) + && ( extent == rhs.extent ); + } + + bool operator!=( ImageCopy const& rhs ) const + { + return !operator==( rhs ); + } + + ImageSubresourceLayers srcSubresource; + Offset3D srcOffset; + ImageSubresourceLayers dstSubresource; + Offset3D dstOffset; + Extent3D extent; + }; + static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" 
); + + struct ImageBlit + { + ImageBlit( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), std::array const& srcOffsets_ = { { Offset3D(), Offset3D() } }, ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), std::array const& dstOffsets_ = { { Offset3D(), Offset3D() } } ) + : srcSubresource( srcSubresource_ ) + , dstSubresource( dstSubresource_ ) + { + memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) ); + memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) ); + } + + ImageBlit( VkImageBlit const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageBlit ) ); + } + + ImageBlit& operator=( VkImageBlit const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageBlit ) ); + return *this; + } + ImageBlit& setSrcSubresource( ImageSubresourceLayers srcSubresource_ ) + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageBlit& setSrcOffsets( std::array srcOffsets_ ) + { + memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) ); + return *this; + } + + ImageBlit& setDstSubresource( ImageSubresourceLayers dstSubresource_ ) + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageBlit& setDstOffsets( std::array dstOffsets_ ) + { + memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) ); + return *this; + } + + operator const VkImageBlit&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageBlit const& rhs ) const + { + return ( srcSubresource == rhs.srcSubresource ) + && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( Offset3D ) ) == 0 ) + && ( dstSubresource == rhs.dstSubresource ) + && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( Offset3D ) ) == 0 ); + } + + bool operator!=( ImageBlit const& rhs ) const + { + return !operator==( rhs ); + } + + ImageSubresourceLayers srcSubresource; + Offset3D srcOffsets[2]; + ImageSubresourceLayers dstSubresource; + Offset3D dstOffsets[2]; + }; + static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" 
); + + struct BufferImageCopy + { + BufferImageCopy( DeviceSize bufferOffset_ = 0, uint32_t bufferRowLength_ = 0, uint32_t bufferImageHeight_ = 0, ImageSubresourceLayers imageSubresource_ = ImageSubresourceLayers(), Offset3D imageOffset_ = Offset3D(), Extent3D imageExtent_ = Extent3D() ) + : bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + BufferImageCopy( VkBufferImageCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferImageCopy ) ); + } + + BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) + { + memcpy( this, &rhs, sizeof( BufferImageCopy ) ); + return *this; + } + BufferImageCopy& setBufferOffset( DeviceSize bufferOffset_ ) + { + bufferOffset = bufferOffset_; + return *this; + } + + BufferImageCopy& setBufferRowLength( uint32_t bufferRowLength_ ) + { + bufferRowLength = bufferRowLength_; + return *this; + } + + BufferImageCopy& setBufferImageHeight( uint32_t bufferImageHeight_ ) + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + BufferImageCopy& setImageSubresource( ImageSubresourceLayers imageSubresource_ ) + { + imageSubresource = imageSubresource_; + return *this; + } + + BufferImageCopy& setImageOffset( Offset3D imageOffset_ ) + { + imageOffset = imageOffset_; + return *this; + } + + BufferImageCopy& setImageExtent( Extent3D imageExtent_ ) + { + imageExtent = imageExtent_; + return *this; + } + + operator const VkBufferImageCopy&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BufferImageCopy const& rhs ) const + { + return ( bufferOffset == rhs.bufferOffset ) + && ( bufferRowLength == rhs.bufferRowLength ) + && ( bufferImageHeight == rhs.bufferImageHeight ) + && ( imageSubresource == rhs.imageSubresource ) + && ( imageOffset == rhs.imageOffset ) + && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( BufferImageCopy const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + ImageSubresourceLayers imageSubresource; + Offset3D imageOffset; + Extent3D imageExtent; + }; + static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" 
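+  // Illustrative usage sketch (assumes the enclosing vk namespace; width and height stand in for
+  // the real image size): a single region describing the upload of one whole 2D mip level, where
+  // bufferRowLength/bufferImageHeight of 0 mean the buffer data is tightly packed:
+  //
+  //   vk::BufferImageCopy region = vk::BufferImageCopy()
+  //       .setBufferOffset( 0 )
+  //       .setImageSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
+  //       .setImageExtent( vk::Extent3D( width, height, 1 ) );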
); + + struct ImageResolve + { + ImageResolve( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() ) + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + { + } + + ImageResolve( VkImageResolve const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageResolve ) ); + } + + ImageResolve& operator=( VkImageResolve const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageResolve ) ); + return *this; + } + ImageResolve& setSrcSubresource( ImageSubresourceLayers srcSubresource_ ) + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageResolve& setSrcOffset( Offset3D srcOffset_ ) + { + srcOffset = srcOffset_; + return *this; + } + + ImageResolve& setDstSubresource( ImageSubresourceLayers dstSubresource_ ) + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageResolve& setDstOffset( Offset3D dstOffset_ ) + { + dstOffset = dstOffset_; + return *this; + } + + ImageResolve& setExtent( Extent3D extent_ ) + { + extent = extent_; + return *this; + } + + operator const VkImageResolve&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageResolve const& rhs ) const + { + return ( srcSubresource == rhs.srcSubresource ) + && ( srcOffset == rhs.srcOffset ) + && ( dstSubresource == rhs.dstSubresource ) + && ( dstOffset == rhs.dstOffset ) + && ( extent == rhs.extent ); + } + + bool operator!=( ImageResolve const& rhs ) const + { + return !operator==( rhs ); + } + + ImageSubresourceLayers srcSubresource; + Offset3D srcOffset; + ImageSubresourceLayers dstSubresource; + Offset3D dstOffset; + Extent3D extent; + }; + static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" ); + + struct ClearAttachment + { + ClearAttachment( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t colorAttachment_ = 0, ClearValue clearValue_ = ClearValue() ) + : aspectMask( aspectMask_ ) + , colorAttachment( colorAttachment_ ) + , clearValue( clearValue_ ) + { + } + + ClearAttachment( VkClearAttachment const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearAttachment ) ); + } + + ClearAttachment& operator=( VkClearAttachment const & rhs ) + { + memcpy( this, &rhs, sizeof( ClearAttachment ) ); + return *this; + } + ClearAttachment& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + ClearAttachment& setColorAttachment( uint32_t colorAttachment_ ) + { + colorAttachment = colorAttachment_; + return *this; + } + + ClearAttachment& setClearValue( ClearValue clearValue_ ) + { + clearValue = clearValue_; + return *this; + } + + operator const VkClearAttachment&() const + { + return *reinterpret_cast(this); + } + + ImageAspectFlags aspectMask; + uint32_t colorAttachment; + ClearValue clearValue; + }; + static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" 
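As an aside to the generated code (not part of the patch), a hedged sketch of the ClearAttachment wrapper above being filled to clear color attachment 0 inside a render pass; the ClearValue / ClearColorValue construction assumes the union wrappers defined earlier in this header, and the helper name is made up.

    #include <array>
    #include <vulkan/vulkan.hpp>

    // Hypothetical helper: describe a clear of color attachment 0 to opaque black.
    vk::ClearAttachment clearColorAttachment0()
    {
        return vk::ClearAttachment()
            .setAspectMask( vk::ImageAspectFlagBits::eColor )
            .setColorAttachment( 0 )
            .setClearValue( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
    }

Together with a vk::ClearRect, such an entry would be passed to CommandBuffer::clearAttachments.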
); + + struct InputAttachmentAspectReference + { + InputAttachmentAspectReference( uint32_t subpass_ = 0, uint32_t inputAttachmentIndex_ = 0, ImageAspectFlags aspectMask_ = ImageAspectFlags() ) + : subpass( subpass_ ) + , inputAttachmentIndex( inputAttachmentIndex_ ) + , aspectMask( aspectMask_ ) + { + } + + InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) + { + memcpy( this, &rhs, sizeof( InputAttachmentAspectReference ) ); + } + + InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) + { + memcpy( this, &rhs, sizeof( InputAttachmentAspectReference ) ); + return *this; + } + InputAttachmentAspectReference& setSubpass( uint32_t subpass_ ) + { + subpass = subpass_; + return *this; + } + + InputAttachmentAspectReference& setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) + { + inputAttachmentIndex = inputAttachmentIndex_; + return *this; + } + + InputAttachmentAspectReference& setAspectMask( ImageAspectFlags aspectMask_ ) + { + aspectMask = aspectMask_; + return *this; + } + + operator const VkInputAttachmentAspectReference&() const + { + return *reinterpret_cast(this); + } + + bool operator==( InputAttachmentAspectReference const& rhs ) const + { + return ( subpass == rhs.subpass ) + && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) + && ( aspectMask == rhs.aspectMask ); + } + + bool operator!=( InputAttachmentAspectReference const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t subpass; + uint32_t inputAttachmentIndex; + ImageAspectFlags aspectMask; + }; + static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" ); + + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + + struct RenderPassInputAttachmentAspectCreateInfo + { + RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0, const InputAttachmentAspectReference* pAspectReferences_ = nullptr ) + : aspectReferenceCount( aspectReferenceCount_ ) + , pAspectReferences( pAspectReferences_ ) + { + } + + RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) ); + } + + RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassInputAttachmentAspectCreateInfo ) ); + return *this; + } + RenderPassInputAttachmentAspectCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassInputAttachmentAspectCreateInfo& setAspectReferenceCount( uint32_t aspectReferenceCount_ ) + { + aspectReferenceCount = aspectReferenceCount_; + return *this; + } + + RenderPassInputAttachmentAspectCreateInfo& setPAspectReferences( const InputAttachmentAspectReference* pAspectReferences_ ) + { + pAspectReferences = pAspectReferences_; + return *this; + } + + operator const VkRenderPassInputAttachmentAspectCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( aspectReferenceCount == rhs.aspectReferenceCount ) + && ( pAspectReferences == rhs.pAspectReferences ); + } + + bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = 
StructureType::eRenderPassInputAttachmentAspectCreateInfo; + + public: + const void* pNext = nullptr; + uint32_t aspectReferenceCount; + const InputAttachmentAspectReference* pAspectReferences; + }; + static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" ); + + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + + struct BindImagePlaneMemoryInfo + { + BindImagePlaneMemoryInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor ) + : planeAspect( planeAspect_ ) + { + } + + BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImagePlaneMemoryInfo ) ); + } + + BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindImagePlaneMemoryInfo ) ); + return *this; + } + BindImagePlaneMemoryInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindImagePlaneMemoryInfo& setPlaneAspect( ImageAspectFlagBits planeAspect_ ) + { + planeAspect = planeAspect_; + return *this; + } + + operator const VkBindImagePlaneMemoryInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BindImagePlaneMemoryInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( planeAspect == rhs.planeAspect ); + } + + bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + + public: + const void* pNext = nullptr; + ImageAspectFlagBits planeAspect; + }; + static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" ); + + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + + struct ImagePlaneMemoryRequirementsInfo + { + ImagePlaneMemoryRequirementsInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor ) + : planeAspect( planeAspect_ ) + { + } + + ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) ); + } + + ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImagePlaneMemoryRequirementsInfo ) ); + return *this; + } + ImagePlaneMemoryRequirementsInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImagePlaneMemoryRequirementsInfo& setPlaneAspect( ImageAspectFlagBits planeAspect_ ) + { + planeAspect = planeAspect_; + return *this; + } + + operator const VkImagePlaneMemoryRequirementsInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( planeAspect == rhs.planeAspect ); + } + + bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; + + public: + const void* pNext = nullptr; + ImageAspectFlagBits planeAspect; + }; + static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" 
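A sketch, not part of the patch, of how RenderPassInputAttachmentAspectCreateInfo is meant to be chained into render pass creation via pNext; attachment and subpass setup is omitted and the names here are hypothetical.

    #include <vulkan/vulkan.hpp>

    // Hypothetical: declare that input attachment 0 of subpass 0 is read as depth only.
    vk::RenderPass createPassWithAspectInfo( vk::Device device )
    {
        vk::InputAttachmentAspectReference aspectRef( 0, 0, vk::ImageAspectFlagBits::eDepth );
        vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );

        vk::RenderPassCreateInfo renderPassInfo;   // attachments, subpasses, dependencies omitted here
        renderPassInfo.setPNext( &aspectInfo );    // core-1.1 / KHR structure chained via pNext

        return device.createRenderPass( renderPassInfo );
    }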
); + + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + + enum class SparseImageFormatFlagBits + { + eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT, + eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT, + eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT + }; + + using SparseImageFormatFlags = Flags; + + VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) + { + return SparseImageFormatFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) + { + return ~( SparseImageFormatFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize) + }; + }; + + struct SparseImageFormatProperties + { + operator const VkSparseImageFormatProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageFormatProperties const& rhs ) const + { + return ( aspectMask == rhs.aspectMask ) + && ( imageGranularity == rhs.imageGranularity ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageFormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + ImageAspectFlags aspectMask; + Extent3D imageGranularity; + SparseImageFormatFlags flags; + }; + static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" ); + + struct SparseImageMemoryRequirements + { + operator const VkSparseImageMemoryRequirements&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageMemoryRequirements const& rhs ) const + { + return ( formatProperties == rhs.formatProperties ) + && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) + && ( imageMipTailSize == rhs.imageMipTailSize ) + && ( imageMipTailOffset == rhs.imageMipTailOffset ) + && ( imageMipTailStride == rhs.imageMipTailStride ); + } + + bool operator!=( SparseImageMemoryRequirements const& rhs ) const + { + return !operator==( rhs ); + } + + SparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + DeviceSize imageMipTailSize; + DeviceSize imageMipTailOffset; + DeviceSize imageMipTailStride; + }; + static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" ); + + struct SparseImageFormatProperties2 + { + operator const VkSparseImageFormatProperties2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageFormatProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( properties == rhs.properties ); + } + + bool operator!=( SparseImageFormatProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSparseImageFormatProperties2; + + public: + void* pNext = nullptr; + SparseImageFormatProperties properties; + }; + static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" 
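To show the flag types above in use (not in the diff): querying sparse image format properties and testing a returned bit with the generated bitwise operators. The getSparseImageFormatProperties overload and its parameter order are assumed from this header's enhanced mode; the helper and format choice are illustrative only.

    #include <vector>
    #include <vulkan/vulkan.hpp>

    // Hypothetical: check whether a sparse R8G8B8A8 2D image uses a single mip tail.
    bool usesSingleMipTail( vk::PhysicalDevice physicalDevice )
    {
        std::vector<vk::SparseImageFormatProperties> props =
            physicalDevice.getSparseImageFormatProperties( vk::Format::eR8G8B8A8Unorm,
                                                           vk::ImageType::e2D,
                                                           vk::SampleCountFlagBits::e1,
                                                           vk::ImageUsageFlagBits::eSampled,
                                                           vk::ImageTiling::eOptimal );
        for ( vk::SparseImageFormatProperties const & p : props )
        {
            if ( p.flags & vk::SparseImageFormatFlagBits::eSingleMiptail )
            {
                return true;
            }
        }
        return false;
    }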
); + + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + + struct SparseImageMemoryRequirements2 + { + operator const VkSparseImageMemoryRequirements2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageMemoryRequirements2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryRequirements == rhs.memoryRequirements ); + } + + bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSparseImageMemoryRequirements2; + + public: + void* pNext = nullptr; + SparseImageMemoryRequirements memoryRequirements; + }; + static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" ); + + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + + enum class SparseMemoryBindFlagBits + { + eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT + }; + + using SparseMemoryBindFlags = Flags; + + VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) + { + return SparseMemoryBindFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) + { + return ~( SparseMemoryBindFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata) + }; + }; + + struct SparseMemoryBind + { + SparseMemoryBind( DeviceSize resourceOffset_ = 0, DeviceSize size_ = 0, DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() ) + : resourceOffset( resourceOffset_ ) + , size( size_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + { + } + + SparseMemoryBind( VkSparseMemoryBind const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseMemoryBind ) ); + } + + SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseMemoryBind ) ); + return *this; + } + SparseMemoryBind& setResourceOffset( DeviceSize resourceOffset_ ) + { + resourceOffset = resourceOffset_; + return *this; + } + + SparseMemoryBind& setSize( DeviceSize size_ ) + { + size = size_; + return *this; + } + + SparseMemoryBind& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + SparseMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ ) + { + memoryOffset = memoryOffset_; + return *this; + } + + SparseMemoryBind& setFlags( SparseMemoryBindFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator const VkSparseMemoryBind&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseMemoryBind const& rhs ) const + { + return ( resourceOffset == rhs.resourceOffset ) + && ( size == rhs.size ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseMemoryBind const& rhs ) const + { + return !operator==( rhs ); + } + + DeviceSize resourceOffset; + DeviceSize size; + DeviceMemory memory; + DeviceSize memoryOffset; + SparseMemoryBindFlags flags; + }; + static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" 
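A minimal sketch (not part of the patch) filling the SparseMemoryBind wrapper above for one opaque binding; the memory handle, size, and offsets are placeholders supplied by the caller.

    #include <vulkan/vulkan.hpp>

    // Hypothetical: bind `size` bytes of `memory` (starting at `memoryOffset`)
    // to the beginning of a sparse resource.
    vk::SparseMemoryBind makeOpaqueBind( vk::DeviceMemory memory, vk::DeviceSize size, vk::DeviceSize memoryOffset )
    {
        return vk::SparseMemoryBind()
            .setResourceOffset( 0 )
            .setSize( size )
            .setMemory( memory )
            .setMemoryOffset( memoryOffset )
            .setFlags( vk::SparseMemoryBindFlags() );   // or SparseMemoryBindFlagBits::eMetadata for the metadata aspect
    }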
); + + struct SparseImageMemoryBind + { + SparseImageMemoryBind( ImageSubresource subresource_ = ImageSubresource(), Offset3D offset_ = Offset3D(), Extent3D extent_ = Extent3D(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() ) + : subresource( subresource_ ) + , offset( offset_ ) + , extent( extent_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + { + } + + SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) ); + } + + SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageMemoryBind ) ); + return *this; + } + SparseImageMemoryBind& setSubresource( ImageSubresource subresource_ ) + { + subresource = subresource_; + return *this; + } + + SparseImageMemoryBind& setOffset( Offset3D offset_ ) + { + offset = offset_; + return *this; + } + + SparseImageMemoryBind& setExtent( Extent3D extent_ ) + { + extent = extent_; + return *this; + } + + SparseImageMemoryBind& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + SparseImageMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ ) + { + memoryOffset = memoryOffset_; + return *this; + } + + SparseImageMemoryBind& setFlags( SparseMemoryBindFlags flags_ ) + { + flags = flags_; + return *this; + } + + operator const VkSparseImageMemoryBind&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageMemoryBind const& rhs ) const + { + return ( subresource == rhs.subresource ) + && ( offset == rhs.offset ) + && ( extent == rhs.extent ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageMemoryBind const& rhs ) const + { + return !operator==( rhs ); + } + + ImageSubresource subresource; + Offset3D offset; + Extent3D extent; + DeviceMemory memory; + DeviceSize memoryOffset; + SparseMemoryBindFlags flags; + }; + static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" 
); + + struct SparseBufferMemoryBindInfo + { + SparseBufferMemoryBindInfo( Buffer buffer_ = Buffer(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr ) + : buffer( buffer_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + { + } + + SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) ); + } + + SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseBufferMemoryBindInfo ) ); + return *this; + } + SparseBufferMemoryBindInfo& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + SparseBufferMemoryBindInfo& setBindCount( uint32_t bindCount_ ) + { + bindCount = bindCount_; + return *this; + } + + SparseBufferMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ ) + { + pBinds = pBinds_; + return *this; + } + + operator const VkSparseBufferMemoryBindInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseBufferMemoryBindInfo const& rhs ) const + { + return ( buffer == rhs.buffer ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Buffer buffer; + uint32_t bindCount; + const SparseMemoryBind* pBinds; + }; + static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" ); + + struct SparseImageOpaqueMemoryBindInfo + { + SparseImageOpaqueMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr ) + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + { + } + + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) ); + } + + SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) ); + return *this; + } + SparseImageOpaqueMemoryBindInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + SparseImageOpaqueMemoryBindInfo& setBindCount( uint32_t bindCount_ ) + { + bindCount = bindCount_; + return *this; + } + + SparseImageOpaqueMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ ) + { + pBinds = pBinds_; + return *this; + } + + operator const VkSparseImageOpaqueMemoryBindInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const + { + return ( image == rhs.image ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Image image; + uint32_t bindCount; + const SparseMemoryBind* pBinds; + }; + static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" 
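For illustration (not in the diff): the *MemoryBindInfo wrappers above simply point at an array of bind entries for one resource, as in this hypothetical grouping of two opaque binds for a sparse buffer.

    #include <vulkan/vulkan.hpp>

    // Hypothetical: group two opaque binds for a single sparse buffer.
    // The caller must keep the bind array alive until the sparse binding is submitted.
    vk::SparseBufferMemoryBindInfo groupBufferBinds( vk::Buffer buffer, const vk::SparseMemoryBind binds[2] )
    {
        return vk::SparseBufferMemoryBindInfo()
            .setBuffer( buffer )
            .setBindCount( 2 )
            .setPBinds( binds );
    }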
); + + struct SparseImageMemoryBindInfo + { + SparseImageMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseImageMemoryBind* pBinds_ = nullptr ) + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + { + } + + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) ); + } + + SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SparseImageMemoryBindInfo ) ); + return *this; + } + SparseImageMemoryBindInfo& setImage( Image image_ ) + { + image = image_; + return *this; + } + + SparseImageMemoryBindInfo& setBindCount( uint32_t bindCount_ ) + { + bindCount = bindCount_; + return *this; + } + + SparseImageMemoryBindInfo& setPBinds( const SparseImageMemoryBind* pBinds_ ) + { + pBinds = pBinds_; + return *this; + } + + operator const VkSparseImageMemoryBindInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SparseImageMemoryBindInfo const& rhs ) const + { + return ( image == rhs.image ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseImageMemoryBindInfo const& rhs ) const + { + return !operator==( rhs ); + } + + Image image; + uint32_t bindCount; + const SparseImageMemoryBind* pBinds; + }; + static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" ); + + struct BindSparseInfo + { + BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t bufferBindCount_ = 0, const SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, uint32_t imageOpaqueBindCount_ = 0, const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, uint32_t imageBindCount_ = 0, const SparseImageMemoryBindInfo* pImageBinds_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr ) + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , bufferBindCount( bufferBindCount_ ) + , pBufferBinds( pBufferBinds_ ) + , imageOpaqueBindCount( imageOpaqueBindCount_ ) + , pImageOpaqueBinds( pImageOpaqueBinds_ ) + , imageBindCount( imageBindCount_ ) + , pImageBinds( pImageBinds_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + { + } + + BindSparseInfo( VkBindSparseInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindSparseInfo ) ); + } + + BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( BindSparseInfo ) ); + return *this; + } + BindSparseInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + BindSparseInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + BindSparseInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ ) + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + + BindSparseInfo& setBufferBindCount( uint32_t bufferBindCount_ ) + { + bufferBindCount = bufferBindCount_; + return *this; + } + + BindSparseInfo& setPBufferBinds( const SparseBufferMemoryBindInfo* pBufferBinds_ ) + { + pBufferBinds = pBufferBinds_; + return *this; + } + + BindSparseInfo& setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) + { + imageOpaqueBindCount = imageOpaqueBindCount_; + return *this; + } + + BindSparseInfo& setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) 
+ { + pImageOpaqueBinds = pImageOpaqueBinds_; + return *this; + } + + BindSparseInfo& setImageBindCount( uint32_t imageBindCount_ ) + { + imageBindCount = imageBindCount_; + return *this; + } + + BindSparseInfo& setPImageBinds( const SparseImageMemoryBindInfo* pImageBinds_ ) + { + pImageBinds = pImageBinds_; + return *this; + } + + BindSparseInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + BindSparseInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ ) + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + + operator const VkBindSparseInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( BindSparseInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphores == rhs.pWaitSemaphores ) + && ( bufferBindCount == rhs.bufferBindCount ) + && ( pBufferBinds == rhs.pBufferBinds ) + && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) + && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) + && ( imageBindCount == rhs.imageBindCount ) + && ( pImageBinds == rhs.pImageBinds ) + && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) + && ( pSignalSemaphores == rhs.pSignalSemaphores ); + } + + bool operator!=( BindSparseInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eBindSparseInfo; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreCount; + const Semaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const SparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const SparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const Semaphore* pSignalSemaphores; + }; + static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" 
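A hedged sketch (not part of the patch) assembling a BindSparseInfo from the pieces above and submitting it on a sparse-binding queue. The Queue::bindSparse overload used here, and every handle, are assumptions for illustration only.

    #include <vulkan/vulkan.hpp>

    // Hypothetical: submit one buffer bind and signal a semaphore when the binding completes.
    void submitSparseBind( vk::Queue sparseQueue,
                           const vk::SparseBufferMemoryBindInfo & bufferBindInfo,
                           vk::Semaphore signalSemaphore,
                           vk::Fence fence )
    {
        vk::BindSparseInfo bindInfo = vk::BindSparseInfo()
            .setBufferBindCount( 1 )
            .setPBufferBinds( &bufferBindInfo )
            .setSignalSemaphoreCount( 1 )
            .setPSignalSemaphores( &signalSemaphore );

        sparseQueue.bindSparse( bindInfo, fence );
    }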
); + + enum class PipelineStageFlagBits + { + eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, + eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, + eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX + }; + + using PipelineStageFlags = Flags; + + VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) + { + return PipelineStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits ) + { + return ~( PipelineStageFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) + }; + }; + + enum class CommandPoolCreateFlagBits + { + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + }; + + using CommandPoolCreateFlags = Flags; + + VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) + { + return CommandPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) + { + return ~( CommandPoolCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected) + }; + }; + + struct CommandPoolCreateInfo + { + CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), uint32_t queueFamilyIndex_ = 0 ) + : flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + { + } + + CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) + { + memcpy( this, 
&rhs, sizeof( CommandPoolCreateInfo ) ); + } + + CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( CommandPoolCreateInfo ) ); + return *this; + } + CommandPoolCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CommandPoolCreateInfo& setFlags( CommandPoolCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + CommandPoolCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + operator const VkCommandPoolCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( CommandPoolCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( queueFamilyIndex == rhs.queueFamilyIndex ); + } + + bool operator!=( CommandPoolCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCommandPoolCreateInfo; + + public: + const void* pNext = nullptr; + CommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; + }; + static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" ); + + enum class CommandPoolResetFlagBits + { + eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT + }; + + using CommandPoolResetFlags = Flags; + + VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) + { + return CommandPoolResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) + { + return ~( CommandPoolResetFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources) + }; + }; + + enum class CommandBufferResetFlagBits + { + eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT + }; + + using CommandBufferResetFlags = Flags; + + VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) + { + return CommandBufferResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) + { + return ~( CommandBufferResetFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources) + }; + }; + + enum class SampleCountFlagBits + { + e1 = VK_SAMPLE_COUNT_1_BIT, + e2 = VK_SAMPLE_COUNT_2_BIT, + e4 = VK_SAMPLE_COUNT_4_BIT, + e8 = VK_SAMPLE_COUNT_8_BIT, + e16 = VK_SAMPLE_COUNT_16_BIT, + e32 = VK_SAMPLE_COUNT_32_BIT, + e64 = VK_SAMPLE_COUNT_64_BIT + }; + + using SampleCountFlags = Flags; + + VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) + { + return SampleCountFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits ) + { + return ~( SampleCountFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64) + }; + }; + + struct ImageFormatProperties + { + operator const VkImageFormatProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageFormatProperties const& rhs 
) const + { + return ( maxExtent == rhs.maxExtent ) + && ( maxMipLevels == rhs.maxMipLevels ) + && ( maxArrayLayers == rhs.maxArrayLayers ) + && ( sampleCounts == rhs.sampleCounts ) + && ( maxResourceSize == rhs.maxResourceSize ); + } + + bool operator!=( ImageFormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + Extent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + SampleCountFlags sampleCounts; + DeviceSize maxResourceSize; + }; + static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" ); + + struct ImageCreateInfo + { + ImageCreateInfo( ImageCreateFlags flags_ = ImageCreateFlags(), ImageType imageType_ = ImageType::e1D, Format format_ = Format::eUndefined, Extent3D extent_ = Extent3D(), uint32_t mipLevels_ = 0, uint32_t arrayLayers_ = 0, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, ImageLayout initialLayout_ = ImageLayout::eUndefined ) + : flags( flags_ ) + , imageType( imageType_ ) + , format( format_ ) + , extent( extent_ ) + , mipLevels( mipLevels_ ) + , arrayLayers( arrayLayers_ ) + , samples( samples_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + , initialLayout( initialLayout_ ) + { + } + + ImageCreateInfo( VkImageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageCreateInfo ) ); + } + + ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ImageCreateInfo ) ); + return *this; + } + ImageCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImageCreateInfo& setFlags( ImageCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImageCreateInfo& setImageType( ImageType imageType_ ) + { + imageType = imageType_; + return *this; + } + + ImageCreateInfo& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + ImageCreateInfo& setExtent( Extent3D extent_ ) + { + extent = extent_; + return *this; + } + + ImageCreateInfo& setMipLevels( uint32_t mipLevels_ ) + { + mipLevels = mipLevels_; + return *this; + } + + ImageCreateInfo& setArrayLayers( uint32_t arrayLayers_ ) + { + arrayLayers = arrayLayers_; + return *this; + } + + ImageCreateInfo& setSamples( SampleCountFlagBits samples_ ) + { + samples = samples_; + return *this; + } + + ImageCreateInfo& setTiling( ImageTiling tiling_ ) + { + tiling = tiling_; + return *this; + } + + ImageCreateInfo& setUsage( ImageUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + ImageCreateInfo& setSharingMode( SharingMode sharingMode_ ) + { + sharingMode = sharingMode_; + return *this; + } + + ImageCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + ImageCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + + ImageCreateInfo& setInitialLayout( ImageLayout initialLayout_ ) + { + initialLayout = initialLayout_; + return *this; + } + + operator const VkImageCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageCreateInfo const& rhs ) 
const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( imageType == rhs.imageType ) + && ( format == rhs.format ) + && ( extent == rhs.extent ) + && ( mipLevels == rhs.mipLevels ) + && ( arrayLayers == rhs.arrayLayers ) + && ( samples == rhs.samples ) + && ( tiling == rhs.tiling ) + && ( usage == rhs.usage ) + && ( sharingMode == rhs.sharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) + && ( initialLayout == rhs.initialLayout ); + } + + bool operator!=( ImageCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageCreateInfo; + + public: + const void* pNext = nullptr; + ImageCreateFlags flags; + ImageType imageType; + Format format; + Extent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + SampleCountFlagBits samples; + ImageTiling tiling; + ImageUsageFlags usage; + SharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + ImageLayout initialLayout; + }; + static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); + + struct PipelineMultisampleStateCreateInfo + { + PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = PipelineMultisampleStateCreateFlags(), SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, Bool32 sampleShadingEnable_ = 0, float minSampleShading_ = 0, const SampleMask* pSampleMask_ = nullptr, Bool32 alphaToCoverageEnable_ = 0, Bool32 alphaToOneEnable_ = 0 ) + : flags( flags_ ) + , rasterizationSamples( rasterizationSamples_ ) + , sampleShadingEnable( sampleShadingEnable_ ) + , minSampleShading( minSampleShading_ ) + , pSampleMask( pSampleMask_ ) + , alphaToCoverageEnable( alphaToCoverageEnable_ ) + , alphaToOneEnable( alphaToOneEnable_ ) + { + } + + PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) ); + } + + PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineMultisampleStateCreateInfo ) ); + return *this; + } + PipelineMultisampleStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setFlags( PipelineMultisampleStateCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) + { + rasterizationSamples = rasterizationSamples_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setSampleShadingEnable( Bool32 sampleShadingEnable_ ) + { + sampleShadingEnable = sampleShadingEnable_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setMinSampleShading( float minSampleShading_ ) + { + minSampleShading = minSampleShading_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setPSampleMask( const SampleMask* pSampleMask_ ) + { + pSampleMask = pSampleMask_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ ) + { + alphaToCoverageEnable = alphaToCoverageEnable_; + return *this; + } + + PipelineMultisampleStateCreateInfo& setAlphaToOneEnable( Bool32 alphaToOneEnable_ ) + { + alphaToOneEnable = alphaToOneEnable_; + return *this; + } + + operator const 
VkPipelineMultisampleStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( rasterizationSamples == rhs.rasterizationSamples ) + && ( sampleShadingEnable == rhs.sampleShadingEnable ) + && ( minSampleShading == rhs.minSampleShading ) + && ( pSampleMask == rhs.pSampleMask ) + && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) + && ( alphaToOneEnable == rhs.alphaToOneEnable ); + } + + bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; + + public: + const void* pNext = nullptr; + PipelineMultisampleStateCreateFlags flags; + SampleCountFlagBits rasterizationSamples; + Bool32 sampleShadingEnable; + float minSampleShading; + const SampleMask* pSampleMask; + Bool32 alphaToCoverageEnable; + Bool32 alphaToOneEnable; + }; + static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" ); + + struct GraphicsPipelineCreateInfo + { + GraphicsPipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), uint32_t stageCount_ = 0, const PipelineShaderStageCreateInfo* pStages_ = nullptr, const PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr, const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr, const PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr, const PipelineViewportStateCreateInfo* pViewportState_ = nullptr, const PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr, const PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr, const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr, const PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr, const PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr, PipelineLayout layout_ = PipelineLayout(), RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 ) + : flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + , pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) ); + } + + GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( GraphicsPipelineCreateInfo ) ); + return *this; + } + GraphicsPipelineCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + GraphicsPipelineCreateInfo& setFlags( PipelineCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + GraphicsPipelineCreateInfo& setStageCount( uint32_t stageCount_ ) + { + stageCount = stageCount_; + return *this; + } + + 
GraphicsPipelineCreateInfo& setPStages( const PipelineShaderStageCreateInfo* pStages_ ) + { + pStages = pStages_; + return *this; + } + + GraphicsPipelineCreateInfo& setPVertexInputState( const PipelineVertexInputStateCreateInfo* pVertexInputState_ ) + { + pVertexInputState = pVertexInputState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) + { + pInputAssemblyState = pInputAssemblyState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPTessellationState( const PipelineTessellationStateCreateInfo* pTessellationState_ ) + { + pTessellationState = pTessellationState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPViewportState( const PipelineViewportStateCreateInfo* pViewportState_ ) + { + pViewportState = pViewportState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPRasterizationState( const PipelineRasterizationStateCreateInfo* pRasterizationState_ ) + { + pRasterizationState = pRasterizationState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPMultisampleState( const PipelineMultisampleStateCreateInfo* pMultisampleState_ ) + { + pMultisampleState = pMultisampleState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPDepthStencilState( const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) + { + pDepthStencilState = pDepthStencilState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPColorBlendState( const PipelineColorBlendStateCreateInfo* pColorBlendState_ ) + { + pColorBlendState = pColorBlendState_; + return *this; + } + + GraphicsPipelineCreateInfo& setPDynamicState( const PipelineDynamicStateCreateInfo* pDynamicState_ ) + { + pDynamicState = pDynamicState_; + return *this; + } + + GraphicsPipelineCreateInfo& setLayout( PipelineLayout layout_ ) + { + layout = layout_; + return *this; + } + + GraphicsPipelineCreateInfo& setRenderPass( RenderPass renderPass_ ) + { + renderPass = renderPass_; + return *this; + } + + GraphicsPipelineCreateInfo& setSubpass( uint32_t subpass_ ) + { + subpass = subpass_; + return *this; + } + + GraphicsPipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ ) + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + GraphicsPipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ ) + { + basePipelineIndex = basePipelineIndex_; + return *this; + } + + operator const VkGraphicsPipelineCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( GraphicsPipelineCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( stageCount == rhs.stageCount ) + && ( pStages == rhs.pStages ) + && ( pVertexInputState == rhs.pVertexInputState ) + && ( pInputAssemblyState == rhs.pInputAssemblyState ) + && ( pTessellationState == rhs.pTessellationState ) + && ( pViewportState == rhs.pViewportState ) + && ( pRasterizationState == rhs.pRasterizationState ) + && ( pMultisampleState == rhs.pMultisampleState ) + && ( pDepthStencilState == rhs.pDepthStencilState ) + && ( pColorBlendState == rhs.pColorBlendState ) + && ( pDynamicState == rhs.pDynamicState ) + && ( layout == rhs.layout ) + && ( renderPass == rhs.renderPass ) + && ( subpass == rhs.subpass ) + && ( basePipelineHandle == rhs.basePipelineHandle ) + && ( basePipelineIndex == rhs.basePipelineIndex ); + } + + bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + 
StructureType sType = StructureType::eGraphicsPipelineCreateInfo; + + public: + const void* pNext = nullptr; + PipelineCreateFlags flags; + uint32_t stageCount; + const PipelineShaderStageCreateInfo* pStages; + const PipelineVertexInputStateCreateInfo* pVertexInputState; + const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const PipelineTessellationStateCreateInfo* pTessellationState; + const PipelineViewportStateCreateInfo* pViewportState; + const PipelineRasterizationStateCreateInfo* pRasterizationState; + const PipelineMultisampleStateCreateInfo* pMultisampleState; + const PipelineDepthStencilStateCreateInfo* pDepthStencilState; + const PipelineColorBlendStateCreateInfo* pColorBlendState; + const PipelineDynamicStateCreateInfo* pDynamicState; + PipelineLayout layout; + RenderPass renderPass; + uint32_t subpass; + Pipeline basePipelineHandle; + int32_t basePipelineIndex; + }; + static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceLimits + { + operator const VkPhysicalDeviceLimits&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceLimits const& rhs ) const + { + return ( maxImageDimension1D == rhs.maxImageDimension1D ) + && ( maxImageDimension2D == rhs.maxImageDimension2D ) + && ( maxImageDimension3D == rhs.maxImageDimension3D ) + && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) + && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) + && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) + && ( maxUniformBufferRange == rhs.maxUniformBufferRange ) + && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) + && ( maxPushConstantsSize == rhs.maxPushConstantsSize ) + && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) + && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) + && ( bufferImageGranularity == rhs.bufferImageGranularity ) + && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) + && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) + && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) + && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) + && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) + && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) + && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) + && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) + && ( maxPerStageResources == rhs.maxPerStageResources ) + && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) + && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) + && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) + && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) + && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) + && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) + && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) + && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) + && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) + && ( maxVertexInputBindings == rhs.maxVertexInputBindings ) + && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) + && ( maxVertexInputBindingStride == 
rhs.maxVertexInputBindingStride ) + && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) + && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) + && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) + && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) + && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) + && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) + && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) + && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) + && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) + && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) + && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) + && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) + && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) + && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) + && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) + && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) + && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) + && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) + && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) + && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 ) + && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) + && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 ) + && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) + && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) + && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) + && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) + && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) + && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) + && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) + && ( maxViewports == rhs.maxViewports ) + && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 ) + && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 ) + && ( viewportSubPixelBits == rhs.viewportSubPixelBits ) + && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) + && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) + && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) + && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) + && ( minTexelOffset == rhs.minTexelOffset ) + && ( maxTexelOffset == rhs.maxTexelOffset ) + && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) + && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) + && ( minInterpolationOffset == rhs.minInterpolationOffset ) + && ( maxInterpolationOffset == rhs.maxInterpolationOffset ) + && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) + && ( maxFramebufferWidth == rhs.maxFramebufferWidth ) + && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) + && ( maxFramebufferLayers == rhs.maxFramebufferLayers ) + && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) 
+ && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) + && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) + && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) + && ( maxColorAttachments == rhs.maxColorAttachments ) + && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) + && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) + && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) + && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) + && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) + && ( maxSampleMaskWords == rhs.maxSampleMaskWords ) + && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) + && ( timestampPeriod == rhs.timestampPeriod ) + && ( maxClipDistances == rhs.maxClipDistances ) + && ( maxCullDistances == rhs.maxCullDistances ) + && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) + && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) + && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 ) + && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 ) + && ( pointSizeGranularity == rhs.pointSizeGranularity ) + && ( lineWidthGranularity == rhs.lineWidthGranularity ) + && ( strictLines == rhs.strictLines ) + && ( standardSampleLocations == rhs.standardSampleLocations ) + && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) + && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) + && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); + } + + bool operator!=( PhysicalDeviceLimits const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + DeviceSize bufferImageGranularity; + DeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t 
maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + DeviceSize minTexelBufferOffsetAlignment; + DeviceSize minUniformBufferOffsetAlignment; + DeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + SampleCountFlags framebufferColorSampleCounts; + SampleCountFlags framebufferDepthSampleCounts; + SampleCountFlags framebufferStencilSampleCounts; + SampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + SampleCountFlags sampledImageColorSampleCounts; + SampleCountFlags sampledImageIntegerSampleCounts; + SampleCountFlags sampledImageDepthSampleCounts; + SampleCountFlags sampledImageStencilSampleCounts; + SampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + Bool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + Bool32 strictLines; + Bool32 standardSampleLocations; + DeviceSize optimalBufferCopyOffsetAlignment; + DeviceSize optimalBufferCopyRowPitchAlignment; + DeviceSize nonCoherentAtomSize; + }; + static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceProperties + { + operator const VkPhysicalDeviceProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceProperties const& rhs ) const + { + return ( apiVersion == rhs.apiVersion ) + && ( driverVersion == rhs.driverVersion ) + && ( vendorID == rhs.vendorID ) + && ( deviceID == rhs.deviceID ) + && ( deviceType == rhs.deviceType ) + && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 ) + && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 ) + && ( limits == rhs.limits ) + && ( sparseProperties == rhs.sparseProperties ); + } + + bool operator!=( PhysicalDeviceProperties const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + PhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + PhysicalDeviceLimits limits; + PhysicalDeviceSparseProperties sparseProperties; + }; + static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceProperties2 + { + operator const VkPhysicalDeviceProperties2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( properties == rhs.properties ); + } + + bool operator!=( PhysicalDeviceProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceProperties2; + + public: + void* pNext = nullptr; + PhysicalDeviceProperties properties; + }; + static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" ); + + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + + struct ImageFormatProperties2 + { + operator const VkImageFormatProperties2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImageFormatProperties2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( imageFormatProperties == rhs.imageFormatProperties ); + } + + bool operator!=( ImageFormatProperties2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImageFormatProperties2; + + public: + void* pNext = nullptr; + ImageFormatProperties imageFormatProperties; + }; + static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" 
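Usage sketch, not part of the generated header: the limits listed above are normally read through the extensible properties query, assuming the default vk namespace and that this header's Vulkan 1.1 entry point PhysicalDevice::getProperties2 is available; the helper name is hypothetical.

#include <vulkan/vulkan.hpp>

// Hypothetical helper: pull one value out of the PhysicalDeviceLimits block.
uint32_t queryMaxPushConstantsSize( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceProperties2 props2 = physicalDevice.getProperties2();
  const vk::PhysicalDeviceLimits & limits = props2.properties.limits;
  return limits.maxPushConstantsSize;   // other members follow the layout listed above
}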
); + + using ImageFormatProperties2KHR = ImageFormatProperties2; + + struct PhysicalDeviceSparseImageFormatInfo2 + { + PhysicalDeviceSparseImageFormatInfo2( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageUsageFlags usage_ = ImageUsageFlags(), ImageTiling tiling_ = ImageTiling::eOptimal ) + : format( format_ ) + , type( type_ ) + , samples( samples_ ) + , usage( usage_ ) + , tiling( tiling_ ) + { + } + + PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) ); + } + + PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) ); + return *this; + } + PhysicalDeviceSparseImageFormatInfo2& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setType( ImageType type_ ) + { + type = type_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setSamples( SampleCountFlagBits samples_ ) + { + samples = samples_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setUsage( ImageUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2& setTiling( ImageTiling tiling_ ) + { + tiling = tiling_; + return *this; + } + + operator const VkPhysicalDeviceSparseImageFormatInfo2&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( type == rhs.type ) + && ( samples == rhs.samples ) + && ( usage == rhs.usage ) + && ( tiling == rhs.tiling ); + } + + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + + public: + const void* pNext = nullptr; + Format format; + ImageType type; + SampleCountFlagBits samples; + ImageUsageFlags usage; + ImageTiling tiling; + }; + static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" 
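For illustration only: the setters above return *this, so the struct can be filled with a fluent chain and handed to the matching query; the getSparseImageFormatProperties2 call and the chosen format/usage values are assumptions of this sketch, not something the patch prescribes.

// Fragment, assuming <vulkan/vulkan.hpp> is included and 'gpu' is a valid vk::PhysicalDevice.
vk::PhysicalDeviceSparseImageFormatInfo2 sparseInfo;
sparseInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
          .setType( vk::ImageType::e2D )
          .setSamples( vk::SampleCountFlagBits::e1 )
          .setUsage( vk::ImageUsageFlagBits::eSampled )
          .setTiling( vk::ImageTiling::eOptimal );
std::vector<vk::SparseImageFormatProperties2> sparseProps =
    gpu.getSparseImageFormatProperties2( sparseInfo );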
); + + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + + struct SampleLocationsInfoEXT + { + SampleLocationsInfoEXT( SampleCountFlagBits sampleLocationsPerPixel_ = SampleCountFlagBits::e1, Extent2D sampleLocationGridSize_ = Extent2D(), uint32_t sampleLocationsCount_ = 0, const SampleLocationEXT* pSampleLocations_ = nullptr ) + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( sampleLocationsCount_ ) + , pSampleLocations( pSampleLocations_ ) + { + } + + SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SampleLocationsInfoEXT ) ); + } + + SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SampleLocationsInfoEXT ) ); + return *this; + } + SampleLocationsInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SampleLocationsInfoEXT& setSampleLocationsPerPixel( SampleCountFlagBits sampleLocationsPerPixel_ ) + { + sampleLocationsPerPixel = sampleLocationsPerPixel_; + return *this; + } + + SampleLocationsInfoEXT& setSampleLocationGridSize( Extent2D sampleLocationGridSize_ ) + { + sampleLocationGridSize = sampleLocationGridSize_; + return *this; + } + + SampleLocationsInfoEXT& setSampleLocationsCount( uint32_t sampleLocationsCount_ ) + { + sampleLocationsCount = sampleLocationsCount_; + return *this; + } + + SampleLocationsInfoEXT& setPSampleLocations( const SampleLocationEXT* pSampleLocations_ ) + { + pSampleLocations = pSampleLocations_; + return *this; + } + + operator const VkSampleLocationsInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SampleLocationsInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) + && ( sampleLocationGridSize == rhs.sampleLocationGridSize ) + && ( sampleLocationsCount == rhs.sampleLocationsCount ) + && ( pSampleLocations == rhs.pSampleLocations ); + } + + bool operator!=( SampleLocationsInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSampleLocationsInfoEXT; + + public: + const void* pNext = nullptr; + SampleCountFlagBits sampleLocationsPerPixel; + Extent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const SampleLocationEXT* pSampleLocations; + }; + static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" 
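A hedged example of filling SampleLocationsInfoEXT for a custom sample pattern; the 2x2 grid and the coordinates are made-up values, and VK_EXT_sample_locations must be enabled for any of this to matter.

// Fragment, assuming <vulkan/vulkan.hpp> and <array> are included.
std::array<vk::SampleLocationEXT, 4> locations = { {
  { 0.25f, 0.25f }, { 0.75f, 0.25f }, { 0.25f, 0.75f }, { 0.75f, 0.75f }
} };
vk::SampleLocationsInfoEXT sampleLocations;
sampleLocations.setSampleLocationsPerPixel( vk::SampleCountFlagBits::e1 )
               .setSampleLocationGridSize( vk::Extent2D( 2, 2 ) )
               .setSampleLocationsCount( static_cast<uint32_t>( locations.size() ) )
               .setPSampleLocations( locations.data() );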
); + + struct AttachmentSampleLocationsEXT + { + AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = 0, SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() ) + : attachmentIndex( attachmentIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + { + } + + AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentSampleLocationsEXT ) ); + } + + AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentSampleLocationsEXT ) ); + return *this; + } + AttachmentSampleLocationsEXT& setAttachmentIndex( uint32_t attachmentIndex_ ) + { + attachmentIndex = attachmentIndex_; + return *this; + } + + AttachmentSampleLocationsEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ ) + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator const VkAttachmentSampleLocationsEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AttachmentSampleLocationsEXT const& rhs ) const + { + return ( attachmentIndex == rhs.attachmentIndex ) + && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t attachmentIndex; + SampleLocationsInfoEXT sampleLocationsInfo; + }; + static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" ); + + struct SubpassSampleLocationsEXT + { + SubpassSampleLocationsEXT( uint32_t subpassIndex_ = 0, SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() ) + : subpassIndex( subpassIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + { + } + + SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassSampleLocationsEXT ) ); + } + + SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassSampleLocationsEXT ) ); + return *this; + } + SubpassSampleLocationsEXT& setSubpassIndex( uint32_t subpassIndex_ ) + { + subpassIndex = subpassIndex_; + return *this; + } + + SubpassSampleLocationsEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ ) + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator const VkSubpassSampleLocationsEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassSampleLocationsEXT const& rhs ) const + { + return ( subpassIndex == rhs.subpassIndex ) + && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( SubpassSampleLocationsEXT const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t subpassIndex; + SampleLocationsInfoEXT sampleLocationsInfo; + }; + static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" 
); + + struct RenderPassSampleLocationsBeginInfoEXT + { + RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0, const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr, uint32_t postSubpassSampleLocationsCount_ = 0, const SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr ) + : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) + , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) + , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) + , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) + { + } + + RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) ); + } + + RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassSampleLocationsBeginInfoEXT ) ); + return *this; + } + RenderPassSampleLocationsBeginInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT& setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) + { + attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT& setPAttachmentInitialSampleLocations( const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) + { + pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT& setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) + { + postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; + return *this; + } + + RenderPassSampleLocationsBeginInfoEXT& setPPostSubpassSampleLocations( const SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) + { + pPostSubpassSampleLocations = pPostSubpassSampleLocations_; + return *this; + } + + operator const VkRenderPassSampleLocationsBeginInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) + && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) + && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) + && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); + } + + bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t attachmentInitialSampleLocationsCount; + const AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const SubpassSampleLocationsEXT* pPostSubpassSampleLocations; + }; + static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" 
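Sketch (editor's aside): the attachment and subpass wrappers above feed into RenderPassSampleLocationsBeginInfoEXT, which is chained into RenderPassBeginInfo::pNext; sampleLocations is a previously filled vk::SampleLocationsInfoEXT and the indices are placeholders.

// Fragment, assuming <vulkan/vulkan.hpp> is included.
vk::AttachmentSampleLocationsEXT attachmentLocations( 0 /*attachmentIndex*/, sampleLocations );
vk::SubpassSampleLocationsEXT    subpassLocations( 0 /*subpassIndex*/, sampleLocations );

vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBegin;
sampleLocationsBegin.setAttachmentInitialSampleLocationsCount( 1 )
                    .setPAttachmentInitialSampleLocations( &attachmentLocations )
                    .setPostSubpassSampleLocationsCount( 1 )
                    .setPPostSubpassSampleLocations( &subpassLocations );

vk::RenderPassBeginInfo renderPassBegin;
renderPassBegin.setPNext( &sampleLocationsBegin );   // remaining renderPassBegin fields omitted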
); + + struct PipelineSampleLocationsStateCreateInfoEXT + { + PipelineSampleLocationsStateCreateInfoEXT( Bool32 sampleLocationsEnable_ = 0, SampleLocationsInfoEXT sampleLocationsInfo_ = SampleLocationsInfoEXT() ) + : sampleLocationsEnable( sampleLocationsEnable_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + { + } + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) ); + } + + PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) ); + return *this; + } + PipelineSampleLocationsStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT& setSampleLocationsEnable( Bool32 sampleLocationsEnable_ ) + { + sampleLocationsEnable = sampleLocationsEnable_; + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT& setSampleLocationsInfo( SampleLocationsInfoEXT sampleLocationsInfo_ ) + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator const VkPipelineSampleLocationsStateCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) + && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + Bool32 sampleLocationsEnable; + SampleLocationsInfoEXT sampleLocationsInfo; + }; + static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" ); + + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + operator const VkPhysicalDeviceSampleLocationsPropertiesEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) + && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) + && ( memcmp( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 ) + && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) + && ( variableSampleLocations == rhs.variableSampleLocations ); + } + + bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + + public: + void* pNext = nullptr; + SampleCountFlags sampleLocationSampleCounts; + Extent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + Bool32 variableSampleLocations; + }; + static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" 
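A minimal sketch of the pipeline side of the same extension: PipelineSampleLocationsStateCreateInfoEXT is chained into PipelineMultisampleStateCreateInfo::pNext; sampleLocations is again a previously filled vk::SampleLocationsInfoEXT.

// Fragment, assuming <vulkan/vulkan.hpp> is included.
vk::PipelineSampleLocationsStateCreateInfoEXT sampleLocationsState;
sampleLocationsState.setSampleLocationsEnable( VK_TRUE )
                    .setSampleLocationsInfo( sampleLocations );

vk::PipelineMultisampleStateCreateInfo multisampleState;
multisampleState.setRasterizationSamples( vk::SampleCountFlagBits::e1 )
                .setPNext( &sampleLocationsState );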
); + + enum class AttachmentDescriptionFlagBits + { + eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + }; + + using AttachmentDescriptionFlags = Flags; + + VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) + { + return AttachmentDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) + { + return ~( AttachmentDescriptionFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias) + }; + }; + + struct AttachmentDescription + { + AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined ) + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + { + } + + AttachmentDescription( VkAttachmentDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentDescription ) ); + } + + AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( AttachmentDescription ) ); + return *this; + } + AttachmentDescription& setFlags( AttachmentDescriptionFlags flags_ ) + { + flags = flags_; + return *this; + } + + AttachmentDescription& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + AttachmentDescription& setSamples( SampleCountFlagBits samples_ ) + { + samples = samples_; + return *this; + } + + AttachmentDescription& setLoadOp( AttachmentLoadOp loadOp_ ) + { + loadOp = loadOp_; + return *this; + } + + AttachmentDescription& setStoreOp( AttachmentStoreOp storeOp_ ) + { + storeOp = storeOp_; + return *this; + } + + AttachmentDescription& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + AttachmentDescription& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + AttachmentDescription& setInitialLayout( ImageLayout initialLayout_ ) + { + initialLayout = initialLayout_; + return *this; + } + + AttachmentDescription& setFinalLayout( ImageLayout finalLayout_ ) + { + finalLayout = finalLayout_; + return *this; + } + + operator const VkAttachmentDescription&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AttachmentDescription const& rhs ) const + { + return ( flags == rhs.flags ) + && ( format == rhs.format ) + && ( samples == rhs.samples ) + && ( loadOp == rhs.loadOp ) + && ( storeOp == rhs.storeOp ) + && ( stencilLoadOp == rhs.stencilLoadOp ) + && ( stencilStoreOp == rhs.stencilStoreOp ) + && ( initialLayout == rhs.initialLayout ) + && ( finalLayout == rhs.finalLayout ); + } + + bool operator!=( AttachmentDescription const& rhs ) const + { + return !operator==( rhs ); + } + + AttachmentDescriptionFlags flags; + Format format; + SampleCountFlagBits samples; + AttachmentLoadOp loadOp; + AttachmentStoreOp 
storeOp; + AttachmentLoadOp stencilLoadOp; + AttachmentStoreOp stencilStoreOp; + ImageLayout initialLayout; + ImageLayout finalLayout; + }; + static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" ); + + enum class StencilFaceFlagBits + { + eFront = VK_STENCIL_FACE_FRONT_BIT, + eBack = VK_STENCIL_FACE_BACK_BIT, + eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK + }; + + using StencilFaceFlags = Flags; + + VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) + { + return StencilFaceFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits ) + { + return ~( StencilFaceFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack) + }; + }; + + enum class DescriptorPoolCreateFlagBits + { + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT + }; + + using DescriptorPoolCreateFlags = Flags; + + VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) + { + return DescriptorPoolCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) + { + return ~( DescriptorPoolCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT) + }; + }; + + struct DescriptorPoolCreateInfo + { + DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), uint32_t maxSets_ = 0, uint32_t poolSizeCount_ = 0, const DescriptorPoolSize* pPoolSizes_ = nullptr ) + : flags( flags_ ) + , maxSets( maxSets_ ) + , poolSizeCount( poolSizeCount_ ) + , pPoolSizes( pPoolSizes_ ) + { + } + + DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) ); + } + + DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorPoolCreateInfo ) ); + return *this; + } + DescriptorPoolCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorPoolCreateInfo& setFlags( DescriptorPoolCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DescriptorPoolCreateInfo& setMaxSets( uint32_t maxSets_ ) + { + maxSets = maxSets_; + return *this; + } + + DescriptorPoolCreateInfo& setPoolSizeCount( uint32_t poolSizeCount_ ) + { + poolSizeCount = poolSizeCount_; + return *this; + } + + DescriptorPoolCreateInfo& setPPoolSizes( const DescriptorPoolSize* pPoolSizes_ ) + { + pPoolSizes = pPoolSizes_; + return *this; + } + + operator const VkDescriptorPoolCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorPoolCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( maxSets == rhs.maxSets ) + && ( poolSizeCount == rhs.poolSizeCount ) + && ( pPoolSizes == rhs.pPoolSizes ); + } + + bool operator!=( DescriptorPoolCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorPoolCreateInfo; + + 
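Usage sketch for the AttachmentDescription wrapper defined above: its nine-argument constructor maps one-to-one onto VkAttachmentDescription; the format below is a placeholder for a real swapchain format.

// Fragment, assuming <vulkan/vulkan.hpp> is included.
vk::AttachmentDescription colorAttachment(
    vk::AttachmentDescriptionFlags(),
    vk::Format::eB8G8R8A8Unorm,              // placeholder swapchain format
    vk::SampleCountFlagBits::e1,
    vk::AttachmentLoadOp::eClear,            // clear on load ...
    vk::AttachmentStoreOp::eStore,           // ... and keep the result
    vk::AttachmentLoadOp::eDontCare,         // stencil unused for a color attachment
    vk::AttachmentStoreOp::eDontCare,
    vk::ImageLayout::eUndefined,
    vk::ImageLayout::ePresentSrcKHR );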
public: + const void* pNext = nullptr; + DescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const DescriptorPoolSize* pPoolSizes; + }; + static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" ); + + enum class DependencyFlagBits + { + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT + }; + + using DependencyFlags = Flags; + + VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) + { + return DependencyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits ) + { + return ~( DependencyFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal) + }; + }; + + struct SubpassDependency + { + SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() ) + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + { + } + + SubpassDependency( VkSubpassDependency const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDependency ) ); + } + + SubpassDependency& operator=( VkSubpassDependency const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDependency ) ); + return *this; + } + SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ ) + { + srcSubpass = srcSubpass_; + return *this; + } + + SubpassDependency& setDstSubpass( uint32_t dstSubpass_ ) + { + dstSubpass = dstSubpass_; + return *this; + } + + SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ ) + { + srcStageMask = srcStageMask_; + return *this; + } + + SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ ) + { + dstStageMask = dstStageMask_; + return *this; + } + + SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ ) + { + srcAccessMask = srcAccessMask_; + return *this; + } + + SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ ) + { + dstAccessMask = dstAccessMask_; + return *this; + } + + SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ ) + { + dependencyFlags = dependencyFlags_; + return *this; + } + + operator const VkSubpassDependency&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassDependency const& rhs ) const + { + return ( srcSubpass == rhs.srcSubpass ) + && ( dstSubpass == rhs.dstSubpass ) + && ( srcStageMask == rhs.srcStageMask ) + && ( dstStageMask == rhs.dstStageMask ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( dependencyFlags == rhs.dependencyFlags ); + } + + bool operator!=( SubpassDependency const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t srcSubpass; + uint32_t dstSubpass; + PipelineStageFlags srcStageMask; + PipelineStageFlags dstStageMask; + AccessFlags srcAccessMask; + AccessFlags 
dstAccessMask; + DependencyFlags dependencyFlags; + }; + static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" ); + + enum class PresentModeKHR + { + eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR, + eMailbox = VK_PRESENT_MODE_MAILBOX_KHR, + eFifo = VK_PRESENT_MODE_FIFO_KHR, + eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, + eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR + }; + + enum class ColorSpaceKHR + { + eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, + eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, + eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT, + eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, + eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, + eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, + eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, + eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, + eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT + }; + + struct SurfaceFormatKHR + { + operator const VkSurfaceFormatKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SurfaceFormatKHR const& rhs ) const + { + return ( format == rhs.format ) + && ( colorSpace == rhs.colorSpace ); + } + + bool operator!=( SurfaceFormatKHR const& rhs ) const + { + return !operator==( rhs ); + } + + Format format; + ColorSpaceKHR colorSpace; + }; + static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" ); + + struct SurfaceFormat2KHR + { + operator const VkSurfaceFormat2KHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SurfaceFormat2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surfaceFormat == rhs.surfaceFormat ); + } + + bool operator!=( SurfaceFormat2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSurfaceFormat2KHR; + + public: + void* pNext = nullptr; + SurfaceFormatKHR surfaceFormat; + }; + static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" 
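Sketch of the SubpassDependency setters above in the usual external-to-first-subpass arrangement; VK_SUBPASS_EXTERNAL comes from the C header, and the stage/access choices are the common swapchain color-write pattern rather than anything mandated by this patch.

// Fragment, assuming <vulkan/vulkan.hpp> is included.
vk::SubpassDependency dependency;
dependency.setSrcSubpass( VK_SUBPASS_EXTERNAL )
          .setDstSubpass( 0 )
          .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
          .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
          .setSrcAccessMask( vk::AccessFlags() )
          .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite )
          .setDependencyFlags( vk::DependencyFlagBits::eByRegion );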
); + + enum class DisplayPlaneAlphaFlagBitsKHR + { + eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, + eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, + ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, + ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR + }; + + using DisplayPlaneAlphaFlagsKHR = Flags; + + VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) + { + return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) + { + return ~( DisplayPlaneAlphaFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) + }; + }; + + struct DisplayPlaneCapabilitiesKHR + { + operator const VkDisplayPlaneCapabilitiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const + { + return ( supportedAlpha == rhs.supportedAlpha ) + && ( minSrcPosition == rhs.minSrcPosition ) + && ( maxSrcPosition == rhs.maxSrcPosition ) + && ( minSrcExtent == rhs.minSrcExtent ) + && ( maxSrcExtent == rhs.maxSrcExtent ) + && ( minDstPosition == rhs.minDstPosition ) + && ( maxDstPosition == rhs.maxDstPosition ) + && ( minDstExtent == rhs.minDstExtent ) + && ( maxDstExtent == rhs.maxDstExtent ); + } + + bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + DisplayPlaneAlphaFlagsKHR supportedAlpha; + Offset2D minSrcPosition; + Offset2D maxSrcPosition; + Extent2D minSrcExtent; + Extent2D maxSrcExtent; + Offset2D minDstPosition; + Offset2D maxDstPosition; + Extent2D minDstExtent; + Extent2D maxDstExtent; + }; + static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" 
); + + enum class CompositeAlphaFlagBitsKHR + { + eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, + eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR + }; + + using CompositeAlphaFlagsKHR = Flags; + + VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) + { + return CompositeAlphaFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) + { + return ~( CompositeAlphaFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit) + }; + }; + + enum class SurfaceTransformFlagBitsKHR + { + eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, + eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, + eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, + eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, + eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, + eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, + eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, + eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, + eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + }; + + using SurfaceTransformFlagsKHR = Flags; + + VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) + { + return SurfaceTransformFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) + { + return ~( SurfaceTransformFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit) + }; + }; + + struct DisplayPropertiesKHR + { + operator const VkDisplayPropertiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPropertiesKHR const& rhs ) const + { + return ( display == rhs.display ) + && ( displayName == rhs.displayName ) + && ( physicalDimensions == rhs.physicalDimensions ) + && ( physicalResolution == rhs.physicalResolution ) + && ( supportedTransforms == rhs.supportedTransforms ) + && ( planeReorderPossible == rhs.planeReorderPossible ) + && ( persistentContent == rhs.persistentContent ); + } + + bool operator!=( DisplayPropertiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + DisplayKHR display; + const char* displayName; + Extent2D physicalDimensions; + Extent2D physicalResolution; + SurfaceTransformFlagsKHR supportedTransforms; + Bool32 planeReorderPossible; + Bool32 persistentContent; + }; + static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" 
); + + struct DisplaySurfaceCreateInfoKHR + { + DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() ) + : flags( flags_ ) + , displayMode( displayMode_ ) + , planeIndex( planeIndex_ ) + , planeStackIndex( planeStackIndex_ ) + , transform( transform_ ) + , globalAlpha( globalAlpha_ ) + , alphaMode( alphaMode_ ) + , imageExtent( imageExtent_ ) + { + } + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) ); + } + + DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplaySurfaceCreateInfoKHR ) ); + return *this; + } + DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ ) + { + displayMode = displayMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ ) + { + planeIndex = planeIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ ) + { + planeStackIndex = planeStackIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ ) + { + transform = transform_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ ) + { + globalAlpha = globalAlpha_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) + { + alphaMode = alphaMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ ) + { + imageExtent = imageExtent_; + return *this; + } + + operator const VkDisplaySurfaceCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( displayMode == rhs.displayMode ) + && ( planeIndex == rhs.planeIndex ) + && ( planeStackIndex == rhs.planeStackIndex ) + && ( transform == rhs.transform ) + && ( globalAlpha == rhs.globalAlpha ) + && ( alphaMode == rhs.alphaMode ) + && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + + public: + const void* pNext = nullptr; + DisplaySurfaceCreateFlagsKHR flags; + DisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + SurfaceTransformFlagBitsKHR transform; + float globalAlpha; + DisplayPlaneAlphaFlagBitsKHR alphaMode; + Extent2D imageExtent; + }; + static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" 
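A hedged aside on DisplaySurfaceCreateInfoKHR: creating a surface on a bare display plane with VK_KHR_display; displayMode, planeIndex and nativeExtent stand in for values obtained from the display-property and plane-capability queries above.

// Fragment, assuming <vulkan/vulkan.hpp> is included and 'instance' is a valid vk::Instance.
vk::DisplaySurfaceCreateInfoKHR displaySurfaceInfo;
displaySurfaceInfo.setDisplayMode( displayMode )
                  .setPlaneIndex( planeIndex )
                  .setPlaneStackIndex( 0 )
                  .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
                  .setGlobalAlpha( 1.0f )
                  .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
                  .setImageExtent( nativeExtent );
vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( displaySurfaceInfo );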
); + + struct SurfaceCapabilitiesKHR + { + operator const VkSurfaceCapabilitiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SurfaceCapabilitiesKHR const& rhs ) const + { + return ( minImageCount == rhs.minImageCount ) + && ( maxImageCount == rhs.maxImageCount ) + && ( currentExtent == rhs.currentExtent ) + && ( minImageExtent == rhs.minImageExtent ) + && ( maxImageExtent == rhs.maxImageExtent ) + && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) + && ( supportedTransforms == rhs.supportedTransforms ) + && ( currentTransform == rhs.currentTransform ) + && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) + && ( supportedUsageFlags == rhs.supportedUsageFlags ); + } + + bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + uint32_t minImageCount; + uint32_t maxImageCount; + Extent2D currentExtent; + Extent2D minImageExtent; + Extent2D maxImageExtent; + uint32_t maxImageArrayLayers; + SurfaceTransformFlagsKHR supportedTransforms; + SurfaceTransformFlagBitsKHR currentTransform; + CompositeAlphaFlagsKHR supportedCompositeAlpha; + ImageUsageFlags supportedUsageFlags; + }; + static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); + + struct SurfaceCapabilities2KHR + { + operator const VkSurfaceCapabilities2KHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SurfaceCapabilities2KHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surfaceCapabilities == rhs.surfaceCapabilities ); + } + + bool operator!=( SurfaceCapabilities2KHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSurfaceCapabilities2KHR; + + public: + void* pNext = nullptr; + SurfaceCapabilitiesKHR surfaceCapabilities; + }; + static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" 
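Illustrative swapchain-sizing sketch driven by SurfaceCapabilitiesKHR; the 1280x720 fallback is arbitrary, and physicalDevice/surface are hypothetical handles with VK_KHR_surface support.

// Fragment, assuming <vulkan/vulkan.hpp> and <algorithm> are included.
vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
vk::Extent2D extent = caps.currentExtent;
if ( extent.width == 0xFFFFFFFF )   // the surface lets the swapchain choose its own size
{
  extent.width  = std::max( caps.minImageExtent.width,  std::min( caps.maxImageExtent.width,  uint32_t( 1280 ) ) );
  extent.height = std::max( caps.minImageExtent.height, std::min( caps.maxImageExtent.height, uint32_t( 720 ) ) );
}
uint32_t imageCount = caps.minImageCount + 1;
if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )   // 0 means "no upper limit"
{
  imageCount = caps.maxImageCount;
}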
); + + enum class DebugReportFlagBitsEXT + { + eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, + eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, + ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, + eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, + eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT + }; + + using DebugReportFlagsEXT = Flags; + + VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) + { + return DebugReportFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) + { + return ~( DebugReportFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug) + }; + }; + + struct DebugReportCallbackCreateInfoEXT + { + DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr ) + : flags( flags_ ) + , pfnCallback( pfnCallback_ ) + , pUserData( pUserData_ ) + { + } + + DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) ); + } + + DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugReportCallbackCreateInfoEXT ) ); + return *this; + } + DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) + { + pfnCallback = pfnCallback_; + return *this; + } + + DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ ) + { + pUserData = pUserData_; + return *this; + } + + operator const VkDebugReportCallbackCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pfnCallback == rhs.pfnCallback ) + && ( pUserData == rhs.pUserData ); + } + + bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; + + public: + const void* pNext = nullptr; + DebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; + }; + static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" 
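Sketch of wiring a callback through VK_EXT_debug_report: the extension must be enabled on the instance, and with the default static dispatcher the vkCreateDebugReportCallbackEXT entry point must actually be resolvable (a dynamic dispatcher is the more portable route); names below are this example's own.

#include <iostream>
#include <vulkan/vulkan.hpp>

// Matches PFN_vkDebugReportCallbackEXT; prints the layer prefix and message.
VKAPI_ATTR VkBool32 VKAPI_CALL debugReportCallback(
    VkDebugReportFlagsEXT, VkDebugReportObjectTypeEXT, uint64_t, size_t,
    int32_t, const char * pLayerPrefix, const char * pMessage, void * )
{
  std::cerr << "[" << pLayerPrefix << "] " << pMessage << std::endl;
  return VK_FALSE;   // do not abort the call that triggered the report
}

vk::DebugReportCallbackEXT installDebugReport( vk::Instance instance )
{
  vk::DebugReportCallbackCreateInfoEXT callbackInfo(
      vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning,
      debugReportCallback );
  return instance.createDebugReportCallbackEXT( callbackInfo );
}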
); + + enum class DebugReportObjectTypeEXT + { + eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, + eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, + eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, + eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, + eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, + eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, + eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, + eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, + eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, + eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, + eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, + eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, + eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, + eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, + ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, + ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, + eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, + ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, + eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, + eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, + eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, + eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, + eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, + eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, + eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, + eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, + eDebugReportCallbackExt = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, + eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, + eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, + eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, + eValidationCacheExt = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT + }; + + struct DebugMarkerObjectNameInfoEXT + { + DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr ) + : objectType( objectType_ ) + , object( object_ ) + , pObjectName( pObjectName_ ) + { + } + + DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) ); + } + + DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerObjectNameInfoEXT ) ); + return *this; + } + DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ ) + { + objectType = objectType_; + return *this; + } + + DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ ) + { + object = object_; + return *this; + } + + 
DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ ) + { + pObjectName = pObjectName_; + return *this; + } + + operator const VkDebugMarkerObjectNameInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectType == rhs.objectType ) + && ( object == rhs.object ) + && ( pObjectName == rhs.pObjectName ); + } + + bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; + + public: + const void* pNext = nullptr; + DebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; + }; + static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" ); + + struct DebugMarkerObjectTagInfoEXT + { + DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr ) + : objectType( objectType_ ) + , object( object_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + { + } + + DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) ); + } + + DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugMarkerObjectTagInfoEXT ) ); + return *this; + } + DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ ) + { + objectType = objectType_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ ) + { + object = object_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ ) + { + tagName = tagName_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ ) + { + tagSize = tagSize_; + return *this; + } + + DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ ) + { + pTag = pTag_; + return *this; + } + + operator const VkDebugMarkerObjectTagInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectType == rhs.objectType ) + && ( object == rhs.object ) + && ( tagName == rhs.tagName ) + && ( tagSize == rhs.tagSize ) + && ( pTag == rhs.pTag ); + } + + bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; + + public: + const void* pNext = nullptr; + DebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; + }; + static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" 
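A hedged example of naming an object for debugging tools via VK_EXT_debug_marker; 'device' and 'vertexBuffer' are hypothetical handles and the extension must be enabled on the device.

// Fragment, assuming <vulkan/vulkan.hpp> is included.
vk::DebugMarkerObjectNameInfoEXT nameInfo;
nameInfo.setObjectType( vk::DebugReportObjectTypeEXT::eBuffer )
        .setObject( uint64_t( static_cast<VkBuffer>( vertexBuffer ) ) )
        .setPObjectName( "vertexBuffer" );
device.debugMarkerSetObjectNameEXT( nameInfo );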
);
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
+      : rasterizationOrder( rasterizationOrder_ )
+    {
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) );
+      return *this;
+    }
+    PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+
+    operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
+    }
+
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+
+  public:
+    const void* pNext = nullptr;
+    RasterizationOrderAMD rasterizationOrder;
+  };
+  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!"
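Sketch of opting a pipeline into relaxed rasterization order (VK_AMD_rasterization_order) by chaining the struct above into the rasterization state's pNext; the other rasterization settings are arbitrary placeholders.

// Fragment, assuming <vulkan/vulkan.hpp> is included.
vk::PipelineRasterizationStateRasterizationOrderAMD rasterizationOrder(
    vk::RasterizationOrderAMD::eRelaxed );

vk::PipelineRasterizationStateCreateInfo rasterizationState;
rasterizationState.setPolygonMode( vk::PolygonMode::eFill )
                  .setCullMode( vk::CullModeFlagBits::eBack )
                  .setFrontFace( vk::FrontFace::eCounterClockwise )
                  .setLineWidth( 1.0f )
                  .setPNext( &rasterizationOrder );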
); + + enum class ExternalMemoryHandleTypeFlagBitsNV + { + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, + eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, + eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV + }; + + using ExternalMemoryHandleTypeFlagsNV = Flags; + + VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) + { + return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) + { + return ~( ExternalMemoryHandleTypeFlagsNV( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) + }; + }; + + struct ExternalMemoryImageCreateInfoNV + { + ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() ) + : handleTypes( handleTypes_ ) + { + } + + ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) ); + } + + ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfoNV ) ); + return *this; + } + ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator const VkExternalMemoryImageCreateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagsNV handleTypes; + }; + static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" 
); + + struct ExportMemoryAllocateInfoNV + { + ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() ) + : handleTypes( handleTypes_ ) + { + } + + ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) ); + } + + ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfoNV ) ); + return *this; + } + ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator const VkExportMemoryAllocateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagsNV handleTypes; + }; + static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); + +#ifdef VK_USE_PLATFORM_WIN32_NV + struct ImportMemoryWin32HandleInfoNV + { + ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 ) + : handleType( handleType_ ) + , handle( handle_ ) + { + } + + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) ); + } + + ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoNV ) ); + return *this; + } + ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ ) + { + handle = handle_; + return *this; + } + + operator const VkImportMemoryWin32HandleInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ); + } + + bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagsNV handleType; + HANDLE handle; + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" 
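+  // Usage sketch (reviewer illustration): importing a Win32 handle with the NV external-memory
+  // extension; "externalHandle" is an assumed HANDLE obtained from another process or API and
+  // "allocateInfo" an assumed vk::MemoryAllocateInfo.
+  //   vk::ImportMemoryWin32HandleInfoNV importInfo(
+  //     vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32, externalHandle );
+  //   allocateInfo.setPNext( &importInfo );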
); +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + + enum class ExternalMemoryFeatureFlagBitsNV + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV + }; + + using ExternalMemoryFeatureFlagsNV = Flags; + + VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) + { + return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) + { + return ~( ExternalMemoryFeatureFlagsNV( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable) + }; + }; + + struct ExternalImageFormatPropertiesNV + { + operator const VkExternalImageFormatPropertiesNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const + { + return ( imageFormatProperties == rhs.imageFormatProperties ) + && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const + { + return !operator==( rhs ); + } + + ImageFormatProperties imageFormatProperties; + ExternalMemoryFeatureFlagsNV externalMemoryFeatures; + ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; + }; + static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" 
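+  // Usage sketch (reviewer illustration): inspecting the NV capability bits; "extProps" is an
+  // assumed vk::ExternalImageFormatPropertiesNV filled in by the
+  // VK_NV_external_memory_capabilities query.
+  //   if ( extProps.externalMemoryFeatures & vk::ExternalMemoryFeatureFlagBitsNV::eExportable )
+  //   {
+  //     // memory backing this format can be exported via extProps.compatibleHandleTypes
+  //   }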
); + + enum class ValidationCheckEXT + { + eAll = VK_VALIDATION_CHECK_ALL_EXT, + eShaders = VK_VALIDATION_CHECK_SHADERS_EXT + }; + + struct ValidationFlagsEXT + { + ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr ) + : disabledValidationCheckCount( disabledValidationCheckCount_ ) + , pDisabledValidationChecks( pDisabledValidationChecks_ ) + { + } + + ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) ); + } + + ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ValidationFlagsEXT ) ); + return *this; + } + ValidationFlagsEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) + { + disabledValidationCheckCount = disabledValidationCheckCount_; + return *this; + } + + ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ ) + { + pDisabledValidationChecks = pDisabledValidationChecks_; + return *this; + } + + operator const VkValidationFlagsEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ValidationFlagsEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) + && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); + } + + bool operator!=( ValidationFlagsEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eValidationFlagsEXT; + + public: + const void* pNext = nullptr; + uint32_t disabledValidationCheckCount; + ValidationCheckEXT* pDisabledValidationChecks; + }; + static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" 
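+  // Usage sketch (reviewer illustration): disabling shader validation through
+  // VK_EXT_validation_flags; "instanceCreateInfo" is an assumed vk::InstanceCreateInfo.
+  //   vk::ValidationCheckEXT disabledChecks[] = { vk::ValidationCheckEXT::eShaders };
+  //   vk::ValidationFlagsEXT validationFlags( 1, disabledChecks );
+  //   instanceCreateInfo.setPNext( &validationFlags );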
); + + enum class SubgroupFeatureFlagBits + { + eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, + eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, + eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, + eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, + eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, + eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, + eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, + eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, + ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV + }; + + using SubgroupFeatureFlags = Flags; + + VULKAN_HPP_INLINE SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) + { + return SubgroupFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) + { + return ~( SubgroupFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV) + }; + }; + + struct PhysicalDeviceSubgroupProperties + { + operator const VkPhysicalDeviceSubgroupProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( subgroupSize == rhs.subgroupSize ) + && ( supportedStages == rhs.supportedStages ) + && ( supportedOperations == rhs.supportedOperations ) + && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); + } + + bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; + + public: + void* pNext = nullptr; + uint32_t subgroupSize; + ShaderStageFlags supportedStages; + SubgroupFeatureFlags supportedOperations; + Bool32 quadOperationsInAllStages; + }; + static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" 
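+  // Usage sketch (reviewer illustration): reading subgroup capabilities by chaining the struct
+  // above into a Vulkan 1.1 properties query; "physicalDevice" is an assumed vk::PhysicalDevice
+  // and the getProperties2 wrapper is assumed to be declared elsewhere in this header.
+  //   vk::PhysicalDeviceSubgroupProperties subgroupProperties;
+  //   vk::PhysicalDeviceProperties2 properties2;
+  //   properties2.pNext = &subgroupProperties;
+  //   physicalDevice.getProperties2( &properties2 );
+  //   bool hasBallot = !!( subgroupProperties.supportedOperations &
+  //                        vk::SubgroupFeatureFlagBits::eBallot );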
); + + enum class IndirectCommandsLayoutUsageFlagBitsNVX + { + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX, + eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX, + eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX + }; + + using IndirectCommandsLayoutUsageFlagsNVX = Flags; + + VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) + { + return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits ) + { + return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) + }; + }; + + enum class ObjectEntryUsageFlagBitsNVX + { + eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX, + eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX + }; + + using ObjectEntryUsageFlagsNVX = Flags; + + VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) + { + return ObjectEntryUsageFlagsNVX( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits ) + { + return ~( ObjectEntryUsageFlagsNVX( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute) + }; + }; + + enum class IndirectCommandsTokenTypeNVX + { + ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, + eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX + }; + + struct IndirectCommandsTokenNVX + { + IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0 ) + : tokenType( tokenType_ ) + , buffer( buffer_ ) + , offset( offset_ ) + { + } + + IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) ); + } + + IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsTokenNVX ) ); + return *this; + } + IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ ) + { + tokenType = tokenType_; + return *this; + } + + IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ ) + { + offset = offset_; + return *this; + } + + operator const VkIndirectCommandsTokenNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( IndirectCommandsTokenNVX 
const& rhs ) const + { + return ( tokenType == rhs.tokenType ) + && ( buffer == rhs.buffer ) + && ( offset == rhs.offset ); + } + + bool operator!=( IndirectCommandsTokenNVX const& rhs ) const + { + return !operator==( rhs ); + } + + IndirectCommandsTokenTypeNVX tokenType; + Buffer buffer; + DeviceSize offset; + }; + static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" ); + + struct IndirectCommandsLayoutTokenNVX + { + IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::ePipeline, uint32_t bindingUnit_ = 0, uint32_t dynamicCount_ = 0, uint32_t divisor_ = 0 ) + : tokenType( tokenType_ ) + , bindingUnit( bindingUnit_ ) + , dynamicCount( dynamicCount_ ) + , divisor( divisor_ ) + { + } + + IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) ); + } + + IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) ); + return *this; + } + IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ ) + { + tokenType = tokenType_; + return *this; + } + + IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ ) + { + bindingUnit = bindingUnit_; + return *this; + } + + IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ ) + { + dynamicCount = dynamicCount_; + return *this; + } + + IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ ) + { + divisor = divisor_; + return *this; + } + + operator const VkIndirectCommandsLayoutTokenNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const + { + return ( tokenType == rhs.tokenType ) + && ( bindingUnit == rhs.bindingUnit ) + && ( dynamicCount == rhs.dynamicCount ) + && ( divisor == rhs.divisor ); + } + + bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const + { + return !operator==( rhs ); + } + + IndirectCommandsTokenTypeNVX tokenType; + uint32_t bindingUnit; + uint32_t dynamicCount; + uint32_t divisor; + }; + static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" 
); + + struct IndirectCommandsLayoutCreateInfoNVX + { + IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), uint32_t tokenCount_ = 0, const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr ) + : pipelineBindPoint( pipelineBindPoint_ ) + , flags( flags_ ) + , tokenCount( tokenCount_ ) + , pTokens( pTokens_ ) + { + } + + IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) ); + } + + IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( IndirectCommandsLayoutCreateInfoNVX ) ); + return *this; + } + IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ ) + { + tokenCount = tokenCount_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ ) + { + pTokens = pTokens_; + return *this; + } + + operator const VkIndirectCommandsLayoutCreateInfoNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( flags == rhs.flags ) + && ( tokenCount == rhs.tokenCount ) + && ( pTokens == rhs.pTokens ); + } + + bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX; + + public: + const void* pNext = nullptr; + PipelineBindPoint pipelineBindPoint; + IndirectCommandsLayoutUsageFlagsNVX flags; + uint32_t tokenCount; + const IndirectCommandsLayoutTokenNVX* pTokens; + }; + static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" 
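+  // Usage sketch (reviewer illustration): describing a two-token NVX indirect commands layout
+  // (a pipeline switch followed by an indexed draw). Actually creating the layout additionally
+  // needs a vk::Device with VK_NVX_device_generated_commands enabled (assumed).
+  //   vk::IndirectCommandsLayoutTokenNVX tokens[] = {
+  //     vk::IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX::ePipeline ),
+  //     vk::IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX::eDrawIndexed )
+  //   };
+  //   vk::IndirectCommandsLayoutCreateInfoNVX layoutInfo(
+  //     vk::PipelineBindPoint::eGraphics,
+  //     vk::IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences, 2, tokens );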
); + + enum class ObjectEntryTypeNVX + { + eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, + ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX, + eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX, + eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX, + ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX + }; + + struct ObjectTableCreateInfoNVX + { + ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, const uint32_t* pObjectEntryCounts_ = nullptr, const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, uint32_t maxUniformBuffersPerDescriptor_ = 0, uint32_t maxStorageBuffersPerDescriptor_ = 0, uint32_t maxStorageImagesPerDescriptor_ = 0, uint32_t maxSampledImagesPerDescriptor_ = 0, uint32_t maxPipelineLayouts_ = 0 ) + : objectCount( objectCount_ ) + , pObjectEntryTypes( pObjectEntryTypes_ ) + , pObjectEntryCounts( pObjectEntryCounts_ ) + , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ ) + , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ ) + , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ ) + , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ ) + , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ ) + , maxPipelineLayouts( maxPipelineLayouts_ ) + { + } + + ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) ); + } + + ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableCreateInfoNVX ) ); + return *this; + } + ObjectTableCreateInfoNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ ) + { + objectCount = objectCount_; + return *this; + } + + ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ ) + { + pObjectEntryTypes = pObjectEntryTypes_; + return *this; + } + + ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) + { + pObjectEntryCounts = pObjectEntryCounts_; + return *this; + } + + ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) + { + pObjectEntryUsageFlags = pObjectEntryUsageFlags_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) + { + maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) + { + maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) + { + maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) + { + maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) + { + maxPipelineLayouts = maxPipelineLayouts_; + return *this; + } + + operator const VkObjectTableCreateInfoNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableCreateInfoNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectCount == 
rhs.objectCount ) + && ( pObjectEntryTypes == rhs.pObjectEntryTypes ) + && ( pObjectEntryCounts == rhs.pObjectEntryCounts ) + && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags ) + && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor ) + && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor ) + && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor ) + && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor ) + && ( maxPipelineLayouts == rhs.maxPipelineLayouts ); + } + + bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eObjectTableCreateInfoNVX; + + public: + const void* pNext = nullptr; + uint32_t objectCount; + const ObjectEntryTypeNVX* pObjectEntryTypes; + const uint32_t* pObjectEntryCounts; + const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; + uint32_t maxUniformBuffersPerDescriptor; + uint32_t maxStorageBuffersPerDescriptor; + uint32_t maxStorageImagesPerDescriptor; + uint32_t maxSampledImagesPerDescriptor; + uint32_t maxPipelineLayouts; + }; + static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" ); + + struct ObjectTableEntryNVX + { + ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() ) + : type( type_ ) + , flags( flags_ ) + { + } + + ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) ); + } + + ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableEntryNVX ) ); + return *this; + } + ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + operator const VkObjectTableEntryNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ); + } + + bool operator!=( ObjectTableEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + }; + static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" 
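+  // Usage sketch (reviewer illustration): declaring an NVX object table sized for one pipeline
+  // and one descriptor-set entry; the table itself is created through the
+  // VK_NVX_device_generated_commands entry points on an assumed vk::Device.
+  //   vk::ObjectEntryTypeNVX entryTypes[] = { vk::ObjectEntryTypeNVX::ePipeline,
+  //                                           vk::ObjectEntryTypeNVX::eDescriptorSet };
+  //   uint32_t entryCounts[] = { 1, 1 };
+  //   vk::ObjectEntryUsageFlagsNVX entryUsages[] = { vk::ObjectEntryUsageFlagBitsNVX::eGraphics,
+  //                                                  vk::ObjectEntryUsageFlagBitsNVX::eGraphics };
+  //   vk::ObjectTableCreateInfoNVX tableInfo( 2, entryTypes, entryCounts, entryUsages,
+  //                                           1, 1, 1, 1, 1 );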
); + + struct ObjectTablePipelineEntryNVX + { + ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Pipeline pipeline_ = Pipeline() ) + : type( type_ ) + , flags( flags_ ) + , pipeline( pipeline_ ) + { + } + + ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) ); + } + + ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTablePipelineEntryNVX ) ); + return *this; + } + ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ ) + { + pipeline = pipeline_; + return *this; + } + + operator const VkObjectTablePipelineEntryNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipeline == rhs.pipeline ); + } + + bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + Pipeline pipeline; + }; + static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" ); + + struct ObjectTableDescriptorSetEntryNVX + { + ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), DescriptorSet descriptorSet_ = DescriptorSet() ) + : type( type_ ) + , flags( flags_ ) + , pipelineLayout( pipelineLayout_ ) + , descriptorSet( descriptorSet_ ) + { + } + + ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) ); + } + + ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) ); + return *this; + } + ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ ) + { + pipelineLayout = pipelineLayout_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ ) + { + descriptorSet = descriptorSet_; + return *this; + } + + operator const VkObjectTableDescriptorSetEntryNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipelineLayout == rhs.pipelineLayout ) + && ( descriptorSet == rhs.descriptorSet ); + } + + bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + PipelineLayout pipelineLayout; + DescriptorSet descriptorSet; + }; + static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( 
VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" ); + + struct ObjectTableVertexBufferEntryNVX + { + ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() ) + : type( type_ ) + , flags( flags_ ) + , buffer( buffer_ ) + { + } + + ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) ); + } + + ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) ); + return *this; + } + ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + operator const VkObjectTableVertexBufferEntryNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + Buffer buffer; + }; + static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" ); + + struct ObjectTableIndexBufferEntryNVX + { + ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer(), IndexType indexType_ = IndexType::eUint16 ) + : type( type_ ) + , flags( flags_ ) + , buffer( buffer_ ) + , indexType( indexType_ ) + { + } + + ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) ); + } + + ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) ); + return *this; + } + ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ ) + { + buffer = buffer_; + return *this; + } + + ObjectTableIndexBufferEntryNVX& setIndexType( IndexType indexType_ ) + { + indexType = indexType_; + return *this; + } + + operator const VkObjectTableIndexBufferEntryNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( buffer == rhs.buffer ) + && ( indexType == rhs.indexType ); + } + + bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + Buffer buffer; + IndexType indexType; + }; + static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" 
); + + struct ObjectTablePushConstantEntryNVX + { + ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), ShaderStageFlags stageFlags_ = ShaderStageFlags() ) + : type( type_ ) + , flags( flags_ ) + , pipelineLayout( pipelineLayout_ ) + , stageFlags( stageFlags_ ) + { + } + + ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) ); + } + + ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( ObjectTablePushConstantEntryNVX ) ); + return *this; + } + ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ ) + { + type = type_; + return *this; + } + + ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ ) + { + flags = flags_; + return *this; + } + + ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ ) + { + pipelineLayout = pipelineLayout_; + return *this; + } + + ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ ) + { + stageFlags = stageFlags_; + return *this; + } + + operator const VkObjectTablePushConstantEntryNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipelineLayout == rhs.pipelineLayout ) + && ( stageFlags == rhs.stageFlags ); + } + + bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const + { + return !operator==( rhs ); + } + + ObjectEntryTypeNVX type; + ObjectEntryUsageFlagsNVX flags; + PipelineLayout pipelineLayout; + ShaderStageFlags stageFlags; + }; + static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" 
); + + enum class DescriptorSetLayoutCreateFlagBits + { + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT + }; + + using DescriptorSetLayoutCreateFlags = Flags; + + VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) + { + return DescriptorSetLayoutCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) + { + return ~( DescriptorSetLayoutCreateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) | VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT) + }; + }; + + struct DescriptorSetLayoutCreateInfo + { + DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = DescriptorSetLayoutCreateFlags(), uint32_t bindingCount_ = 0, const DescriptorSetLayoutBinding* pBindings_ = nullptr ) + : flags( flags_ ) + , bindingCount( bindingCount_ ) + , pBindings( pBindings_ ) + { + } + + DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) ); + } + + DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutCreateInfo ) ); + return *this; + } + DescriptorSetLayoutCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorSetLayoutCreateInfo& setFlags( DescriptorSetLayoutCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + DescriptorSetLayoutCreateInfo& setBindingCount( uint32_t bindingCount_ ) + { + bindingCount = bindingCount_; + return *this; + } + + DescriptorSetLayoutCreateInfo& setPBindings( const DescriptorSetLayoutBinding* pBindings_ ) + { + pBindings = pBindings_; + return *this; + } + + operator const VkDescriptorSetLayoutCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( bindingCount == rhs.bindingCount ) + && ( pBindings == rhs.pBindings ); + } + + bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + + public: + const void* pNext = nullptr; + DescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const DescriptorSetLayoutBinding* pBindings; + }; + static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" 
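+  // Usage sketch (reviewer illustration): a one-binding layout created as a push-descriptor
+  // layout. "device" is an assumed vk::Device with VK_KHR_push_descriptor enabled;
+  // DescriptorSetLayoutBinding is defined earlier in this header.
+  //   vk::DescriptorSetLayoutBinding binding( 0, vk::DescriptorType::eUniformBuffer, 1,
+  //                                           vk::ShaderStageFlagBits::eVertex );
+  //   vk::DescriptorSetLayoutCreateInfo layoutInfo(
+  //     vk::DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR, 1, &binding );
+  //   vk::DescriptorSetLayout layout = device.createDescriptorSetLayout( layoutInfo );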
); + + enum class ExternalMemoryHandleTypeFlagBits + { + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueFdKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eOpaqueWin32KmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D11TextureKmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12HeapKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eD3D12ResourceKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, + eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, + eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT + }; + + using ExternalMemoryHandleTypeFlags = Flags; + + VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) + { + return ExternalMemoryHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) + { + return ~( ExternalMemoryHandleTypeFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) + }; + }; + + using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; + + struct PhysicalDeviceExternalImageFormatInfo + { + PhysicalDeviceExternalImageFormatInfo( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + : handleType( handleType_ ) + { + } + + PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) ); + } + + PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalImageFormatInfo ) ); + return *this; + } + PhysicalDeviceExternalImageFormatInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalImageFormatInfo& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkPhysicalDeviceExternalImageFormatInfo&() 
const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagBits handleType; + }; + static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" ); + + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + + struct PhysicalDeviceExternalBufferInfo + { + PhysicalDeviceExternalBufferInfo( BufferCreateFlags flags_ = BufferCreateFlags(), BufferUsageFlags usage_ = BufferUsageFlags(), ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + : flags( flags_ ) + , usage( usage_ ) + , handleType( handleType_ ) + { + } + + PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) ); + } + + PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalBufferInfo ) ); + return *this; + } + PhysicalDeviceExternalBufferInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalBufferInfo& setFlags( BufferCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + PhysicalDeviceExternalBufferInfo& setUsage( BufferUsageFlags usage_ ) + { + usage = usage_; + return *this; + } + + PhysicalDeviceExternalBufferInfo& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkPhysicalDeviceExternalBufferInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( usage == rhs.usage ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + + public: + const void* pNext = nullptr; + BufferCreateFlags flags; + BufferUsageFlags usage; + ExternalMemoryHandleTypeFlagBits handleType; + }; + static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" 
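+  // Usage sketch (reviewer illustration): asking which external handle types a buffer usage
+  // supports. "physicalDevice" is an assumed vk::PhysicalDevice; getExternalBufferProperties is
+  // assumed to wrap the Vulkan 1.1 vkGetPhysicalDeviceExternalBufferProperties entry point
+  // elsewhere in this header.
+  //   vk::PhysicalDeviceExternalBufferInfo bufferInfo(
+  //     vk::BufferCreateFlags(), vk::BufferUsageFlagBits::eTransferSrc,
+  //     vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   vk::ExternalBufferProperties bufferProps =
+  //     physicalDevice.getExternalBufferProperties( bufferInfo );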
); + + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + + struct ExternalMemoryImageCreateInfo + { + ExternalMemoryImageCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfo ) ); + } + + ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryImageCreateInfo ) ); + return *this; + } + ExternalMemoryImageCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExternalMemoryImageCreateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator const VkExternalMemoryImageCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalMemoryImageCreateInfo; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" ); + + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + + struct ExternalMemoryBufferCreateInfo + { + ExternalMemoryBufferCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfo ) ); + } + + ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExternalMemoryBufferCreateInfo ) ); + return *this; + } + ExternalMemoryBufferCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExternalMemoryBufferCreateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator const VkExternalMemoryBufferCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" 
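+  // Usage sketch (reviewer illustration): flagging a buffer for export through an opaque fd
+  // using the core (non-NV) external-memory structs; "bufferCreateInfo" is an assumed
+  // vk::BufferCreateInfo owned by the caller.
+  //   vk::ExternalMemoryBufferCreateInfo externalBufferInfo(
+  //     vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   bufferCreateInfo.setPNext( &externalBufferInfo );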
); + + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + + struct ExportMemoryAllocateInfo + { + ExportMemoryAllocateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = ExternalMemoryHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfo ) ); + } + + ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportMemoryAllocateInfo ) ); + return *this; + } + ExportMemoryAllocateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfo& setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator const VkExportMemoryAllocateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExportMemoryAllocateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportMemoryAllocateInfo; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" ); + + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportMemoryWin32HandleInfoKHR + { + ImportMemoryWin32HandleInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 ) + : handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) ); + } + + ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryWin32HandleInfoKHR ) ); + return *this; + } + ImportMemoryWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR& setHandle( HANDLE handle_ ) + { + handle = handle_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator const VkImportMemoryWin32HandleInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" 
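+  // Usage sketch (reviewer illustration): importing an exported Win32 memory handle;
+  // "externalHandle" is an assumed HANDLE and "allocateInfo" an assumed vk::MemoryAllocateInfo.
+  //   vk::ImportMemoryWin32HandleInfoKHR importInfo(
+  //     vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32, externalHandle );
+  //   allocateInfo.setPNext( &importInfo );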
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryGetWin32HandleInfoKHR + { + MemoryGetWin32HandleInfoKHR( DeviceMemory memory_ = DeviceMemory(), ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + : memory( memory_ ) + , handleType( handleType_ ) + { + } + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) ); + } + + MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetWin32HandleInfoKHR ) ); + return *this; + } + MemoryGetWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryGetWin32HandleInfoKHR& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + MemoryGetWin32HandleInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkMemoryGetWin32HandleInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + DeviceMemory memory; + ExternalMemoryHandleTypeFlagBits handleType; + }; + static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportMemoryFdInfoKHR + { + ImportMemoryFdInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = 0 ) + : handleType( handleType_ ) + , fd( fd_ ) + { + } + + ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) ); + } + + ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryFdInfoKHR ) ); + return *this; + } + ImportMemoryFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportMemoryFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportMemoryFdInfoKHR& setFd( int fd_ ) + { + fd = fd_; + return *this; + } + + operator const VkImportMemoryFdInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportMemoryFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportMemoryFdInfoKHR; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagBits handleType; + int fd; + }; + static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" 
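+  // Usage sketch (reviewer illustration): importing a POSIX fd exported from another Vulkan
+  // instance; "exportedFd" is an assumed int and "allocateInfo" an assumed
+  // vk::MemoryAllocateInfo. On successful allocation the implementation takes ownership of the fd.
+  //   vk::ImportMemoryFdInfoKHR importFdInfo(
+  //     vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, exportedFd );
+  //   allocateInfo.setPNext( &importFdInfo );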
); + + struct MemoryGetFdInfoKHR + { + MemoryGetFdInfoKHR( DeviceMemory memory_ = DeviceMemory(), ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + : memory( memory_ ) + , handleType( handleType_ ) + { + } + + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) ); + } + + MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryGetFdInfoKHR ) ); + return *this; + } + MemoryGetFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryGetFdInfoKHR& setMemory( DeviceMemory memory_ ) + { + memory = memory_; + return *this; + } + + MemoryGetFdInfoKHR& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkMemoryGetFdInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryGetFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryGetFdInfoKHR; + + public: + const void* pNext = nullptr; + DeviceMemory memory; + ExternalMemoryHandleTypeFlagBits handleType; + }; + static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" ); + + struct ImportMemoryHostPointerInfoEXT + { + ImportMemoryHostPointerInfoEXT( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void* pHostPointer_ = nullptr ) + : handleType( handleType_ ) + , pHostPointer( pHostPointer_ ) + { + } + + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) ); + } + + ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportMemoryHostPointerInfoEXT ) ); + return *this; + } + ImportMemoryHostPointerInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportMemoryHostPointerInfoEXT& setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportMemoryHostPointerInfoEXT& setPHostPointer( void* pHostPointer_ ) + { + pHostPointer = pHostPointer_; + return *this; + } + + operator const VkImportMemoryHostPointerInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( pHostPointer == rhs.pHostPointer ); + } + + bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + + public: + const void* pNext = nullptr; + ExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; + }; + static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" 
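+  // Usage sketch (reviewer illustration): importing host-allocated memory with
+  // VK_EXT_external_memory_host; "hostPtr" is an assumed pointer meeting the extension's
+  // alignment requirements and "allocateInfo" an assumed vk::MemoryAllocateInfo.
+  //   vk::ImportMemoryHostPointerInfoEXT importHostInfo(
+  //     vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );
+  //   allocateInfo.setPNext( &importHostInfo );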
); + + enum class ExternalMemoryFeatureFlagBits + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + eDedicatedOnlyKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eExportableKHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + eImportableKHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT + }; + + using ExternalMemoryFeatureFlags = Flags; + + VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) + { + return ExternalMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) + { + return ~( ExternalMemoryFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable) + }; + }; + + using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; + + struct ExternalMemoryProperties + { + operator const VkExternalMemoryProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalMemoryProperties const& rhs ) const + { + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + } + + bool operator!=( ExternalMemoryProperties const& rhs ) const + { + return !operator==( rhs ); + } + + ExternalMemoryFeatureFlags externalMemoryFeatures; + ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + ExternalMemoryHandleTypeFlags compatibleHandleTypes; + }; + static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" ); + + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + + struct ExternalImageFormatProperties + { + operator const VkExternalImageFormatProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalImageFormatProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalImageFormatProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalImageFormatProperties; + + public: + void* pNext = nullptr; + ExternalMemoryProperties externalMemoryProperties; + }; + static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" 
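+  // Usage sketch (reviewer illustration): checking whether a handle type can be imported for an
+  // image format; "extFormatProps" is an assumed vk::ExternalImageFormatProperties obtained by
+  // chaining it into a vkGetPhysicalDeviceImageFormatProperties2-style query.
+  //   vk::ExternalMemoryProperties const& memProps = extFormatProps.externalMemoryProperties;
+  //   bool importable =
+  //     !!( memProps.externalMemoryFeatures & vk::ExternalMemoryFeatureFlagBits::eImportable );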
); + + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + + struct ExternalBufferProperties + { + operator const VkExternalBufferProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalBufferProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( externalMemoryProperties == rhs.externalMemoryProperties ); + } + + bool operator!=( ExternalBufferProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalBufferProperties; + + public: + void* pNext = nullptr; + ExternalMemoryProperties externalMemoryProperties; + }; + static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" ); + + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + + enum class ExternalSemaphoreHandleTypeFlagBits + { + eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eOpaqueWin32KmtKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eD3D12FenceKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + eSyncFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT + }; + + using ExternalSemaphoreHandleTypeFlags = Flags; + + VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) + { + return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) + { + return ~( ExternalSemaphoreHandleTypeFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd) + }; + }; + + using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; + + struct PhysicalDeviceExternalSemaphoreInfo + { + PhysicalDeviceExternalSemaphoreInfo( ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + : handleType( handleType_ ) + { + } + + PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) ); + } + + PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalSemaphoreInfo ) ); + return *this; + } + PhysicalDeviceExternalSemaphoreInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalSemaphoreInfo& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkPhysicalDeviceExternalSemaphoreInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( 
PhysicalDeviceExternalSemaphoreInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + + public: + const void* pNext = nullptr; + ExternalSemaphoreHandleTypeFlagBits handleType; + }; + static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" ); + + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + + struct ExportSemaphoreCreateInfo + { + ExportSemaphoreCreateInfo( ExternalSemaphoreHandleTypeFlags handleTypes_ = ExternalSemaphoreHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfo ) ); + } + + ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportSemaphoreCreateInfo ) ); + return *this; + } + ExportSemaphoreCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreCreateInfo& setHandleTypes( ExternalSemaphoreHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator const VkExportSemaphoreCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExportSemaphoreCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportSemaphoreCreateInfo; + + public: + const void* pNext = nullptr; + ExternalSemaphoreHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" 
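An illustrative sketch, not part of the patch, of chaining ExportSemaphoreCreateInfo into a semaphore's create-info; `device` is assumed to exist, and createSemaphore is the wrapper generated elsewhere in this header for vkCreateSemaphore.

    // Create a semaphore whose payload can later be exported as an opaque fd.
    vk::ExportSemaphoreCreateInfo exportInfo = vk::ExportSemaphoreCreateInfo()
        .setHandleTypes( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
    vk::SemaphoreCreateInfo createInfo = vk::SemaphoreCreateInfo()
        .setPNext( &exportInfo );
    vk::Semaphore semaphore = device.createSemaphore( createInfo );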
);
+
+  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SemaphoreGetWin32HandleInfoKHR
+  {
+    SemaphoreGetWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(), ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
+      : semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
+    }
+
+    SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs )
+    {
+      memcpy( this, &rhs, sizeof( SemaphoreGetWin32HandleInfoKHR ) );
+      return *this;
+    }
+    SemaphoreGetWin32HandleInfoKHR& setPNext( const void* pNext_ )
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ )
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    SemaphoreGetWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ )
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator const VkSemaphoreGetWin32HandleInfoKHR&() const
+    {
+      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>(this);
+    }
+
+    bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( semaphore == rhs.semaphore )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+
+  public:
+    const void* pNext = nullptr;
+    Semaphore semaphore;
+    ExternalSemaphoreHandleTypeFlagBits handleType;
+  };
+  static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!"
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct SemaphoreGetFdInfoKHR + { + SemaphoreGetFdInfoKHR( Semaphore semaphore_ = Semaphore(), ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + : semaphore( semaphore_ ) + , handleType( handleType_ ) + { + } + + SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) ); + } + + SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SemaphoreGetFdInfoKHR ) ); + return *this; + } + SemaphoreGetFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SemaphoreGetFdInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkSemaphoreGetFdInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + + public: + const void* pNext = nullptr; + Semaphore semaphore; + ExternalSemaphoreHandleTypeFlagBits handleType; + }; + static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" ); + + enum class ExternalSemaphoreFeatureFlagBits + { + eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eExportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + eImportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT + }; + + using ExternalSemaphoreFeatureFlags = Flags; + + VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) + { + return ExternalSemaphoreFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) + { + return ~( ExternalSemaphoreFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable) + }; + }; + + using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; + + struct ExternalSemaphoreProperties + { + operator const VkExternalSemaphoreProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalSemaphoreProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) + && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); + } + + bool operator!=( ExternalSemaphoreProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalSemaphoreProperties; + + public: + void* pNext = nullptr; + ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + ExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + ExternalSemaphoreFeatureFlags externalSemaphoreFeatures; + 
}; + static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" ); + + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + + enum class SemaphoreImportFlagBits + { + eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + eTemporaryKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT + }; + + using SemaphoreImportFlags = Flags; + + VULKAN_HPP_INLINE SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) + { + return SemaphoreImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) + { + return ~( SemaphoreImportFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary) + }; + }; + + using SemaphoreImportFlagsKHR = SemaphoreImportFlags; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportSemaphoreWin32HandleInfoKHR + { + ImportSemaphoreWin32HandleInfoKHR( Semaphore semaphore_ = Semaphore(), SemaphoreImportFlags flags_ = SemaphoreImportFlags(), ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 ) + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) ); + } + + ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportSemaphoreWin32HandleInfoKHR ) ); + return *this; + } + ImportSemaphoreWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setFlags( SemaphoreImportFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setHandle( HANDLE handle_ ) + { + handle = handle_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator const VkImportSemaphoreWin32HandleInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + Semaphore semaphore; + SemaphoreImportFlags flags; + ExternalSemaphoreHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; + }; + static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportSemaphoreFdInfoKHR + { + ImportSemaphoreFdInfoKHR( Semaphore semaphore_ = Semaphore(), SemaphoreImportFlags flags_ = SemaphoreImportFlags(), ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = 0 ) + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + { + } + + ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) ); + } + + ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportSemaphoreFdInfoKHR ) ); + return *this; + } + ImportSemaphoreFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreFdInfoKHR& setSemaphore( Semaphore semaphore_ ) + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreFdInfoKHR& setFlags( SemaphoreImportFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImportSemaphoreFdInfoKHR& setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreFdInfoKHR& setFd( int fd_ ) + { + fd = fd_; + return *this; + } + + operator const VkImportSemaphoreFdInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; + + public: + const void* pNext = nullptr; + Semaphore semaphore; + SemaphoreImportFlags flags; + ExternalSemaphoreHandleTypeFlagBits handleType; + int fd; + }; + static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" 
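A sketch of importing an externally produced sync fd into an existing semaphore, not part of the patch; importSemaphoreFdKHR is the wrapper generated elsewhere in this header for vkImportSemaphoreFdKHR, and the function name and handles are illustrative.

    // Temporary import: the semaphore reverts to its prior state after the next wait completes.
    void importWaitFd( vk::Device device, vk::Semaphore semaphore, int fd )
    {
      vk::ImportSemaphoreFdInfoKHR importInfo = vk::ImportSemaphoreFdInfoKHR()
        .setSemaphore( semaphore )
        .setFlags( vk::SemaphoreImportFlagBits::eTemporary )
        .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
        .setFd( fd );  // ownership of fd passes to the implementation on success
      device.importSemaphoreFdKHR( importInfo );
    }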
); + + enum class ExternalFenceHandleTypeFlagBits + { + eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eOpaqueWin32KmtKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + eSyncFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT + }; + + using ExternalFenceHandleTypeFlags = Flags; + + VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) + { + return ExternalFenceHandleTypeFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) + { + return ~( ExternalFenceHandleTypeFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd) + }; + }; + + using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; + + struct PhysicalDeviceExternalFenceInfo + { + PhysicalDeviceExternalFenceInfo( ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + : handleType( handleType_ ) + { + } + + PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) ); + } + + PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PhysicalDeviceExternalFenceInfo ) ); + return *this; + } + PhysicalDeviceExternalFenceInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceExternalFenceInfo& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkPhysicalDeviceExternalFenceInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + + public: + const void* pNext = nullptr; + ExternalFenceHandleTypeFlagBits handleType; + }; + static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" 
); + + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + + struct ExportFenceCreateInfo + { + ExportFenceCreateInfo( ExternalFenceHandleTypeFlags handleTypes_ = ExternalFenceHandleTypeFlags() ) + : handleTypes( handleTypes_ ) + { + } + + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportFenceCreateInfo ) ); + } + + ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( ExportFenceCreateInfo ) ); + return *this; + } + ExportFenceCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ExportFenceCreateInfo& setHandleTypes( ExternalFenceHandleTypeFlags handleTypes_ ) + { + handleTypes = handleTypes_; + return *this; + } + + operator const VkExportFenceCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExportFenceCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportFenceCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExportFenceCreateInfo; + + public: + const void* pNext = nullptr; + ExternalFenceHandleTypeFlags handleTypes; + }; + static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" ); + + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct FenceGetWin32HandleInfoKHR + { + FenceGetWin32HandleInfoKHR( Fence fence_ = Fence(), ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + : fence( fence_ ) + , handleType( handleType_ ) + { + } + + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) ); + } + + FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceGetWin32HandleInfoKHR ) ); + return *this; + } + FenceGetWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + FenceGetWin32HandleInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + FenceGetWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkFenceGetWin32HandleInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + Fence fence; + ExternalFenceHandleTypeFlagBits handleType; + }; + static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct FenceGetFdInfoKHR + { + FenceGetFdInfoKHR( Fence fence_ = Fence(), ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + : fence( fence_ ) + , handleType( handleType_ ) + { + } + + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) ); + } + + FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( FenceGetFdInfoKHR ) ); + return *this; + } + FenceGetFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + FenceGetFdInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + FenceGetFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + operator const VkFenceGetFdInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( FenceGetFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( FenceGetFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eFenceGetFdInfoKHR; + + public: + const void* pNext = nullptr; + Fence fence; + ExternalFenceHandleTypeFlagBits handleType; + }; + static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" ); + + enum class ExternalFenceFeatureFlagBits + { + eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eExportableKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + eImportableKHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT + }; + + using ExternalFenceFeatureFlags = Flags; + + VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) + { + return ExternalFenceFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) + { + return ~( ExternalFenceFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable) + }; + }; + + using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; + + struct ExternalFenceProperties + { + operator const VkExternalFenceProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ExternalFenceProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) + && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) + && ( externalFenceFeatures == rhs.externalFenceFeatures ); + } + + bool operator!=( ExternalFenceProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eExternalFenceProperties; + + public: + void* pNext = nullptr; + ExternalFenceHandleTypeFlags exportFromImportedHandleTypes; + ExternalFenceHandleTypeFlags compatibleHandleTypes; + ExternalFenceFeatureFlags externalFenceFeatures; + }; + static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" 
); + + using ExternalFencePropertiesKHR = ExternalFenceProperties; + + enum class FenceImportFlagBits + { + eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT, + eTemporaryKHR = VK_FENCE_IMPORT_TEMPORARY_BIT + }; + + using FenceImportFlags = Flags; + + VULKAN_HPP_INLINE FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) + { + return FenceImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE FenceImportFlags operator~( FenceImportFlagBits bits ) + { + return ~( FenceImportFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(FenceImportFlagBits::eTemporary) + }; + }; + + using FenceImportFlagsKHR = FenceImportFlags; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportFenceWin32HandleInfoKHR + { + ImportFenceWin32HandleInfoKHR( Fence fence_ = Fence(), FenceImportFlags flags_ = FenceImportFlags(), ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = 0, LPCWSTR name_ = 0 ) + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) ); + } + + ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportFenceWin32HandleInfoKHR ) ); + return *this; + } + ImportFenceWin32HandleInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setFlags( FenceImportFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setHandle( HANDLE handle_ ) + { + handle = handle_; + return *this; + } + + ImportFenceWin32HandleInfoKHR& setName( LPCWSTR name_ ) + { + name = name_; + return *this; + } + + operator const VkImportFenceWin32HandleInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + + public: + const void* pNext = nullptr; + Fence fence; + FenceImportFlags flags; + ExternalFenceHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; + }; + static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportFenceFdInfoKHR + { + ImportFenceFdInfoKHR( Fence fence_ = Fence(), FenceImportFlags flags_ = FenceImportFlags(), ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = 0 ) + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + { + } + + ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) ); + } + + ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( ImportFenceFdInfoKHR ) ); + return *this; + } + ImportFenceFdInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + ImportFenceFdInfoKHR& setFence( Fence fence_ ) + { + fence = fence_; + return *this; + } + + ImportFenceFdInfoKHR& setFlags( FenceImportFlags flags_ ) + { + flags = flags_; + return *this; + } + + ImportFenceFdInfoKHR& setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) + { + handleType = handleType_; + return *this; + } + + ImportFenceFdInfoKHR& setFd( int fd_ ) + { + fd = fd_; + return *this; + } + + operator const VkImportFenceFdInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ImportFenceFdInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportFenceFdInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eImportFenceFdInfoKHR; + + public: + const void* pNext = nullptr; + Fence fence; + FenceImportFlags flags; + ExternalFenceHandleTypeFlagBits handleType; + int fd; + }; + static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" 
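The fence import path mirrors the semaphore one; a brief sketch, not part of the patch, assuming `device`, `fence`, and `fd` already exist and VK_KHR_external_fence_fd is enabled.

    vk::ImportFenceFdInfoKHR importInfo = vk::ImportFenceFdInfoKHR()
        .setFence( fence )
        .setFlags( vk::FenceImportFlagBits::eTemporary )
        .setHandleType( vk::ExternalFenceHandleTypeFlagBits::eSyncFd )
        .setFd( fd );
    device.importFenceFdKHR( importInfo );  // wrapper over vkImportFenceFdKHR, generated elsewhere in this header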
);
+
+  enum class SurfaceCounterFlagBitsEXT
+  {
+    eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
+  };
+
+  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
+
+  VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 )
+  {
+    return SurfaceCounterFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
+  {
+    return ~( SurfaceCounterFlagsEXT( bits ) );
+  }
+
+  template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
+  {
+    enum
+    {
+      allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
+    };
+  };
+
+  struct SurfaceCapabilities2EXT
+  {
+    operator const VkSurfaceCapabilities2EXT&() const
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>(this);
+    }
+
+    bool operator==( SurfaceCapabilities2EXT const& rhs ) const
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags )
+          && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
+    }
+
+    bool operator!=( SurfaceCapabilities2EXT const& rhs ) const
+    {
+      return !operator==( rhs );
+    }
+
+  private:
+    StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+
+  public:
+    void* pNext = nullptr;
+    uint32_t minImageCount;
+    uint32_t maxImageCount;
+    Extent2D currentExtent;
+    Extent2D minImageExtent;
+    Extent2D maxImageExtent;
+    uint32_t maxImageArrayLayers;
+    SurfaceTransformFlagsKHR supportedTransforms;
+    SurfaceTransformFlagBitsKHR currentTransform;
+    CompositeAlphaFlagsKHR supportedCompositeAlpha;
+    ImageUsageFlags supportedUsageFlags;
+    SurfaceCounterFlagsEXT supportedSurfaceCounters;
+  };
+  static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!"
); + + struct SwapchainCounterCreateInfoEXT + { + SwapchainCounterCreateInfoEXT( SurfaceCounterFlagsEXT surfaceCounters_ = SurfaceCounterFlagsEXT() ) + : surfaceCounters( surfaceCounters_ ) + { + } + + SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) ); + } + + SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SwapchainCounterCreateInfoEXT ) ); + return *this; + } + SwapchainCounterCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SwapchainCounterCreateInfoEXT& setSurfaceCounters( SurfaceCounterFlagsEXT surfaceCounters_ ) + { + surfaceCounters = surfaceCounters_; + return *this; + } + + operator const VkSwapchainCounterCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surfaceCounters == rhs.surfaceCounters ); + } + + bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + + public: + const void* pNext = nullptr; + SurfaceCounterFlagsEXT surfaceCounters; + }; + static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" ); + + enum class DisplayPowerStateEXT + { + eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, + eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, + eOn = VK_DISPLAY_POWER_STATE_ON_EXT + }; + + struct DisplayPowerInfoEXT + { + DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff ) + : powerState( powerState_ ) + { + } + + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) ); + } + + DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayPowerInfoEXT ) ); + return *this; + } + DisplayPowerInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayPowerInfoEXT& setPowerState( DisplayPowerStateEXT powerState_ ) + { + powerState = powerState_; + return *this; + } + + operator const VkDisplayPowerInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayPowerInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( powerState == rhs.powerState ); + } + + bool operator!=( DisplayPowerInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayPowerInfoEXT; + + public: + const void* pNext = nullptr; + DisplayPowerStateEXT powerState; + }; + static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" 
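An illustrative VK_EXT_display_control sketch, not part of the patch; displayPowerControlEXT is assumed to be the wrapper this header generates for vkDisplayPowerControlEXT, and `display` stands for a previously acquired vk::DisplayKHR.

    vk::DisplayPowerInfoEXT powerInfo = vk::DisplayPowerInfoEXT()
        .setPowerState( vk::DisplayPowerStateEXT::eSuspend );
    device.displayPowerControlEXT( display, powerInfo );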
); + + enum class DeviceEventTypeEXT + { + eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + }; + + struct DeviceEventInfoEXT + { + DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug ) + : deviceEvent( deviceEvent_ ) + { + } + + DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) ); + } + + DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceEventInfoEXT ) ); + return *this; + } + DeviceEventInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceEventInfoEXT& setDeviceEvent( DeviceEventTypeEXT deviceEvent_ ) + { + deviceEvent = deviceEvent_; + return *this; + } + + operator const VkDeviceEventInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceEventInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceEvent == rhs.deviceEvent ); + } + + bool operator!=( DeviceEventInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceEventInfoEXT; + + public: + const void* pNext = nullptr; + DeviceEventTypeEXT deviceEvent; + }; + static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" ); + + enum class DisplayEventTypeEXT + { + eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + }; + + struct DisplayEventInfoEXT + { + DisplayEventInfoEXT( DisplayEventTypeEXT displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut ) + : displayEvent( displayEvent_ ) + { + } + + DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) ); + } + + DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DisplayEventInfoEXT ) ); + return *this; + } + DisplayEventInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DisplayEventInfoEXT& setDisplayEvent( DisplayEventTypeEXT displayEvent_ ) + { + displayEvent = displayEvent_; + return *this; + } + + operator const VkDisplayEventInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DisplayEventInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayEvent == rhs.displayEvent ); + } + + bool operator!=( DisplayEventInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDisplayEventInfoEXT; + + public: + const void* pNext = nullptr; + DisplayEventTypeEXT displayEvent; + }; + static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" 
); + + enum class PeerMemoryFeatureFlagBits + { + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopySrcKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eCopyDstKHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericSrcKHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + eGenericDstKHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT + }; + + using PeerMemoryFeatureFlags = Flags; + + VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) + { + return PeerMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) + { + return ~( PeerMemoryFeatureFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst) + }; + }; + + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + enum class MemoryAllocateFlagBits + { + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + eDeviceMaskKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT + }; + + using MemoryAllocateFlags = Flags; + + VULKAN_HPP_INLINE MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) + { + return MemoryAllocateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) + { + return ~( MemoryAllocateFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) + }; + }; + + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + struct MemoryAllocateFlagsInfo + { + MemoryAllocateFlagsInfo( MemoryAllocateFlags flags_ = MemoryAllocateFlags(), uint32_t deviceMask_ = 0 ) + : flags( flags_ ) + , deviceMask( deviceMask_ ) + { + } + + MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfo ) ); + } + + MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( MemoryAllocateFlagsInfo ) ); + return *this; + } + MemoryAllocateFlagsInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + MemoryAllocateFlagsInfo& setFlags( MemoryAllocateFlags flags_ ) + { + flags = flags_; + return *this; + } + + MemoryAllocateFlagsInfo& setDeviceMask( uint32_t deviceMask_ ) + { + deviceMask = deviceMask_; + return *this; + } + + operator const VkMemoryAllocateFlagsInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( MemoryAllocateFlagsInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eMemoryAllocateFlagsInfo; + + public: + const void* pNext = nullptr; + MemoryAllocateFlags flags; + uint32_t deviceMask; + }; + static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" 
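A sketch of a device-group allocation using MemoryAllocateFlagsInfo, not part of the patch; `size` and `memoryTypeIndex` are placeholders chosen through the usual memory-requirements and memory-property queries, and allocateMemory is the wrapper generated elsewhere in this header for vkAllocateMemory.

    // Allocate memory replicated on physical devices 0 and 1 of the device group.
    vk::MemoryAllocateFlagsInfo flagsInfo = vk::MemoryAllocateFlagsInfo()
        .setFlags( vk::MemoryAllocateFlagBits::eDeviceMask )
        .setDeviceMask( 0x3 );
    vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
        .setAllocationSize( size )
        .setMemoryTypeIndex( memoryTypeIndex )
        .setPNext( &flagsInfo );
    vk::DeviceMemory memory = device.allocateMemory( allocInfo );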
); + + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; + + enum class DeviceGroupPresentModeFlagBitsKHR + { + eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, + eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, + eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, + eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR + }; + + using DeviceGroupPresentModeFlagsKHR = Flags; + + VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) + { + return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) + { + return ~( DeviceGroupPresentModeFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice) + }; + }; + + struct DeviceGroupPresentCapabilitiesKHR + { + operator const VkDeviceGroupPresentCapabilitiesKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 ) + && ( modes == rhs.modes ); + } + + bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; + + public: + const void* pNext = nullptr; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + DeviceGroupPresentModeFlagsKHR modes; + }; + static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" 
); + + struct DeviceGroupPresentInfoKHR + { + DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0, const uint32_t* pDeviceMasks_ = nullptr, DeviceGroupPresentModeFlagBitsKHR mode_ = DeviceGroupPresentModeFlagBitsKHR::eLocal ) + : swapchainCount( swapchainCount_ ) + , pDeviceMasks( pDeviceMasks_ ) + , mode( mode_ ) + { + } + + DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHR ) ); + } + + DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupPresentInfoKHR ) ); + return *this; + } + DeviceGroupPresentInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupPresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ ) + { + swapchainCount = swapchainCount_; + return *this; + } + + DeviceGroupPresentInfoKHR& setPDeviceMasks( const uint32_t* pDeviceMasks_ ) + { + pDeviceMasks = pDeviceMasks_; + return *this; + } + + DeviceGroupPresentInfoKHR& setMode( DeviceGroupPresentModeFlagBitsKHR mode_ ) + { + mode = mode_; + return *this; + } + + operator const VkDeviceGroupPresentInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchainCount == rhs.swapchainCount ) + && ( pDeviceMasks == rhs.pDeviceMasks ) + && ( mode == rhs.mode ); + } + + bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + + public: + const void* pNext = nullptr; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + DeviceGroupPresentModeFlagBitsKHR mode; + }; + static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" ); + + struct DeviceGroupSwapchainCreateInfoKHR + { + DeviceGroupSwapchainCreateInfoKHR( DeviceGroupPresentModeFlagsKHR modes_ = DeviceGroupPresentModeFlagsKHR() ) + : modes( modes_ ) + { + } + + DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) ); + } + + DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupSwapchainCreateInfoKHR ) ); + return *this; + } + DeviceGroupSwapchainCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupSwapchainCreateInfoKHR& setModes( DeviceGroupPresentModeFlagsKHR modes_ ) + { + modes = modes_; + return *this; + } + + operator const VkDeviceGroupSwapchainCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( modes == rhs.modes ); + } + + bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + + public: + const void* pNext = nullptr; + DeviceGroupPresentModeFlagsKHR modes; + }; + static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" 
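A brief sketch, not part of the patch, of requesting local-only device-group presentation when creating a swapchain; the struct is chained into SwapchainCreateInfoKHR::pNext, whose wrapper follows below.

    vk::DeviceGroupSwapchainCreateInfoKHR groupInfo = vk::DeviceGroupSwapchainCreateInfoKHR()
        .setModes( vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
    // later: swapchainCreateInfo.setPNext( &groupInfo );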
); + + enum class SwapchainCreateFlagBitsKHR + { + eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR + }; + + using SwapchainCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) + { + return SwapchainCreateFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) + { + return ~( SwapchainCreateFlagsKHR( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) + }; + }; + + struct SwapchainCreateInfoKHR + { + SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() ) + : flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + { + } + + SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) ); + } + + SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) + { + memcpy( this, &rhs, sizeof( SwapchainCreateInfoKHR ) ); + return *this; + } + SwapchainCreateInfoKHR& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ ) + { + flags = flags_; + return *this; + } + + SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ ) + { + surface = surface_; + return *this; + } + + SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ ) + { + minImageCount = minImageCount_; + return *this; + } + + SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ ) + { + imageFormat = imageFormat_; + return *this; + } + + SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ ) + { + imageColorSpace = imageColorSpace_; + return *this; + } + + SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ ) + { + imageExtent = imageExtent_; + return *this; + } + + SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ ) + { + imageArrayLayers = imageArrayLayers_; + return *this; + } + + SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ ) + { + imageUsage = imageUsage_; + return *this; + } 
+ + SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ ) + { + imageSharingMode = imageSharingMode_; + return *this; + } + + SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + + SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ ) + { + preTransform = preTransform_; + return *this; + } + + SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ ) + { + compositeAlpha = compositeAlpha_; + return *this; + } + + SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ ) + { + presentMode = presentMode_; + return *this; + } + + SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ ) + { + clipped = clipped_; + return *this; + } + + SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ ) + { + oldSwapchain = oldSwapchain_; + return *this; + } + + operator const VkSwapchainCreateInfoKHR&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SwapchainCreateInfoKHR const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( surface == rhs.surface ) + && ( minImageCount == rhs.minImageCount ) + && ( imageFormat == rhs.imageFormat ) + && ( imageColorSpace == rhs.imageColorSpace ) + && ( imageExtent == rhs.imageExtent ) + && ( imageArrayLayers == rhs.imageArrayLayers ) + && ( imageUsage == rhs.imageUsage ) + && ( imageSharingMode == rhs.imageSharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) + && ( preTransform == rhs.preTransform ) + && ( compositeAlpha == rhs.compositeAlpha ) + && ( presentMode == rhs.presentMode ) + && ( clipped == rhs.clipped ) + && ( oldSwapchain == rhs.oldSwapchain ); + } + + bool operator!=( SwapchainCreateInfoKHR const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSwapchainCreateInfoKHR; + + public: + const void* pNext = nullptr; + SwapchainCreateFlagsKHR flags; + SurfaceKHR surface; + uint32_t minImageCount; + Format imageFormat; + ColorSpaceKHR imageColorSpace; + Extent2D imageExtent; + uint32_t imageArrayLayers; + ImageUsageFlags imageUsage; + SharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + SurfaceTransformFlagBitsKHR preTransform; + CompositeAlphaFlagBitsKHR compositeAlpha; + PresentModeKHR presentMode; + Bool32 clipped; + SwapchainKHR oldSwapchain; + }; + static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" 
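A typical-usage sketch for the SwapchainCreateInfoKHR wrapper above, not part of the patch; `surface`, `imageCount`, `surfaceFormat`, `extent`, `capabilities`, `presentMode`, and `oldSwapchain` stand for the results of the usual surface queries, and createSwapchainKHR is the wrapper generated elsewhere in this header for vkCreateSwapchainKHR.

    vk::SwapchainCreateInfoKHR createInfo = vk::SwapchainCreateInfoKHR()
        .setSurface( surface )
        .setMinImageCount( imageCount )
        .setImageFormat( surfaceFormat.format )
        .setImageColorSpace( surfaceFormat.colorSpace )
        .setImageExtent( extent )
        .setImageArrayLayers( 1 )
        .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
        .setImageSharingMode( vk::SharingMode::eExclusive )
        .setPreTransform( capabilities.currentTransform )
        .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
        .setPresentMode( presentMode )
        .setClipped( VK_TRUE )
        .setOldSwapchain( oldSwapchain );
    vk::SwapchainKHR swapchain = device.createSwapchainKHR( createInfo );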
); + + enum class ViewportCoordinateSwizzleNV + { + ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, + ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, + eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, + ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, + eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, + ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, + eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV + }; + + struct ViewportSwizzleNV + { + ViewportSwizzleNV( ViewportCoordinateSwizzleNV x_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV y_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV z_ = ViewportCoordinateSwizzleNV::ePositiveX, ViewportCoordinateSwizzleNV w_ = ViewportCoordinateSwizzleNV::ePositiveX ) + : x( x_ ) + , y( y_ ) + , z( z_ ) + , w( w_ ) + { + } + + ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) ); + } + + ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) + { + memcpy( this, &rhs, sizeof( ViewportSwizzleNV ) ); + return *this; + } + ViewportSwizzleNV& setX( ViewportCoordinateSwizzleNV x_ ) + { + x = x_; + return *this; + } + + ViewportSwizzleNV& setY( ViewportCoordinateSwizzleNV y_ ) + { + y = y_; + return *this; + } + + ViewportSwizzleNV& setZ( ViewportCoordinateSwizzleNV z_ ) + { + z = z_; + return *this; + } + + ViewportSwizzleNV& setW( ViewportCoordinateSwizzleNV w_ ) + { + w = w_; + return *this; + } + + operator const VkViewportSwizzleNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( ViewportSwizzleNV const& rhs ) const + { + return ( x == rhs.x ) + && ( y == rhs.y ) + && ( z == rhs.z ) + && ( w == rhs.w ); + } + + bool operator!=( ViewportSwizzleNV const& rhs ) const + { + return !operator==( rhs ); + } + + ViewportCoordinateSwizzleNV x; + ViewportCoordinateSwizzleNV y; + ViewportCoordinateSwizzleNV z; + ViewportCoordinateSwizzleNV w; + }; + static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" 
); + + struct PipelineViewportSwizzleStateCreateInfoNV + { + PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateFlagsNV flags_ = PipelineViewportSwizzleStateCreateFlagsNV(), uint32_t viewportCount_ = 0, const ViewportSwizzleNV* pViewportSwizzles_ = nullptr ) + : flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewportSwizzles( pViewportSwizzles_ ) + { + } + + PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) ); + } + + PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) ); + return *this; + } + PipelineViewportSwizzleStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV& setFlags( PipelineViewportSwizzleStateCreateFlagsNV flags_ ) + { + flags = flags_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV& setViewportCount( uint32_t viewportCount_ ) + { + viewportCount = viewportCount_; + return *this; + } + + PipelineViewportSwizzleStateCreateInfoNV& setPViewportSwizzles( const ViewportSwizzleNV* pViewportSwizzles_ ) + { + pViewportSwizzles = pViewportSwizzles_; + return *this; + } + + operator const VkPipelineViewportSwizzleStateCreateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( viewportCount == rhs.viewportCount ) + && ( pViewportSwizzles == rhs.pViewportSwizzles ); + } + + bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + + public: + const void* pNext = nullptr; + PipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const ViewportSwizzleNV* pViewportSwizzles; + }; + static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" 
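An illustrative sketch, not part of the patch, of attaching per-viewport swizzles to a graphics pipeline's viewport state; `viewportState` is an existing vk::PipelineViewportStateCreateInfo, and its viewportCount must match the count given here.

    vk::ViewportSwizzleNV swizzle;  // all four components default to ePositiveX
    vk::PipelineViewportSwizzleStateCreateInfoNV swizzleState = vk::PipelineViewportSwizzleStateCreateInfoNV()
        .setViewportCount( 1 )
        .setPViewportSwizzles( &swizzle );
    viewportState.setPNext( &swizzleState );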
); + + enum class DiscardRectangleModeEXT + { + eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT + }; + + struct PipelineDiscardRectangleStateCreateInfoEXT + { + PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateFlagsEXT flags_ = PipelineDiscardRectangleStateCreateFlagsEXT(), DiscardRectangleModeEXT discardRectangleMode_ = DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = 0, const Rect2D* pDiscardRectangles_ = nullptr ) + : flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( discardRectangleCount_ ) + , pDiscardRectangles( pDiscardRectangles_ ) + { + } + + PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) ); + } + + PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) ); + return *this; + } + PipelineDiscardRectangleStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT& setFlags( PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleMode( DiscardRectangleModeEXT discardRectangleMode_ ) + { + discardRectangleMode = discardRectangleMode_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT& setDiscardRectangleCount( uint32_t discardRectangleCount_ ) + { + discardRectangleCount = discardRectangleCount_; + return *this; + } + + PipelineDiscardRectangleStateCreateInfoEXT& setPDiscardRectangles( const Rect2D* pDiscardRectangles_ ) + { + pDiscardRectangles = pDiscardRectangles_; + return *this; + } + + operator const VkPipelineDiscardRectangleStateCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( discardRectangleMode == rhs.discardRectangleMode ) + && ( discardRectangleCount == rhs.discardRectangleCount ) + && ( pDiscardRectangles == rhs.pDiscardRectangles ); + } + + bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + PipelineDiscardRectangleStateCreateFlagsEXT flags; + DiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const Rect2D* pDiscardRectangles; + }; + static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" 
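+  // Usage sketch (illustrative): discard rectangles are supplied at pipeline creation time by
+  // chaining this structure into vk::GraphicsPipelineCreateInfo::pNext; "pipelineCreateInfo"
+  // and "discardRect" (a vk::Rect2D) are assumed to exist.
+  //   vk::PipelineDiscardRectangleStateCreateInfoEXT discardState(
+  //       {}, vk::DiscardRectangleModeEXT::eInclusive, 1, &discardRect );
+  //   pipelineCreateInfo.setPNext( &discardState );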
); + + enum class SubpassDescriptionFlagBits + { + ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX + }; + + using SubpassDescriptionFlags = Flags; + + VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) + { + return SubpassDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) + { + return ~( SubpassDescriptionFlags( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) + }; + }; + + struct SubpassDescription + { + SubpassDescription( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = 0, const AttachmentReference* pInputAttachments_ = nullptr, uint32_t colorAttachmentCount_ = 0, const AttachmentReference* pColorAttachments_ = nullptr, const AttachmentReference* pResolveAttachments_ = nullptr, const AttachmentReference* pDepthStencilAttachment_ = nullptr, uint32_t preserveAttachmentCount_ = 0, const uint32_t* pPreserveAttachments_ = nullptr ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + { + } + + SubpassDescription( VkSubpassDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDescription ) ); + } + + SubpassDescription& operator=( VkSubpassDescription const & rhs ) + { + memcpy( this, &rhs, sizeof( SubpassDescription ) ); + return *this; + } + SubpassDescription& setFlags( SubpassDescriptionFlags flags_ ) + { + flags = flags_; + return *this; + } + + SubpassDescription& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription& setInputAttachmentCount( uint32_t inputAttachmentCount_ ) + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription& setPInputAttachments( const AttachmentReference* pInputAttachments_ ) + { + pInputAttachments = pInputAttachments_; + return *this; + } + + SubpassDescription& setColorAttachmentCount( uint32_t colorAttachmentCount_ ) + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription& setPColorAttachments( const AttachmentReference* pColorAttachments_ ) + { + pColorAttachments = pColorAttachments_; + return *this; + } + + SubpassDescription& setPResolveAttachments( const AttachmentReference* pResolveAttachments_ ) + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + + SubpassDescription& setPDepthStencilAttachment( const AttachmentReference* pDepthStencilAttachment_ ) + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription& 
setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + + operator const VkSubpassDescription&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SubpassDescription const& rhs ) const + { + return ( flags == rhs.flags ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( inputAttachmentCount == rhs.inputAttachmentCount ) + && ( pInputAttachments == rhs.pInputAttachments ) + && ( colorAttachmentCount == rhs.colorAttachmentCount ) + && ( pColorAttachments == rhs.pColorAttachments ) + && ( pResolveAttachments == rhs.pResolveAttachments ) + && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) + && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) + && ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription const& rhs ) const + { + return !operator==( rhs ); + } + + SubpassDescriptionFlags flags; + PipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const AttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const AttachmentReference* pColorAttachments; + const AttachmentReference* pResolveAttachments; + const AttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; + }; + static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" ); + + struct RenderPassCreateInfo + { + RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr ) + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + { + } + + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) ); + } + + RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( RenderPassCreateInfo ) ); + return *this; + } + RenderPassCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ ) + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ ) + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ ) + { + pAttachments = pAttachments_; + return *this; + } + + RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ ) + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ ) + { + pSubpasses = pSubpasses_; + return *this; + } + + RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ ) + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ ) + { + pDependencies = pDependencies_; + return *this; + } + + operator const VkRenderPassCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( RenderPassCreateInfo const& rhs ) const + { + 
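+      // Usage sketch (illustrative): a minimal single-subpass render pass, assuming
+      // "colorAttachment" (vk::AttachmentDescription), "colorRef" (vk::AttachmentReference)
+      // and "device" (vk::Device) are set up elsewhere.
+      //   vk::SubpassDescription subpass( {}, vk::PipelineBindPoint::eGraphics,
+      //                                   0, nullptr, 1, &colorRef );
+      //   vk::RenderPassCreateInfo renderPassInfo( {}, 1, &colorAttachment, 1, &subpass );
+      //   vk::RenderPass renderPass = device.createRenderPass( renderPassInfo );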
return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( subpassCount == rhs.subpassCount ) + && ( pSubpasses == rhs.pSubpasses ) + && ( dependencyCount == rhs.dependencyCount ) + && ( pDependencies == rhs.pDependencies ); + } + + bool operator!=( RenderPassCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eRenderPassCreateInfo; + + public: + const void* pNext = nullptr; + RenderPassCreateFlags flags; + uint32_t attachmentCount; + const AttachmentDescription* pAttachments; + uint32_t subpassCount; + const SubpassDescription* pSubpasses; + uint32_t dependencyCount; + const SubpassDependency* pDependencies; + }; + static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); + + enum class PointClippingBehavior + { + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eAllClipPlanesKHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + eUserClipPlanesOnlyKHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY + }; + + struct PhysicalDevicePointClippingProperties + { + operator const VkPhysicalDevicePointClippingProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pointClippingBehavior == rhs.pointClippingBehavior ); + } + + bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; + + public: + void* pNext = nullptr; + PointClippingBehavior pointClippingBehavior; + }; + static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" 
); + + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + + enum class SamplerReductionModeEXT + { + eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, + eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT, + eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT + }; + + struct SamplerReductionModeCreateInfoEXT + { + SamplerReductionModeCreateInfoEXT( SamplerReductionModeEXT reductionMode_ = SamplerReductionModeEXT::eWeightedAverage ) + : reductionMode( reductionMode_ ) + { + } + + SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) ); + } + + SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerReductionModeCreateInfoEXT ) ); + return *this; + } + SamplerReductionModeCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SamplerReductionModeCreateInfoEXT& setReductionMode( SamplerReductionModeEXT reductionMode_ ) + { + reductionMode = reductionMode_; + return *this; + } + + operator const VkSamplerReductionModeCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( reductionMode == rhs.reductionMode ); + } + + bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerReductionModeCreateInfoEXT; + + public: + const void* pNext = nullptr; + SamplerReductionModeEXT reductionMode; + }; + static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" 
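+  // Usage sketch (illustrative): min/max texel filtering is requested by chaining this
+  // structure into vk::SamplerCreateInfo::pNext; "samplerInfo" is assumed to exist.
+  //   vk::SamplerReductionModeCreateInfoEXT reduction( vk::SamplerReductionModeEXT::eMax );
+  //   samplerInfo.setPNext( &reduction );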
); + + enum class TessellationDomainOrigin + { + eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + eUpperLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + eLowerLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT + }; + + struct PipelineTessellationDomainOriginStateCreateInfo + { + PipelineTessellationDomainOriginStateCreateInfo( TessellationDomainOrigin domainOrigin_ = TessellationDomainOrigin::eUpperLeft ) + : domainOrigin( domainOrigin_ ) + { + } + + PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) ); + } + + PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) ); + return *this; + } + PipelineTessellationDomainOriginStateCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineTessellationDomainOriginStateCreateInfo& setDomainOrigin( TessellationDomainOrigin domainOrigin_ ) + { + domainOrigin = domainOrigin_; + return *this; + } + + operator const VkPipelineTessellationDomainOriginStateCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( domainOrigin == rhs.domainOrigin ); + } + + bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + + public: + const void* pNext = nullptr; + TessellationDomainOrigin domainOrigin; + }; + static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" 
); + + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; + + enum class SamplerYcbcrModelConversion + { + eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eRgbIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcrIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr709KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr601KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + eYcbcr2020KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 + }; + + enum class SamplerYcbcrRange + { + eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuFullKHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + eItuNarrowKHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW + }; + + enum class ChromaLocation + { + eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, + eCositedEvenKHR = VK_CHROMA_LOCATION_COSITED_EVEN, + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT, + eMidpointKHR = VK_CHROMA_LOCATION_MIDPOINT + }; + + struct SamplerYcbcrConversionCreateInfo + { + SamplerYcbcrConversionCreateInfo( Format format_ = Format::eUndefined, SamplerYcbcrModelConversion ycbcrModel_ = SamplerYcbcrModelConversion::eRgbIdentity, SamplerYcbcrRange ycbcrRange_ = SamplerYcbcrRange::eItuFull, ComponentMapping components_ = ComponentMapping(), ChromaLocation xChromaOffset_ = ChromaLocation::eCositedEven, ChromaLocation yChromaOffset_ = ChromaLocation::eCositedEven, Filter chromaFilter_ = Filter::eNearest, Bool32 forceExplicitReconstruction_ = 0 ) + : format( format_ ) + , ycbcrModel( ycbcrModel_ ) + , ycbcrRange( ycbcrRange_ ) + , components( components_ ) + , xChromaOffset( xChromaOffset_ ) + , yChromaOffset( yChromaOffset_ ) + , chromaFilter( chromaFilter_ ) + , forceExplicitReconstruction( forceExplicitReconstruction_ ) + { + } + + SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) ); + } + + SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SamplerYcbcrConversionCreateInfo ) ); + return *this; + } + SamplerYcbcrConversionCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setFormat( Format format_ ) + { + format = format_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setYcbcrModel( SamplerYcbcrModelConversion ycbcrModel_ ) + { + ycbcrModel = ycbcrModel_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setYcbcrRange( SamplerYcbcrRange ycbcrRange_ ) + { + ycbcrRange = ycbcrRange_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setComponents( ComponentMapping components_ ) + { + components = components_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setXChromaOffset( ChromaLocation xChromaOffset_ ) + { + xChromaOffset = xChromaOffset_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setYChromaOffset( ChromaLocation yChromaOffset_ ) + { + yChromaOffset = yChromaOffset_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& setChromaFilter( Filter chromaFilter_ ) + { + chromaFilter = chromaFilter_; + return *this; + } + + SamplerYcbcrConversionCreateInfo& 
setForceExplicitReconstruction( Bool32 forceExplicitReconstruction_ ) + { + forceExplicitReconstruction = forceExplicitReconstruction_; + return *this; + } + + operator const VkSamplerYcbcrConversionCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( ycbcrModel == rhs.ycbcrModel ) + && ( ycbcrRange == rhs.ycbcrRange ) + && ( components == rhs.components ) + && ( xChromaOffset == rhs.xChromaOffset ) + && ( yChromaOffset == rhs.yChromaOffset ) + && ( chromaFilter == rhs.chromaFilter ) + && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); + } + + bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; + + public: + const void* pNext = nullptr; + Format format; + SamplerYcbcrModelConversion ycbcrModel; + SamplerYcbcrRange ycbcrRange; + ComponentMapping components; + ChromaLocation xChromaOffset; + ChromaLocation yChromaOffset; + Filter chromaFilter; + Bool32 forceExplicitReconstruction; + }; + static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" ); + + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + struct AndroidHardwareBufferFormatPropertiesANDROID + { + operator const VkAndroidHardwareBufferFormatPropertiesANDROID&() const + { + return *reinterpret_cast(this); + } + + bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( externalFormat == rhs.externalFormat ) + && ( formatFeatures == rhs.formatFeatures ) + && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) + && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) + && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) + && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) + && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); + } + + bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + + public: + void* pNext = nullptr; + Format format; + uint64_t externalFormat; + FormatFeatureFlags formatFeatures; + ComponentMapping samplerYcbcrConversionComponents; + SamplerYcbcrModelConversion suggestedYcbcrModel; + SamplerYcbcrRange suggestedYcbcrRange; + ChromaLocation suggestedXChromaOffset; + ChromaLocation suggestedYChromaOffset; + }; + static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" 
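+  // Usage sketch (illustrative): the suggested* fields returned for an external Android
+  // hardware buffer are typically copied into a vk::SamplerYcbcrConversionCreateInfo;
+  // "formatProps" is assumed to be a previously queried format-properties structure.
+  //   vk::SamplerYcbcrConversionCreateInfo conversionInfo(
+  //       formatProps.format, formatProps.suggestedYcbcrModel, formatProps.suggestedYcbcrRange,
+  //       formatProps.samplerYcbcrConversionComponents, formatProps.suggestedXChromaOffset,
+  //       formatProps.suggestedYChromaOffset, vk::Filter::eNearest );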
); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + + enum class BlendOverlapEXT + { + eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, + eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT + }; + + struct PipelineColorBlendAdvancedStateCreateInfoEXT + { + PipelineColorBlendAdvancedStateCreateInfoEXT( Bool32 srcPremultiplied_ = 0, Bool32 dstPremultiplied_ = 0, BlendOverlapEXT blendOverlap_ = BlendOverlapEXT::eUncorrelated ) + : srcPremultiplied( srcPremultiplied_ ) + , dstPremultiplied( dstPremultiplied_ ) + , blendOverlap( blendOverlap_ ) + { + } + + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) ); + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) ); + return *this; + } + PipelineColorBlendAdvancedStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& setSrcPremultiplied( Bool32 srcPremultiplied_ ) + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& setDstPremultiplied( Bool32 dstPremultiplied_ ) + { + dstPremultiplied = dstPremultiplied_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& setBlendOverlap( BlendOverlapEXT blendOverlap_ ) + { + blendOverlap = blendOverlap_; + return *this; + } + + operator const VkPipelineColorBlendAdvancedStateCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcPremultiplied == rhs.srcPremultiplied ) + && ( dstPremultiplied == rhs.dstPremultiplied ) + && ( blendOverlap == rhs.blendOverlap ); + } + + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + Bool32 srcPremultiplied; + Bool32 dstPremultiplied; + BlendOverlapEXT blendOverlap; + }; + static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" 
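+  // Usage sketch (illustrative): advanced-blend behaviour is selected by chaining this
+  // structure into vk::PipelineColorBlendStateCreateInfo::pNext; "colorBlendState" is assumed.
+  //   vk::PipelineColorBlendAdvancedStateCreateInfoEXT advancedBlend(
+  //       VK_TRUE, VK_TRUE, vk::BlendOverlapEXT::eUncorrelated );
+  //   colorBlendState.setPNext( &advancedBlend );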
); + + enum class CoverageModulationModeNV + { + eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, + eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, + eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, + eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV + }; + + struct PipelineCoverageModulationStateCreateInfoNV + { + PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateFlagsNV flags_ = PipelineCoverageModulationStateCreateFlagsNV(), CoverageModulationModeNV coverageModulationMode_ = CoverageModulationModeNV::eNone, Bool32 coverageModulationTableEnable_ = 0, uint32_t coverageModulationTableCount_ = 0, const float* pCoverageModulationTable_ = nullptr ) + : flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( coverageModulationTableCount_ ) + , pCoverageModulationTable( pCoverageModulationTable_ ) + { + } + + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) ); + } + + PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineCoverageModulationStateCreateInfoNV ) ); + return *this; + } + PipelineCoverageModulationStateCreateInfoNV& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setFlags( PipelineCoverageModulationStateCreateFlagsNV flags_ ) + { + flags = flags_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationMode( CoverageModulationModeNV coverageModulationMode_ ) + { + coverageModulationMode = coverageModulationMode_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableEnable( Bool32 coverageModulationTableEnable_ ) + { + coverageModulationTableEnable = coverageModulationTableEnable_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) + { + coverageModulationTableCount = coverageModulationTableCount_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV& setPCoverageModulationTable( const float* pCoverageModulationTable_ ) + { + pCoverageModulationTable = pCoverageModulationTable_; + return *this; + } + + operator const VkPipelineCoverageModulationStateCreateInfoNV&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( coverageModulationMode == rhs.coverageModulationMode ) + && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) + && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) + && ( pCoverageModulationTable == rhs.pCoverageModulationTable ); + } + + bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + + public: + const void* pNext = nullptr; + PipelineCoverageModulationStateCreateFlagsNV flags; + CoverageModulationModeNV coverageModulationMode; + Bool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; + }; + static_assert( 
sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" ); + + enum class ValidationCacheHeaderVersionEXT + { + eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + }; + + enum class ShaderInfoTypeAMD + { + eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, + eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD + }; + + enum class QueueGlobalPriorityEXT + { + eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, + eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT, + eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT, + eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT + }; + + struct DeviceQueueGlobalPriorityCreateInfoEXT + { + DeviceQueueGlobalPriorityCreateInfoEXT( QueueGlobalPriorityEXT globalPriority_ = QueueGlobalPriorityEXT::eLow ) + : globalPriority( globalPriority_ ) + { + } + + DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) ); + } + + DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) ); + return *this; + } + DeviceQueueGlobalPriorityCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceQueueGlobalPriorityCreateInfoEXT& setGlobalPriority( QueueGlobalPriorityEXT globalPriority_ ) + { + globalPriority = globalPriority_; + return *this; + } + + operator const VkDeviceQueueGlobalPriorityCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( globalPriority == rhs.globalPriority ); + } + + bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT; + + public: + const void* pNext = nullptr; + QueueGlobalPriorityEXT globalPriority; + }; + static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" 
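+  // Usage sketch (illustrative): a global queue priority is requested per queue by chaining
+  // this structure into vk::DeviceQueueCreateInfo::pNext; "queueCreateInfo" is assumed.
+  //   vk::DeviceQueueGlobalPriorityCreateInfoEXT priority( vk::QueueGlobalPriorityEXT::eHigh );
+  //   queueCreateInfo.setPNext( &priority );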
); + + enum class DebugUtilsMessageSeverityFlagBitsEXT + { + eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, + eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT + }; + + using DebugUtilsMessageSeverityFlagsEXT = Flags; + + VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) + { + return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) + { + return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError) + }; + }; + + enum class DebugUtilsMessageTypeFlagBitsEXT + { + eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT + }; + + using DebugUtilsMessageTypeFlagsEXT = Flags; + + VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) + { + return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) + { + return ~( DebugUtilsMessageTypeFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance) + }; + }; + + struct DebugUtilsMessengerCreateInfoEXT + { + DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateFlagsEXT flags_ = DebugUtilsMessengerCreateFlagsEXT(), DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = DebugUtilsMessageSeverityFlagsEXT(), DebugUtilsMessageTypeFlagsEXT messageType_ = DebugUtilsMessageTypeFlagsEXT(), PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr, void* pUserData_ = nullptr ) + : flags( flags_ ) + , messageSeverity( messageSeverity_ ) + , messageType( messageType_ ) + , pfnUserCallback( pfnUserCallback_ ) + , pUserData( pUserData_ ) + { + } + + DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) ); + } + + DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DebugUtilsMessengerCreateInfoEXT ) ); + return *this; + } + DebugUtilsMessengerCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setFlags( DebugUtilsMessengerCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setMessageSeverity( DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) + { + messageSeverity = messageSeverity_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setMessageType( DebugUtilsMessageTypeFlagsEXT messageType_ ) + { + messageType = messageType_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setPfnUserCallback( 
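+    // Usage sketch (illustrative): severity and type masks are combined with operator| and a
+    // user callback of type PFN_vkDebugUtilsMessengerCallbackEXT; "debugCallback" is assumed.
+    //   vk::DebugUtilsMessengerCreateInfoEXT messengerInfo( {},
+    //       vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
+    //       vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
+    //       debugCallback );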
PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + DebugUtilsMessengerCreateInfoEXT& setPUserData( void* pUserData_ ) + { + pUserData = pUserData_; + return *this; + } + + operator const VkDebugUtilsMessengerCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( messageSeverity == rhs.messageSeverity ) + && ( messageType == rhs.messageType ) + && ( pfnUserCallback == rhs.pfnUserCallback ) + && ( pUserData == rhs.pUserData ); + } + + bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + + public: + const void* pNext = nullptr; + DebugUtilsMessengerCreateFlagsEXT flags; + DebugUtilsMessageSeverityFlagsEXT messageSeverity; + DebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; + }; + static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" ); + + enum class ConservativeRasterizationModeEXT + { + eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, + eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT + }; + + struct PipelineRasterizationConservativeStateCreateInfoEXT + { + PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = PipelineRasterizationConservativeStateCreateFlagsEXT(), ConservativeRasterizationModeEXT conservativeRasterizationMode_ = ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = 0 ) + : flags( flags_ ) + , conservativeRasterizationMode( conservativeRasterizationMode_ ) + , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) + { + } + + PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) ); + } + + PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) ); + return *this; + } + PipelineRasterizationConservativeStateCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT& setFlags( PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) + { + flags = flags_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT& setConservativeRasterizationMode( ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) + { + conservativeRasterizationMode = conservativeRasterizationMode_; + return *this; + } + + PipelineRasterizationConservativeStateCreateInfoEXT& setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) + { + extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; + return *this; + } + + operator const VkPipelineRasterizationConservativeStateCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( 
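+    // Usage sketch (illustrative): conservative rasterization is enabled by chaining this
+    // structure into vk::PipelineRasterizationStateCreateInfo::pNext; "rasterState" is assumed.
+    //   vk::PipelineRasterizationConservativeStateCreateInfoEXT conservative(
+    //       {}, vk::ConservativeRasterizationModeEXT::eOverestimate, 0.0f );
+    //   rasterState.setPNext( &conservative );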
PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) + && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); + } + + bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + + public: + const void* pNext = nullptr; + PipelineRasterizationConservativeStateCreateFlagsEXT flags; + ConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; + }; + static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" ); + + enum class DescriptorBindingFlagBitsEXT + { + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT, + eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT, + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT + }; + + using DescriptorBindingFlagsEXT = Flags; + + VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator|( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) + { + return DescriptorBindingFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE DescriptorBindingFlagsEXT operator~( DescriptorBindingFlagBitsEXT bits ) + { + return ~( DescriptorBindingFlagsEXT( bits ) ); + } + + template <> struct FlagTraits + { + enum + { + allFlags = VkFlags(DescriptorBindingFlagBitsEXT::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBitsEXT::ePartiallyBound) | VkFlags(DescriptorBindingFlagBitsEXT::eVariableDescriptorCount) + }; + }; + + struct DescriptorSetLayoutBindingFlagsCreateInfoEXT + { + DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0, const DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr ) + : bindingCount( bindingCount_ ) + , pBindingFlags( pBindingFlags_ ) + { + } + + DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) ); + } + + DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) + { + memcpy( this, &rhs, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) ); + return *this; + } + DescriptorSetLayoutBindingFlagsCreateInfoEXT& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DescriptorSetLayoutBindingFlagsCreateInfoEXT& setBindingCount( uint32_t bindingCount_ ) + { + bindingCount = bindingCount_; + return *this; + } + + DescriptorSetLayoutBindingFlagsCreateInfoEXT& setPBindingFlags( const DescriptorBindingFlagsEXT* pBindingFlags_ ) + { + pBindingFlags = pBindingFlags_; + return *this; + } + + operator const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( bindingCount == rhs.bindingCount ) + && ( pBindingFlags == rhs.pBindingFlags ); + } + + bool 
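+    // Usage sketch (illustrative): per-binding flags are combined with operator| and attached
+    // to vk::DescriptorSetLayoutCreateInfo via pNext; "layoutInfo" is assumed, and the flag
+    // array must match the layout's binding count.
+    //   vk::DescriptorBindingFlagsEXT bindingFlags =
+    //       vk::DescriptorBindingFlagBitsEXT::ePartiallyBound | vk::DescriptorBindingFlagBitsEXT::eUpdateAfterBind;
+    //   vk::DescriptorSetLayoutBindingFlagsCreateInfoEXT bindingFlagsInfo( 1, &bindingFlags );
+    //   layoutInfo.setPNext( &bindingFlagsInfo );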
operator!=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT; + + public: + const void* pNext = nullptr; + uint32_t bindingCount; + const DescriptorBindingFlagsEXT* pBindingFlags; + }; + static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "struct and wrapper have different size!" ); + + template + Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type enumerateInstanceVersion(Dispatch const &d = Dispatch() ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d) + { + return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type enumerateInstanceVersion(Dispatch const &d ) + { + uint32_t apiVersion; + Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); + return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template + Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d = Dispatch() ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d) + { + return static_cast( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties(Dispatch const &d ) + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template + Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName = nullptr, Dispatch const &d = Dispatch() ); +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d) + { + 
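+    // Usage sketch (illustrative): with the default exception-based configuration, the
+    // enhanced-mode overloads declared above return their payload directly, e.g.
+    //   uint32_t apiVersion = vk::enumerateInstanceVersion();
+    //   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
+    //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();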
return static_cast( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const &d ) + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + // forward declarations + struct CmdProcessCommandsInfoNVX; + + class CommandBuffer + { + public: + VULKAN_HPP_CONSTEXPR CommandBuffer() + : m_commandBuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) + : m_commandBuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) + : m_commandBuffer( commandBuffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + CommandBuffer & operator=(VkCommandBuffer commandBuffer) + { + m_commandBuffer = commandBuffer; + return *this; + } +#endif + + CommandBuffer & operator=( std::nullptr_t ) + { + m_commandBuffer = VK_NULL_HANDLE; + return *this; + } + + bool operator==( CommandBuffer const & rhs ) const + { + return m_commandBuffer == rhs.m_commandBuffer; + } + + bool operator!=(CommandBuffer const & rhs ) const + { + return m_commandBuffer != rhs.m_commandBuffer; + } + + bool operator<(CommandBuffer const & rhs ) const + { + return m_commandBuffer < rhs.m_commandBuffer; + } + + template + Result begin( const CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result end(Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type end(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result reset( CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type reset( CommandBufferResetFlags flags, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d = Dispatch() ) const; + + template + void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setScissor( uint32_t firstScissor, uint32_t 
scissorCount, const Rect2D* pScissors, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setLineWidth( float lineWidth, Dispatch const &d = Dispatch() ) const; + + template + void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = Dispatch() ) const; + + template + void setBlendConstants( const float blendConstants[4], Dispatch const &d = Dispatch() ) const; + + template + void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = Dispatch() ) const; + + template + void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = Dispatch() ) const; + + template + void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = Dispatch() ) const; + + template + void setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = Dispatch() ) const; + + template + void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d = Dispatch() ) const; + + template + void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const; + + template + void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = Dispatch() ) const; + + template + void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; + + template + void dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d = Dispatch() ) const; + + template + void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void 
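+    // Usage sketch (illustrative): a typical draw recorded through the enhanced-mode
+    // overloads; "cmd", "pipeline" and "vertexBuffer" are assumed to exist.
+    //   cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
+    //   vk::DeviceSize vertexOffset = 0;
+    //   cmd.bindVertexBuffers( 0, vertexBuffer, vertexOffset );
+    //   cmd.draw( 3, 1, 0, 0 );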
copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Filter filter, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy data, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d = Dispatch() ) const; + + template + void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void 
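+    // Usage sketch (illustrative): uploading staged data with the enhanced-mode overload;
+    // "cmd", "stagingBuffer", "image" and "region" (a vk::BufferImageCopy) are assumed.
+    //   cmd.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );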
clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const; + + template + void resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d = Dispatch() ) const; + + template + void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents( ArrayProxy events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d = Dispatch() ) const; + + template + void endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const; + + template + void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = Dispatch() ) const; + + template + void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d = Dispatch() ) const; + + template + void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; + + template + void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + 
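+    // Usage sketch (illustrative): the usual recording sequence with these members, assuming
+    // "beginInfo" (vk::CommandBufferBeginInfo) and "renderPassBegin" (vk::RenderPassBeginInfo)
+    // are filled in elsewhere.
+    //   cmd.begin( beginInfo );
+    //   cmd.beginRenderPass( renderPassBegin, vk::SubpassContents::eInline );
+    //   // ... draw calls ...
+    //   cmd.endRenderPass();
+    //   cmd.end();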
+ template + void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass( SubpassContents contents, Dispatch const &d = Dispatch() ) const; + + template + void endRenderPass(Dispatch const &d = Dispatch() ) const; + + template + void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerEndEXT(Dispatch const &d = Dispatch() ) const; + + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = Dispatch() ) const; + + template + void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDeviceMask( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const; + + template + void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = Dispatch() ) const; + + template + void dispatchBase( uint32_t baseGroupX, uint32_t 
baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; + + template + void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = Dispatch() ) const; + + template + void pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = Dispatch() ) const; + + template + void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const; + + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d = Dispatch() ) const; + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const + { + return m_commandBuffer; + } + + explicit operator bool() const + { + return m_commandBuffer != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_commandBuffer == VK_NULL_HANDLE; + } + + private: + VkCommandBuffer m_commandBuffer; + }; + + static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" 
); + + template + VULKAN_HPP_INLINE Result CommandBuffer::begin( const CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const + { + return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const + { + return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::end(Dispatch const &d ) const + { + Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const &d) const + { + return static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type CommandBuffer::reset( CommandBufferResetFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d) const + { + d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline, Dispatch const &d ) const + { + d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports, Dispatch const &d) const + { + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy viewports, Dispatch const &d ) const + { + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast( viewports.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors, Dispatch const &d) const + { + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy scissors, Dispatch const &d ) const + { + d.vkCmdSetScissor( m_commandBuffer, 
firstScissor, scissors.size() , reinterpret_cast( scissors.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const + { + d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const + { + d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const + { + d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const + { + d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const + { + d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const + { + d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const + { + d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const + { + d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const + { + d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const + { + d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const + { + d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const + { + d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t 
reference, Dispatch const &d) const + { + d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const + { + d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const + { + d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSetCount, reinterpret_cast( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d ) const + { + d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size() , reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d) const + { + d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), offset, static_cast( indexType ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType, Dispatch const &d ) const + { + d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), offset, static_cast( indexType ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets, Dispatch const &d) const + { + d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), pOffsets ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +#else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), offsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const + { + d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t 
firstVertex, uint32_t firstInstance, Dispatch const &d ) const + { + d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const + { + d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const + { + d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), offset, drawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const + { + d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const + { + d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d) const + { + d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), offset ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset, Dispatch const &d ) const + { + d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), offset ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions, Dispatch const &d) const + { + d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( 
dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions, Dispatch const &d) const + { + d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter, Dispatch const &d) const + { + d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ), static_cast( filter ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Filter filter, Dispatch const &d ) const + { + d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ), static_cast( filter ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d) const + { + d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions, Dispatch const &d) const + { + d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( 
dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData, Dispatch const &d) const + { + d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), dstOffset, dataSize, pData ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy data, Dispatch const &d ) const + { + d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d) const + { + d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), dstOffset, size, data ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data, Dispatch const &d ) const + { + d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), dstOffset, size, data ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d) const + { + d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), rangeCount, reinterpret_cast( pRanges ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d ) const + { + d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &color ), ranges.size() , reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges, Dispatch const &d) const + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), rangeCount, reinterpret_cast( pRanges ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d ) const + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size() , reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
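// --- Editor's note: a minimal usage sketch, not part of the generated header. ---
// It exercises the clearColorImage and updateBuffer wrappers defined above, assuming
// `cmd` is recording, `image` is in TransferDstOptimal layout, and `ubo` is a small,
// transfer-writable uniform buffer. Names are illustrative only; the updateBuffer
// overload computes dataSize as data.size() * sizeof( T ).
#include <array>
#include <vulkan/vulkan.hpp>

void clearAndUpload( vk::CommandBuffer cmd, vk::Image image, vk::Buffer ubo )
{
  // Clear every mip level and array layer of the color aspect to opaque black.
  vk::ClearColorValue black( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );
  vk::ImageSubresourceRange allColor( vk::ImageAspectFlagBits::eColor,
                                      0, VK_REMAINING_MIP_LEVELS,
                                      0, VK_REMAINING_ARRAY_LAYERS );
  cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, black, allColor );

  // Inline update of 16 bytes; the byte count is derived from the element type.
  std::array<float, 4> constants = { { 0.5f, 0.5f, 0.5f, 1.0f } };
  cmd.updateBuffer<float>( ubo, 0, constants );
}
// --- End editor's note. ---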
template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects, Dispatch const &d) const + { + d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast( pAttachments ), rectCount, reinterpret_cast( pRects ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d ) const + { + d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast( attachments.data() ), rects.size() , reinterpret_cast( rects.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions, Dispatch const &d) const + { + d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const + { + d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d) const + { + d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d ) const + { + d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d) const + { + d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask, Dispatch const &d ) const + { + d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const + { + d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast( pEvents ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); + } +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const + { + d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast( events.data() ), static_cast( srcStageMask ), static_cast( dstStageMask ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const + { + d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarrierCount, reinterpret_cast( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast( pImageMemoryBarriers ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d ) const + { + d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), memoryBarriers.size() , reinterpret_cast( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast( imageMemoryBarriers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d) const + { + d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags, Dispatch const &d ) const + { + d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d) const + { + d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query, Dispatch const &d ) const + { + d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const + { + d.vkCmdResetQueryPool( 
m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const + { + d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d) const + { + d.vkCmdWriteTimestamp( m_commandBuffer, static_cast( pipelineStage ), static_cast( queryPool ), query ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query, Dispatch const &d ) const + { + d.vkCmdWriteTimestamp( m_commandBuffer, static_cast( pipelineStage ), static_cast( queryPool ), query ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d) const + { + d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), dstOffset, stride, static_cast( flags ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags, Dispatch const &d ) const + { + d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), dstOffset, stride, static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const + { + d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, size, pValues ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d ) const + { + d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast( values.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents, Dispatch const &d) const + { + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents, Dispatch const &d ) const + { + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const &d) const + { + 
d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents, Dispatch const &d ) const + { + d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const + { + d.vkCmdEndRenderPass( m_commandBuffer ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const + { + d.vkCmdEndRenderPass( m_commandBuffer ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const + { + d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy commandBuffers, Dispatch const &d ) const + { + d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const + { + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const + { + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const + { + d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const + { + d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const + { + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const + { + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer 
), countBufferOffset, maxDrawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const + { + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const + { + d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast( buffer ), offset, static_cast( countBuffer ), countBufferOffset, maxDrawCount, stride ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const + { + d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( pProcessCommandsInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const + { + d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast( &processCommandsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const + { + d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( pReserveSpaceInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const + { + d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast( &reserveSpaceInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const + { + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d ) const + { + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const + { + d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const + { + d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + 
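// --- Editor's note: a minimal usage sketch, not part of the generated header. ---
// It shows the ArrayProxy overload of pushDescriptorSetKHR implemented above, under
// the assumptions that VK_KHR_push_descriptor is enabled, set 0 of `layout` was
// created with the push-descriptor flag, and the vkCmdPushDescriptorSetKHR entry
// point is reachable by the dispatcher (extension commands are normally fetched via
// vkGetDeviceProcAddr rather than linked statically). Names are illustrative only.
#include <vulkan/vulkan.hpp>

void pushUniform( vk::CommandBuffer cmd, vk::PipelineLayout layout,
                  vk::Buffer ubo, vk::DeviceSize range )
{
  vk::DescriptorBufferInfo bufferInfo( ubo, 0, range );

  vk::WriteDescriptorSet write;
  write.dstBinding      = 0;                   // binding 0 of set 0
  write.descriptorCount = 1;
  write.descriptorType  = vk::DescriptorType::eUniformBuffer;
  write.pBufferInfo     = &bufferInfo;         // dstSet is ignored for push descriptors

  // Enhanced-mode overload: descriptorWriteCount comes from the ArrayProxy.
  cmd.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, layout, 0, write );
}
// --- End editor's note. ---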
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const + { + d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const + { + d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const + { + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const + { + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const + { + d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const + { + d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const + { + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const + { + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const + { + d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy viewportWScalings, Dispatch const &d ) const + { + d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast( viewportWScalings.data() ) ); + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const Rect2D* pDiscardRectangles, Dispatch const &d) const + { + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d ) const + { + d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast( discardRectangles.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const + { + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( pSampleLocationsInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const + { + d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( &sampleLocationsInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const + { + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const + { + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const + { + d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const + { + d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const + { + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const + { + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const + { + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast( pipelineStage ), static_cast( dstBuffer ), dstOffset, marker ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( PipelineStageFlagBits pipelineStage, Buffer dstBuffer, DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const + { + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast( pipelineStage ), 
static_cast( dstBuffer ), dstOffset, marker ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + struct SubmitInfo + { + SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr ) + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , pWaitDstStageMask( pWaitDstStageMask_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBuffers( pCommandBuffers_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + { + } + + SubmitInfo( VkSubmitInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SubmitInfo ) ); + } + + SubmitInfo& operator=( VkSubmitInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( SubmitInfo ) ); + return *this; + } + SubmitInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ ) + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + + SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ ) + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + + SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ ) + { + commandBufferCount = commandBufferCount_; + return *this; + } + + SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ ) + { + pCommandBuffers = pCommandBuffers_; + return *this; + } + + SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ ) + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + + operator const VkSubmitInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( SubmitInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphores == rhs.pWaitSemaphores ) + && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) + && ( commandBufferCount == rhs.commandBufferCount ) + && ( pCommandBuffers == rhs.pCommandBuffers ) + && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) + && ( pSignalSemaphores == rhs.pSignalSemaphores ); + } + + bool operator!=( SubmitInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eSubmitInfo; + + public: + const void* pNext = nullptr; + uint32_t waitSemaphoreCount; + const Semaphore* pWaitSemaphores; + const PipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const CommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const Semaphore* pSignalSemaphores; + }; + static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" 
); + + class Queue + { + public: + VULKAN_HPP_CONSTEXPR Queue() + : m_queue(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) + : m_queue(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) + : m_queue( queue ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Queue & operator=(VkQueue queue) + { + m_queue = queue; + return *this; + } +#endif + + Queue & operator=( std::nullptr_t ) + { + m_queue = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Queue const & rhs ) const + { + return m_queue == rhs.m_queue; + } + + bool operator!=(Queue const & rhs ) const + { + return m_queue != rhs.m_queue; + } + + bool operator<(Queue const & rhs ) const + { + return m_queue < rhs.m_queue; + } + + template + Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type submit( ArrayProxy submits, Fence fence, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result waitIdle(Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type waitIdle(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindSparse( ArrayProxy bindInfo, Fence fence, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result presentKHR( const PresentInfoKHR* pPresentInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endDebugUtilsLabelEXT(Dispatch const &d = Dispatch() ) const; + + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const + { + return m_queue; + } + + explicit operator bool() const + { + return m_queue != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_queue == VK_NULL_HANDLE; + } + + private: + VkQueue m_queue; + }; + + static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" 
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::submit( ArrayProxy<const SubmitInfo> submits, Fence fence, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR* pPresentInfo, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Device;
+
+  template <> class UniqueHandleTraits<Buffer> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueBuffer = UniqueHandle<Buffer>;
+  template <> class UniqueHandleTraits<BufferView> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueBufferView = UniqueHandle<BufferView>;
+  template <> class UniqueHandleTraits<CommandBuffer> {public: using deleter = PoolFree<Device, CommandPool>; };
+  using UniqueCommandBuffer = UniqueHandle<CommandBuffer>;
+  template <> class UniqueHandleTraits<CommandPool> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueCommandPool = UniqueHandle<CommandPool>;
+  template <> class UniqueHandleTraits<DescriptorPool> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueDescriptorPool = UniqueHandle<DescriptorPool>;
+  template <> class UniqueHandleTraits<DescriptorSet> {public: using deleter = PoolFree<Device, DescriptorPool>; };
+  using UniqueDescriptorSet = UniqueHandle<DescriptorSet>;
+  template <> class UniqueHandleTraits<DescriptorSetLayout> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout>;
+  template <> class UniqueHandleTraits<DescriptorUpdateTemplate> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate>;
+  template <> class UniqueHandleTraits<DeviceMemory> {public: using deleter = ObjectFree<Device>; };
+  using UniqueDeviceMemory = UniqueHandle<DeviceMemory>;
+  template <> class UniqueHandleTraits<Event> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueEvent = UniqueHandle<Event>;
+  template <> class UniqueHandleTraits<Fence> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueFence = UniqueHandle<Fence>;
+  template <> class UniqueHandleTraits<Framebuffer> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueFramebuffer = UniqueHandle<Framebuffer>;
+  template <> class UniqueHandleTraits<Image> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueImage = UniqueHandle<Image>;
+  template <> class UniqueHandleTraits<ImageView> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueImageView = UniqueHandle<ImageView>;
+  template <> class UniqueHandleTraits<IndirectCommandsLayoutNVX> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueIndirectCommandsLayoutNVX = UniqueHandle<IndirectCommandsLayoutNVX>;
+  template <> class UniqueHandleTraits<ObjectTableNVX> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueObjectTableNVX = UniqueHandle<ObjectTableNVX>;
+  template <> class UniqueHandleTraits<Pipeline> {public: using deleter = ObjectDestroy<Device>; };
+  using UniquePipeline = UniqueHandle<Pipeline>;
+  template <> class UniqueHandleTraits<PipelineCache> {public: using deleter = ObjectDestroy<Device>; };
+  using UniquePipelineCache = UniqueHandle<PipelineCache>;
+  template <> class UniqueHandleTraits<PipelineLayout> {public: using deleter = ObjectDestroy<Device>; };
+  using UniquePipelineLayout = UniqueHandle<PipelineLayout>;
+  template <> class UniqueHandleTraits<QueryPool> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueQueryPool = UniqueHandle<QueryPool>;
+  template <> class UniqueHandleTraits<RenderPass> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueRenderPass = UniqueHandle<RenderPass>;
+  template <> class UniqueHandleTraits<Sampler> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueSampler = UniqueHandle<Sampler>;
+  template <> class UniqueHandleTraits<SamplerYcbcrConversion> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion>;
+  template <> class UniqueHandleTraits<Semaphore> {public: using deleter = ObjectDestroy<Device>; };
+  using UniqueSemaphore = UniqueHandle<Semaphore>;
+  template <> class
UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueShaderModule = UniqueHandle; + template <> class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueSwapchainKHR = UniqueHandle; + template <> class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueValidationCacheEXT = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Device + { + public: + VULKAN_HPP_CONSTEXPR Device() + : m_device(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) + : m_device(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) + : m_device( device ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Device & operator=(VkDevice device) + { + m_device = device; + return *this; + } +#endif + + Device & operator=( std::nullptr_t ) + { + m_device = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Device const & rhs ) const + { + return m_device == rhs.m_device; + } + + bool operator!=(Device const & rhs ) const + { + return m_device != rhs.m_device; + } + + bool operator<(Device const & rhs ) const + { + return m_device < rhs.m_device; + } + + template + PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result waitIdle(Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type waitIdle(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeMemory( DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( 
DeviceMemory memory, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void unmapMemory( DeviceMemory memory, Dispatch const &d = Dispatch() ) const; + + template + Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type flushMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceSize getMemoryCommitment( DeviceMemory memory, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements getBufferMemoryRequirements( Buffer buffer, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements getImageMemoryRequirements( Image image, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements( 
Image image, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createFence( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFence( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFence( Fence fence, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Fence fence, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result resetFences( uint32_t fenceCount, const Fence* pFences, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type resetFences( ArrayProxy fences, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getFenceStatus( Fence fence, Dispatch const &d = Dispatch() ) const; + + template + Result waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result waitForFences( ArrayProxy fences, Bool32 waitAll, uint64_t timeout, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySemaphore( Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Semaphore semaphore, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createEvent( 
const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createEvent( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createEventUnique( const EventCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyEvent( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyEvent( Event event, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Event event, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getEventStatus( Event event, Dispatch const &d = Dispatch() ) const; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result setEvent( Event event, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type setEvent( Event event, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetEvent( Event event, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type resetEvent( Event event, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyQueryPool( QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( QueryPool queryPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result 
getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, DeviceSize stride, QueryResultFlags flags, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createBuffer( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBuffer( Buffer buffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Buffer buffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBufferView( BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( BufferView bufferView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createImage( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createImageUnique( const ImageCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + 
template + void destroyImage( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImage( Image image, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Image image, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + SubresourceLayout getImageSubresourceLayout( Image image, const ImageSubresource & subresource, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createImageView( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImageView( ImageView imageView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( ImageView imageView, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyShaderModule( ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( ShaderModule shaderModule, const AllocationCallbacks* 
pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( ShaderModule shaderModule, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineCache( PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( PipelineCache pipelineCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPipelineCacheData( PipelineCache pipelineCache, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type mergePipelineCaches( PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + ResultValueType::type createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) 
const; + template , typename Dispatch = DispatchLoaderStatic> + ResultValueType::type createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + ResultValueType::type createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + ResultValueType::type createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipeline( Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Pipeline pipeline, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineLayout( PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( PipelineLayout 
pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( PipelineLayout pipelineLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSampler( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySampler( Sampler sampler, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Sampler sampler, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DescriptorSetLayout descriptorSetLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d 
= Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorPool( DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DescriptorPool descriptorPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result free( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type free( DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result 
createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFramebuffer( Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Framebuffer framebuffer, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyRenderPass( RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( RenderPass renderPass, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Extent2D getRenderAreaGranularity( RenderPass renderPass, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type 
createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyCommandPool( CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( CommandPool commandPool, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeCommandBuffers( CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void free( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void free( CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + ResultValueType::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template , typename Dispatch = 
DispatchLoaderStatic> + typename ResultValueType>::type createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; + template , typename Dispatch = DispatchLoaderStatic> + ResultValueType::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySwapchainKHR( SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( SwapchainKHR swapchain, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValue acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_NV + template + Result 
getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + + template + Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( ObjectTableNVX objectTable, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result registerObjectsNVX( 
ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy objectEntryTypes, ArrayProxy objectIndices, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const; + + template + void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = Dispatch() ) const; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + Result getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + 
template + Result importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + Result getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + Result getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type registerEventEXT( const DeviceEventInfoEXT & 
deviceEventInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindBufferMemory2( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindBufferMemory2( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindBufferMemory2KHR( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindBufferMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindImageMemory2( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindImageMemory2( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result bindImageMemory2KHR( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type bindImageMemory2KHR( ArrayProxy bindInfos, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = Dispatch() ) const; 
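The bindBufferMemory2 / bindImageMemory2 declarations above show the ArrayProxy convention used throughout the enhanced mode: the count-plus-pointer pair of the C API collapses into a single ArrayProxy parameter, which accepts an initializer list, a std::vector, a std::array, or a single element. A minimal sketch of binding two buffers to one allocation in a single call (editor's illustration, not part of the generated header; device, bufferA, bufferB, memory and offsetB are placeholders created elsewhere):

    #include <vulkan/vulkan.hpp>

    void bindTwoBuffers( vk::Device device, vk::Buffer bufferA, vk::Buffer bufferB,
                         vk::DeviceMemory memory, vk::DeviceSize offsetB )
    {
      // Each BindBufferMemoryInfo pairs a buffer with a range of 'memory'; in
      // enhanced mode the call returns void and throws vk::SystemError on failure.
      device.bindBufferMemory2( {
        vk::BindBufferMemoryInfo( bufferA, memory, 0 ),
        vk::BindBufferMemoryInfo( bufferB, memory, offsetB )
      } );
    }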
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getGroupPresentCapabilitiesKHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getGroupSurfacePresentModesKHR( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result acquireNextImage2KHR( const AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValue acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; 
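The descriptor update template functions declared above, together with updateDescriptorSetWithTemplate just below, let a descriptor set be written from a plain block of host memory instead of an array of WriteDescriptorSet structures. A minimal sketch of that flow follows; it is an editor's illustration, not part of the generated header, assumes the default enhanced mode, and uses placeholder objects (device, setLayout, descriptorSet, uniformBuffer) created elsewhere.

    #include <vulkan/vulkan.hpp>

    void writeUniformBufferViaTemplate( vk::Device device, vk::DescriptorSetLayout setLayout,
                                        vk::DescriptorSet descriptorSet, vk::Buffer uniformBuffer )
    {
      // One template entry: binding 0 holds a single uniform buffer, read from a
      // DescriptorBufferInfo located at offset 0 of the update data.
      vk::DescriptorUpdateTemplateEntry entry;
      entry.dstBinding      = 0;
      entry.dstArrayElement = 0;
      entry.descriptorCount = 1;
      entry.descriptorType  = vk::DescriptorType::eUniformBuffer;
      entry.offset          = 0;
      entry.stride          = sizeof( vk::DescriptorBufferInfo );

      vk::DescriptorUpdateTemplateCreateInfo createInfo;
      createInfo.descriptorUpdateEntryCount = 1;
      createInfo.pDescriptorUpdateEntries   = &entry;
      createInfo.templateType               = vk::DescriptorUpdateTemplateType::eDescriptorSet;
      createInfo.descriptorSetLayout        = setLayout;

      vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( createInfo );

      // The raw pointer handed to updateDescriptorSetWithTemplate is interpreted
      // according to the entries recorded in the template.
      vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
      device.updateDescriptorSetWithTemplate( descriptorSet, updateTemplate, &bufferInfo );

      device.destroyDescriptorUpdateTemplate( updateTemplate );
    }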
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const; + + template + void updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = Dispatch() ) const; + + template + void setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; + + template + Result getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template + StructureChain getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template + StructureChain getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template + StructureChain getImageMemoryRequirements2( const 
ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; + template + StructureChain getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversion( 
SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueue2( const DeviceQueueInfo2* pQueueInfo, Queue* pQueue, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createValidationCacheEXT( const ValidationCacheCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, ValidationCacheEXT* pValidationCache, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyValidationCacheEXT( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyValidationCacheEXT( ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( ValidationCacheEXT validationCache, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result mergeValidationCachesEXT( ValidationCacheEXT dstCache, uint32_t srcCacheCount, const ValidationCacheEXT* pSrcCaches, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const 
&d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; + template + StructureChain getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; + template + StructureChain getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template + Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = Dispatch() ) const; + template + typename ResultValueType>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = 
Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template + Result getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const + { + return m_device; + } + + explicit operator bool() const + { + return m_device != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_device == VK_NULL_HANDLE; + } + + private: + VkDevice m_device; + }; + + static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" ); + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const + { + return d.vkGetDeviceProcAddr( m_device, pName ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const &d ) const + { + return d.vkGetDeviceProcAddr( m_device, name.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDevice( m_device, reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue, Dispatch const &d) const + { + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const + { + Queue queue; + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( &queue ) ); + return queue; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const + { + return static_cast( d.vkDeviceWaitIdle( m_device ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::waitIdle(Dispatch const &d ) const + { + Result result = static_cast( d.vkDeviceWaitIdle( m_device ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory, Dispatch const &d) const + { + return static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMemory ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional 
allocator, Dispatch const &d ) const + { + DeviceMemory memory; + Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); + return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional allocator, Dispatch const &d ) const + { + DeviceMemory memory; + Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); + + ObjectFree deleter( *this, allocator ); + return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemoryUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, Optional allocator, Dispatch const &d ) const + { + d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( DeviceMemory memory, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( DeviceMemory memory, Optional allocator, Dispatch const &d ) const + { + d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData, Dispatch const &d) const + { + return static_cast( d.vkMapMemory( m_device, static_cast( memory ), offset, size, static_cast( flags ), ppData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, Dispatch const &d ) const + { + void* pData; + Result result = static_cast( d.vkMapMemory( m_device, static_cast( memory ), offset, size, static_cast( flags ), &pData ) ); + return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const &d) const + { + d.vkUnmapMemory( m_device, static_cast( memory ) ); + } +#else + template + VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory, Dispatch const &d ) const + { + d.vkUnmapMemory( m_device, static_cast( memory ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d) const + { + return static_cast( d.vkFlushMappedMemoryRanges( 
m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::flushMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d ) const + { + Result result = static_cast( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges, Dispatch const &d) const + { + return static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::invalidateMappedMemoryRanges( ArrayProxy memoryRanges, Dispatch const &d ) const + { + Result result = static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast( memoryRanges.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const + { + d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), pCommittedMemoryInBytes ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceSize Device::getMemoryCommitment( DeviceMemory memory, Dispatch const &d ) const + { + DeviceSize committedMemoryInBytes; + d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), &committedMemoryInBytes ); + return committedMemoryInBytes; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements Device::getBufferMemoryRequirements( Buffer buffer, Dispatch const &d ) const + { + MemoryRequirements memoryRequirements; + d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d) const + { + return static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), memoryOffset ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d ) const + { + Result result = static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), memoryOffset ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements, Dispatch const &d) const 
+ { + d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements Device::getImageMemoryRequirements( Image image, Dispatch const &d ) const + { + MemoryRequirements memoryRequirements; + d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d) const + { + return static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), memoryOffset ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset, Dispatch const &d ) const + { + Result result = static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), memoryOffset ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( Image image, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const + { + return static_cast( d.vkCreateFence( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createFence( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Fence fence; + Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Fence fence; + Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + + ObjectDestroy deleter( 
*this, allocator ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Fence fence, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Fence fence, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const Fence* pFences, Dispatch const &d) const + { + return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::resetFences( ArrayProxy fences, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetFences( m_device, fences.size() , reinterpret_cast( fences.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const &d) const + { + return static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout, Dispatch const &d) const + { + return static_cast( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast( pFences ), waitAll, timeout ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy fences, Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const + { + Result result = static_cast( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast( fences.data() ), waitAll, timeout ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore, Dispatch const &d) const + { + return 
static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSemaphore ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Semaphore semaphore; + Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); + return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Semaphore semaphore; + Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Semaphore semaphore, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent, Dispatch const &d) const + { + return static_cast( d.vkCreateEvent( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pEvent ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createEvent( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Event event; + Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); + return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Event event; + Result result = static_cast( 
d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyEvent( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyEvent( Event event, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Event event, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Event event, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const &d) const + { + return static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::getEventStatus( Event event, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::setEvent( Event event, Dispatch const &d) const + { + return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::setEvent( Event event, Dispatch const &d ) const + { + Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetEvent( Event event, Dispatch const &d) const + { + return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::resetEvent( Event event, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool, Dispatch const &d) const + { + return static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pQueryPool ) ) ); + } 
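The fence, semaphore, event and query-pool definitions above follow the pattern used for the rest of the enhanced mode: create*Unique variants wrap the new handle in a UniqueHandle with an ObjectDestroy deleter, and calls such as waitForFences route through createResultValue with an explicit list of non-error codes (eSuccess, eTimeout) so that a timeout is reported as a value rather than thrown. A short sketch of that pattern (editor's illustration; submitAndWait, device, queue and submitInfo are placeholders, and the default exception-enabled configuration is assumed):

    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    // Submit work guarded by a fence and wait for it to signal.
    bool submitAndWait( vk::Device device, vk::Queue queue, vk::SubmitInfo const & submitInfo )
    {
      // createFenceUnique returns a vk::UniqueFence; the fence is destroyed
      // automatically when it goes out of scope.
      auto fence = device.createFenceUnique( vk::FenceCreateInfo() );

      queue.submit( { submitInfo }, *fence );

      // With a finite timeout the call may also return vk::Result::eTimeout,
      // which is delivered as a return value instead of an exception.
      vk::Result result = device.waitForFences( { *fence }, VK_TRUE, UINT64_MAX );
      return result == vk::Result::eSuccess;
    }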
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + QueryPool queryPool; + Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); + return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + QueryPool queryPool; + Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( QueryPool queryPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags, Dispatch const &d) const + { + return static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, dataSize, pData, stride, static_cast( flags ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, DeviceSize stride, QueryResultFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), stride, static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer, Dispatch 
const &d) const + { + return static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pBuffer ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Buffer buffer; + Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); + return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Buffer buffer; + Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Buffer buffer, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView, Dispatch const &d) const + { + return static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + BufferView view; + Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + BufferView view; + Result result = static_cast( d.vkCreateBufferView( m_device, 
reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferViewUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( BufferView bufferView, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage, Dispatch const &d) const + { + return static_cast( d.vkCreateImage( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pImage ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createImage( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Image image; + Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); + return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Image image; + Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImage( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImage( Image image, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE 
void Device::destroy( Image image, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Image image, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout, Dispatch const &d) const + { + d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE SubresourceLayout Device::getImageSubresourceLayout( Image image, const ImageSubresource & subresource, Dispatch const &d ) const + { + SubresourceLayout layout; + d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( &subresource ), reinterpret_cast( &layout ) ); + return layout; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView, Dispatch const &d) const + { + return static_cast( d.vkCreateImageView( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + ImageView view; + Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + ImageView view; + Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( 
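  // Illustrative usage sketch (annotation only, not part of the generated header): creating an image
  // and a matching view through the Device::createImage / createImageView wrappers above; `device`
  // and the chosen format/extent values are assumptions.
  //
  //   vk::ImageCreateInfo imageInfo( {}, vk::ImageType::e2D, vk::Format::eR8G8B8A8Unorm,
  //                                  vk::Extent3D( 256, 256, 1 ), 1, 1, vk::SampleCountFlagBits::e1,
  //                                  vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled );
  //   vk::UniqueImage image = device.createImageUnique( imageInfo );
  //   vk::ImageViewCreateInfo viewInfo( {}, *image, vk::ImageViewType::e2D, vk::Format::eR8G8B8A8Unorm,
  //                                     {}, vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  //   vk::UniqueImageView view = device.createImageViewUnique( viewInfo );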
pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( ImageView imageView, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule, Dispatch const &d) const + { + return static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pShaderModule ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + ShaderModule shaderModule; + Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); + return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + ShaderModule shaderModule; + Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache, Dispatch const &d) const + { + return static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineCache ) ) ); + } +#ifndef 
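  // Illustrative usage sketch (annotation only, not part of the generated header): wrapping a SPIR-V
  // blob in a shader module via the Device::createShaderModule wrappers above; `device` and the
  // hypothetical loader `loadSpirv` are assumptions.
  //
  //   std::vector<uint32_t> code = loadSpirv( "triangle.vert.spv" );
  //   vk::ShaderModuleCreateInfo smInfo( {}, code.size() * sizeof( uint32_t ), code.data() );
  //   vk::UniqueShaderModule vertexShader = device.createShaderModuleUnique( smInfo );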
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + PipelineCache pipelineCache; + Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); + return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + PipelineCache pipelineCache; + Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const + { + return static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( PipelineCache pipelineCache, Dispatch const &d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" 
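  // Illustrative usage sketch (annotation only, not part of the generated header): the enhanced
  // getPipelineCacheData wrapper above hides the usual size-query / eIncomplete retry loop and returns
  // the blob directly; `device` is assumed to be a valid vk::Device.
  //
  //   vk::UniquePipelineCache cache = device.createPipelineCacheUnique( vk::PipelineCacheCreateInfo() );
  //   std::vector<uint8_t> blob = device.getPipelineCacheData( *cache );   // persist to disk for reuse on the next run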
); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches, Dispatch const &d) const + { + return static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::mergePipelineCaches( PipelineCache dstCache, ArrayProxy srcCaches, Dispatch const &d ) const + { + Result result = static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const + { + return static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" ); + } + template + VULKAN_HPP_INLINE ResultValueType::type Device::createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" 
); + std::vector pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE ResultValueType::type Device::createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines, Dispatch const &d) const + { + return static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelines ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelines( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector pipelines( createInfos.size() ); + Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); + return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" ); + } + template + VULKAN_HPP_INLINE ResultValueType::type Device::createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + static_assert( sizeof( Pipeline ) <= sizeof( UniquePipeline ), "Pipeline is greater than UniquePipeline!" 
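  // Illustrative usage sketch (annotation only, not part of the generated header): single- and
  // batch-creation through the pipeline wrappers above; `device`, `cache`, `computeStage`, `layout`
  // and `graphicsInfos` are assumed to have been set up earlier.
  //
  //   vk::ComputePipelineCreateInfo computeInfo( {}, computeStage, layout );
  //   vk::Pipeline compute = device.createComputePipeline( *cache, computeInfo );
  //   std::vector<vk::Pipeline> graphics = device.createGraphicsPipelines( *cache, graphicsInfos );  // graphicsInfos: std::vector<vk::GraphicsPipelineCreateInfo>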
); + std::vector pipelines; + pipelines.reserve( createInfos.size() ); + Pipeline* buffer = reinterpret_cast( reinterpret_cast( pipelines.data() ) + createInfos.size() * ( sizeof( UniquePipeline ) - sizeof( Pipeline ) ) ); + Result result = static_cast(d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE ResultValueType::type Device::createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Pipeline pipeline; + Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Pipeline pipeline, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout, Dispatch const &d) const + { + return static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineLayout ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + PipelineLayout pipelineLayout; + Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); + return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + PipelineLayout 
pipelineLayout; + Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler, Dispatch const &d) const + { + return static_cast( d.vkCreateSampler( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSampler ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Sampler sampler; + Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); + return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Sampler sampler; + Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySampler( Sampler 
sampler, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Sampler sampler, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout, Dispatch const &d) const + { + return static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSetLayout ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorSetLayout setLayout; + Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); + return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorSetLayout setLayout; + Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayoutUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorSetLayout descriptorSetLayout, Optional 
allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool, Dispatch const &d) const + { + return static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorPool ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorPool descriptorPool; + Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); + return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorPool descriptorPool; + Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPoolUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorPool descriptorPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags, Dispatch const &d) const + { + return static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::resetDescriptorPool( DescriptorPool descriptorPool, 
DescriptorPoolResetFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets, Dispatch const &d) const + { + return static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pDescriptorSets ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const + { + static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueDescriptorSet ), "DescriptorSet is greater than UniqueDescriptorSet!" ); + std::vector descriptorSets; + descriptorSets.reserve( allocateInfo.descriptorSetCount ); + DescriptorSet* buffer = reinterpret_cast( reinterpret_cast( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueDescriptorSet ) - sizeof( DescriptorSet ) ) ); + Result result = static_cast(d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + + PoolFree deleter( *this, allocateInfo.descriptorPool ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE Result Device::freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d) const + { + return static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d ) const + { + Result result = static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSets.size() , reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::free( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, Dispatch const &d) const + { + return static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::free( DescriptorPool descriptorPool, ArrayProxy descriptorSets, Dispatch const &d ) const + { + Result result = static_cast( d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSets.size() , 
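  // Illustrative usage sketch (annotation only, not part of the generated header): allocating and
  // returning descriptor sets through the wrappers above; `device` and `setLayout` (a plain
  // vk::DescriptorSetLayout created earlier) are assumptions.
  //
  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
  //   vk::DescriptorPoolCreateInfo poolInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 16, 1, &poolSize );
  //   vk::UniqueDescriptorPool pool = device.createDescriptorPoolUnique( poolInfo );
  //   vk::DescriptorSetAllocateInfo allocInfo( *pool, 1, &setLayout );
  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
  //   device.freeDescriptorSets( *pool, sets );   // allowed because the pool was created with eFreeDescriptorSet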
reinterpret_cast( descriptorSets.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const + { + d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast( pDescriptorCopies ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy descriptorWrites, ArrayProxy descriptorCopies, Dispatch const &d ) const + { + d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast( descriptorCopies.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer, Dispatch const &d) const + { + return static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFramebuffer ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Framebuffer framebuffer; + Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); + return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Framebuffer framebuffer; + Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( Framebuffer framebuffer, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( 
Framebuffer framebuffer, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass, Dispatch const &d) const + { + return static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + RenderPass renderPass; + Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + RenderPass renderPass; + Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( RenderPass renderPass, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity, Dispatch const &d) const + { + d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Extent2D Device::getRenderAreaGranularity( RenderPass renderPass, Dispatch const &d ) const + { + Extent2D granularity; + d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); + return granularity; + } +#endif 
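  // Illustrative usage sketch (annotation only, not part of the generated header): a single-subpass
  // render pass plus framebuffer built with the wrappers above; `device`, `view` and the 256x256
  // extent are assumptions.
  //
  //   vk::AttachmentDescription color( {}, vk::Format::eB8G8R8A8Unorm, vk::SampleCountFlagBits::e1,
  //                                    vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore,
  //                                    vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
  //                                    vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR );
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription subpass( {}, vk::PipelineBindPoint::eGraphics, 0, nullptr, 1, &colorRef );
  //   vk::RenderPassCreateInfo rpInfo( {}, 1, &color, 1, &subpass );
  //   vk::UniqueRenderPass renderPass = device.createRenderPassUnique( rpInfo );
  //   vk::ImageView attachments[] = { *view };
  //   vk::FramebufferCreateInfo fbInfo( {}, *renderPass, 1, attachments, 256, 256, 1 );
  //   vk::UniqueFramebuffer framebuffer = device.createFramebufferUnique( fbInfo );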
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool, Dispatch const &d) const + { + return static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCommandPool ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + CommandPool commandPool; + Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); + return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + CommandPool commandPool; + Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPoolUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( CommandPool commandPool, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d) const + { + return static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE ResultValueType::type Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + 
VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers, Dispatch const &d) const + { + return static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount ); + Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const + { + static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueCommandBuffer ), "CommandBuffer is greater than UniqueCommandBuffer!" ); + std::vector commandBuffers; + commandBuffers.reserve( allocateInfo.commandBufferCount ); + CommandBuffer* buffer = reinterpret_cast( reinterpret_cast( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueCommandBuffer ) - sizeof( CommandBuffer ) ) ); + Result result = static_cast(d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( buffer ) ) ); + + PoolFree deleter( *this, allocateInfo.commandPool ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const + { + d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d ) const + { + d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::free( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers, Dispatch const &d) const + { + d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::free( CommandPool commandPool, ArrayProxy commandBuffers, Dispatch const &d ) const + { + d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBuffers.size() , reinterpret_cast( commandBuffers.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains, Dispatch const &d) const + { + return static_cast( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchains ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type 
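  // Illustrative usage sketch (annotation only, not part of the generated header): allocating primary
  // command buffers from a pool through the wrappers above; `device` and `queueFamilyIndex` are
  // assumptions.
  //
  //   vk::UniqueCommandPool pool = device.createCommandPoolUnique(
  //       vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex ) );
  //   vk::CommandBufferAllocateInfo cbInfo( *pool, vk::CommandBufferLevel::ePrimary, 2 );
  //   std::vector<vk::CommandBuffer> cmds = device.allocateCommandBuffers( cbInfo );
  //   device.freeCommandBuffers( *pool, cmds );   // or use allocateCommandBuffersUnique for RAII handles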
Device::createSharedSwapchainsKHR( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + std::vector swapchains( createInfos.size() ); + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); + return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" ); + } + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy createInfos, Optional allocator, Dispatch const &d ) const + { + static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueSwapchainKHR ), "SwapchainKHR is greater than UniqueSwapchainKHR!" ); + std::vector swapchainKHRs; + swapchainKHRs.reserve( createInfos.size() ); + SwapchainKHR* buffer = reinterpret_cast( reinterpret_cast( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueSwapchainKHR ) - sizeof( SwapchainKHR ) ) ); + Result result = static_cast(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( buffer ) ) ); + + ObjectDestroy deleter( *this, allocator ); + for ( size_t i=0 ; i + VULKAN_HPP_INLINE ResultValueType::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain, Dispatch const &d) const + { + return static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSwapchain ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type 
Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SwapchainKHR swapchain; + Result result = static_cast( d.vkCreateSwapchainKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages, Dispatch const &d) const + { + return static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( SwapchainKHR swapchain, Dispatch const &d ) const + { + std::vector swapchainImages; + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + swapchainImages.resize( swapchainImageCount ); + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex, Dispatch const &d) const + { + return static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), pImageIndex ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValue 
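  // Illustrative usage sketch (annotation only, not part of the generated header): swapchain setup and
  // image acquisition through the wrappers above; `surface`, the surface format, `extent` and the
  // `imageAvailable` semaphore are assumed to have been obtained earlier.
  //
  //   vk::SwapchainCreateInfoKHR scInfo( {}, surface, 3, vk::Format::eB8G8R8A8Unorm,
  //                                      vk::ColorSpaceKHR::eSrgbNonlinear, extent, 1,
  //                                      vk::ImageUsageFlagBits::eColorAttachment );
  //   vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( scInfo );
  //   std::vector<vk::Image> images = device.getSwapchainImagesKHR( *swapchain );
  //   vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( *swapchain, UINT64_MAX, imageAvailable, nullptr );
  //   uint32_t imageIndex = acquired.value;   // acquired.result may be eSuboptimalKHR and still be usable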
Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, Dispatch const &d ) const + { + uint32_t imageIndex; + Result result = static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); + return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const + { + return static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const + { + return static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_NV + template + VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + + template + VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const + { + return static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & 
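  // Illustrative usage sketch (annotation only, not part of the generated header): tagging a handle for
  // debuggers through the debugMarkerSetObjectNameEXT wrapper above; `device` and `buffer` are
  // assumptions, and VK_EXT_debug_marker must have been enabled on the device.
  //
  //   vk::DebugMarkerObjectNameInfoEXT nameInfo( vk::DebugReportObjectTypeEXT::eBuffer,
  //                                              uint64_t( static_cast<VkBuffer>( buffer ) ), "staging buffer" );
  //   device.debugMarkerSetObjectNameEXT( nameInfo );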
createInfo, Optional allocator, Dispatch const &d ) const + { + IndirectCommandsLayoutNVX indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + { + IndirectCommandsLayoutNVX indirectCommandsLayout; + Result result = static_cast( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable, Dispatch const &d) const + { + return static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pObjectTable ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + { + ObjectTableNVX objectTable; + Result result = static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); + return createResultValue( result, objectTable, 
VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional allocator, Dispatch const &d ) const + { + ObjectTableNVX objectTable; + Result result = static_cast( d.vkCreateObjectTableNVX( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &objectTable ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( ObjectTableNVX objectTable, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyObjectTableNVX( m_device, static_cast( objectTable ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const + { + return static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( ppObjectTableEntries ), pObjectIndices ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() ); +#else + if ( pObjectTableEntries.size() != objectIndices.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + Result result = static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), pObjectTableEntries.size() , reinterpret_cast( pObjectTableEntries.data() ), objectIndices.data() ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const + { + return 
static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() );
+#else
+    if ( objectEntryTypes.size() != objectIndices.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
+    }
+#endif  // VULKAN_HPP_NO_EXCEPTIONS
+    Result result = static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d) const
+  {
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d ) const
+  {
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d) const
+  {
+    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlags flags, Dispatch const &d ) const
+  {
+    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( pMemoryWin32HandleProperties ) ) );
+  }
+#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryWin32HandlePropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const + { + MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + Result result = static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( &memoryWin32HandleProperties ) ) ); + return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + { + int fd; + Result result = static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryFdPropertiesKHR( ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const + { + MemoryFdPropertiesKHR memoryFdProperties; + Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); + return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const + { + return static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const + { + return static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + } +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const + { + return static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + { + int fd; + Result result = static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const + { + return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const + { + return static_cast( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const + { + return static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + 
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const + { + return static_cast( d.vkGetFenceFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + { + int fd; + Result result = static_cast( d.vkGetFenceFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const + { + return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const + { + return static_cast( d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const + { + return static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( pDeviceEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional allocator, Dispatch const &d ) const + { + Fence fence; + Result result = static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence, Dispatch const &d) const + { + return static_cast( 
d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, Dispatch const &d ) const + { + Fence fence; + Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const + { + return static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const + { + uint64_t counterValue; + Result result = static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); + return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const + { + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const + { + PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const + { + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const + { + PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const 
&d) const
+  {
+    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( pDeviceGroupPresentCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
+  {
+    DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+    Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( &deviceGroupPresentCapabilities ) ) );
+    return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" );
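// Illustrative sketch, not part of the generated header: the ArrayProxy overloads above
// (bindBufferMemory2, bindImageMemory2 and their KHR aliases) let a caller pass a single
// element, an initializer list or a std::vector instead of an explicit count/pointer pair.
// Assuming a valid vk::Device `device` plus a `buffer` and `memory` created elsewhere,
// binding one buffer could look like:
//
//   vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 /* memoryOffset */ );
//   device.bindBufferMemory2( bindInfo );  // throws vk::SystemError on failure unless
//                                          // VULKAN_HPP_NO_EXCEPTIONS is defined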
} +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const + { + return static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( pModes ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getGroupSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d ) const + { + DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const + { + return static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( pAcquireInfo ), pImageIndex ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValue Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const + { + uint32_t imageIndex; + Result result = static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( &acquireInfo ), &imageIndex ) ); + return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const + { + return static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter ); + } +#endif 
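// Illustrative sketch, not part of the generated header: the *Unique creation helpers
// above wrap the new handle in a UniqueHandle whose ObjectDestroy deleter captures the
// device and the optional allocator, so the object is destroyed automatically. Assuming
// a valid vk::Device `device` and an already filled-in `createInfo`:
//
//   vk::UniqueDescriptorUpdateTemplate updateTemplate =
//       device.createDescriptorUpdateTemplateUnique( createInfo );
//   // vkDestroyDescriptorUpdateTemplate runs when `updateTemplate` goes out of scope.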
/*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const + { + return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + DescriptorUpdateTemplate descriptorUpdateTemplate; + Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, const AllocationCallbacks* 
pAllocator, Dispatch const &d) const + { + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const + { + d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + } +#else + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const + { + d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const + { + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + } +#else + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( DescriptorSet descriptorSet, DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const + { + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const SwapchainKHR* pSwapchains, const HdrMetadataEXT* pMetadata, Dispatch const &d) const + { + d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy swapchains, ArrayProxy metadata, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +#else + if ( swapchains.size() != metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast( swapchains.data() ), reinterpret_cast( metadata.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d) const + { + return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( SwapchainKHR swapchain, Dispatch const &d ) const + { + Result result = static_cast( 
d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const + { + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getRefreshCycleDurationGOOGLE( SwapchainKHR swapchain, Dispatch const &d ) const + { + RefreshCycleDurationGOOGLE displayTimingProperties; + Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); + return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const + { + return static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), pPresentationTimingCount, reinterpret_cast( pPresentationTimings ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( SwapchainKHR swapchain, Dispatch const &d ) const + { + std::vector presentationTimings; + uint32_t presentationTimingCount; + Result result; + do + { + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + presentationTimings.resize( presentationTimingCount ); + return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + StructureChain 
structureChain; + MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + StructureChain structureChain; + MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + StructureChain structureChain; + MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2* pInfo, MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + StructureChain structureChain; + MemoryRequirements2& 
memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const + { + d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const + { + return static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( result, ycbcrConversion, 
VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const + { + return static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pYcbcrConversion ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + SamplerYcbcrConversion ycbcrConversion; + Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), 
reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getQueue2( const DeviceQueueInfo2* pQueueInfo, Queue* pQueue, Dispatch const &d) const + { + d.vkGetDeviceQueue2( m_device, reinterpret_cast( pQueueInfo ), reinterpret_cast( pQueue ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const + { + Queue queue; + d.vkGetDeviceQueue2( m_device, reinterpret_cast( &queueInfo ), reinterpret_cast( &queue ) ); + return queue; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, ValidationCacheEXT* pValidationCache, Dispatch const &d) const + { + return static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pValidationCache ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + ValidationCacheEXT validationCache; + Result result = static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &validationCache ) ) ); + return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + ValidationCacheEXT validationCache; + Result result = static_cast( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &validationCache ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d) 
const + { + d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( ValidationCacheEXT validationCache, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( ValidationCacheEXT validationCache, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( ValidationCacheEXT validationCache, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyValidationCacheEXT( m_device, static_cast( validationCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const + { + return static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), pDataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( ValidationCacheEXT validationCache, Dispatch const &d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, uint32_t srcCacheCount, const ValidationCacheEXT* pSrcCaches, Dispatch const &d) const + { + return static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::mergeValidationCachesEXT( ValidationCacheEXT dstCache, ArrayProxy srcCaches, Dispatch const &d ) const + { + Result result = static_cast( d.vkMergeValidationCachesEXT( m_device, static_cast( dstCache ), srcCaches.size() , reinterpret_cast( srcCaches.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const + { + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE 
DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const + { + DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return support; + } + template + VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const + { + StructureChain structureChain; + DescriptorSetLayoutSupport& support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo* pCreateInfo, DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const + { + d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const + { + DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return support; + } + template + VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const + { + StructureChain structureChain; + DescriptorSetLayoutSupport& support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const + { + return static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), pInfoSize, pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( Pipeline pipeline, ShaderStageFlagBits shaderStage, ShaderInfoTypeAMD infoType, Dispatch const &d ) const + { + std::vector info; + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + info.resize( infoSize ); + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const + { + return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( 
pNameInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const + { + return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const + { + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( pMemoryHostPointerProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getMemoryHostPointerPropertiesEXT( ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const + { + MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( &memoryHostPointerProperties ) ) ); + return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template + VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const + { + AndroidHardwareBufferPropertiesANDROID properties; + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const + { + StructureChain structureChain; + AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get(); + Result result = static_cast( 
d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_ANDROID
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( pInfo ), pBuffer ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
+  {
+    struct AHardwareBuffer* buffer;
+    Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( &info ), &buffer ) );
+    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+
+  template <> class UniqueHandleTraits<Device> {public: using deleter = ObjectDestroy<NoParent>; };
+  using UniqueDevice = UniqueHandle<Device>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  class PhysicalDevice
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR PhysicalDevice()
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t )
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice )
+      : m_physicalDevice( physicalDevice )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PhysicalDevice & operator=(VkPhysicalDevice physicalDevice)
+    {
+      m_physicalDevice = physicalDevice;
+      return *this;
+    }
+#endif
+
+    PhysicalDevice & operator=( std::nullptr_t )
+    {
+      m_physicalDevice = VK_NULL_HANDLE;
+      return *this;
+    }
+
+    bool operator==( PhysicalDevice const & rhs ) const
+    {
+      return m_physicalDevice == rhs.m_physicalDevice;
+    }
+
+    bool operator!=(PhysicalDevice const & rhs ) const
+    {
+      return m_physicalDevice != rhs.m_physicalDevice;
+    }
+
+    bool operator<(PhysicalDevice const & rhs ) const
+    {
+      return m_physicalDevice < rhs.m_physicalDevice;
+    }
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    void getProperties( PhysicalDeviceProperties* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = DispatchLoaderStatic>
+    PhysicalDeviceProperties getProperties(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = DispatchLoaderStatic>
+    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    void getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = DispatchLoaderStatic>
+    PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    void getFeatures( PhysicalDeviceFeatures* pFeatures, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = DispatchLoaderStatic>
+    PhysicalDeviceFeatures getFeatures(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    void getFormatProperties( Format format, FormatProperties* pFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = DispatchLoaderStatic>
+    FormatProperties getFormatProperties( Format format, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    Result getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = DispatchLoaderStatic>
+    ResultValueType<ImageFormatProperties>::type getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    Result createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = DispatchLoaderStatic>
+    ResultValueType<Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = DispatchLoaderStatic>
+    ResultValueType<UniqueDevice>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = DispatchLoaderStatic>
+    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = DispatchLoaderStatic>
+    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    void getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = DispatchLoaderStatic>
+    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = DispatchLoaderStatic>
+    Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = DispatchLoaderStatic>
+    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = Dispatch() ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template + Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPlanePropertiesKHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + template + Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + + template + Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const; 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = Dispatch() ) const; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + + template + Result getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures2( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = Dispatch() ) const; + template + StructureChain getFeatures2(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFeatures2KHR( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = Dispatch() ) const; + template + StructureChain getFeatures2KHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2( PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceProperties2 getProperties2(Dispatch const &d = Dispatch() ) const; + template + StructureChain getProperties2(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getProperties2KHR( PhysicalDeviceProperties2* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = Dispatch() ) const; + template + StructureChain getProperties2KHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties2( Format format, FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + FormatProperties2 getFormatProperties2( Format format, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getFormatProperties2KHR( Format format, FormatProperties2* pFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + FormatProperties2 getFormatProperties2KHR( Format format, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; + template + typename ResultValueType>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; + template + typename ResultValueType>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + 
std::vector getQueueFamilyProperties2(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getQueueFamilyProperties2KHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + std::vector getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) 
const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result releaseDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#else + template + ResultValueType::type releaseDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + template + Result acquireXlibDisplayEXT( Display* dpy, DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type acquireXlibDisplayEXT( DisplayKHR display, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + template + Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + + template + Result getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getPresentRectanglesKHR( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getMultisamplePropertiesEXT( SampleCountFlagBits samples, MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = Dispatch() ) const; 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + MultisamplePropertiesEXT getMultisamplePropertiesEXT( SampleCountFlagBits samples, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; + template + typename ResultValueType>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const + { + return m_physicalDevice; + } + + explicit operator bool() const + { + return m_physicalDevice != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_physicalDevice == VK_NULL_HANDLE; + } + + private: + VkPhysicalDevice m_physicalDevice; + }; + + static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" ); + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties( PhysicalDeviceProperties* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const + { + PhysicalDeviceProperties properties; + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( &properties ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const + { + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + return queueFamilyProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const + { + PhysicalDeviceMemoryProperties memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + return memoryProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const + { + PhysicalDeviceFeatures features; + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( Format format, FormatProperties* pFormatProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE FormatProperties PhysicalDevice::getFormatProperties( Format format, Dispatch const &d ) const + { + FormatProperties formatProperties; + d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( pImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, Dispatch const &d ) const + { + ImageFormatProperties imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice, Dispatch const &d) const + { + return static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDevice ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Device device; + Result result = static_cast( d.vkCreateDevice( m_physicalDevice, 
reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); + return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) const + { + Device device; + Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); + + ObjectDestroy deleter( allocator ); + return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties, Dispatch const &d) const + { + return static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties, Dispatch const &d) const + { + return static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), pPropertyCount, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( samples ), static_cast( usage ), static_cast( tiling ), &propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) 
const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays, Dispatch const &d) const + { + return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const + { + std::vector displays; + uint32_t displayCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + displays.resize( displayCount ); + return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties, Dispatch const &d) const + { + return static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const 
DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode, Dispatch const &d) const + { + return static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMode ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + DisplayModeKHR mode; + Result result = static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); + return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const + { + return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( pCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const + { + DisplayPlaneCapabilitiesKHR capabilities; + Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); + return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), pSupported ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Dispatch const &d ) const + { + Bool32 supported; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), &supported ) ); + return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result 
PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, Dispatch const &d ) const + { + SurfaceCapabilitiesKHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, Dispatch const &d ) const + { + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + surfaceFormats.resize( surfaceFormatCount ); + return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, Dispatch const &d ) const + { + std::vector presentModes; + uint32_t presentModeCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + presentModes.resize( presentModeCount ); + return createResultValue( result, 
presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ); + } +#else + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const + { + return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const + { + return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( 
tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( pExternalImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const + { + ExternalImageFormatPropertiesNV externalImageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( &externalImageFormatProperties ) ) ); + return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const + { + d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast( pFeatures ), reinterpret_cast( pLimits ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const + { + DeviceGeneratedCommandsLimitsNVX limits; + d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast( &features ), reinterpret_cast( &limits ) ); + return limits; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const + { + PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2(Dispatch const &d ) const + { + StructureChain structureChain; + PhysicalDeviceFeatures2& features = structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const + { + PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const + { + StructureChain structureChain; + PhysicalDeviceFeatures2& features = structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2KHR( 
m_physicalDevice, reinterpret_cast( &features ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( PhysicalDeviceProperties2* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const + { + PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); + return properties; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2(Dispatch const &d ) const + { + StructureChain structureChain; + PhysicalDeviceProperties2& properties = structureChain.template get(); + d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( PhysicalDeviceProperties2* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const + { + PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + return properties; + } + template + VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2KHR(Dispatch const &d ) const + { + StructureChain structureChain; + PhysicalDeviceProperties2& properties = structureChain.template get(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( Format format, FormatProperties2* pFormatProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2( Format format, Dispatch const &d ) const + { + FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( Format format, FormatProperties2* pFormatProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE FormatProperties2 PhysicalDevice::getFormatProperties2KHR( Format format, Dispatch const &d ) const + { + FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + return formatProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, 
ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + { + ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + { + StructureChain structureChain; + ImageFormatProperties2& imageFormatProperties = structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2* pImageFormatInfo, ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + { + ImageFormatProperties2 imageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const + { + StructureChain structureChain; + ImageFormatProperties2& imageFormatProperties = structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); + return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE 
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const
+  {
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const
+  {
+    PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const
+  {
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const
+  {
+    PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d) const
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties;
+    uint32_t
propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2* pProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, reinterpret_cast( properties.data() ) ); + return properties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const + { + ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( &externalBufferInfo ), reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const + { + ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast( &externalBufferInfo ), reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* 
pExternalSemaphoreProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const + { + ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( &externalSemaphoreInfo ), reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const + { + ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast( &externalSemaphoreInfo ), reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const + { + ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( &externalFenceInfo ), reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const + { + ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast( &externalFenceInfo ), reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( DisplayKHR display, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, DisplayKHR display, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( DisplayKHR display, Dispatch const &d ) const
+  {
+    Display dpy;
+    Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const
+  {
+    DisplayKHR display;
+    Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
+    return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, Dispatch const &d ) const
+  {
+    SurfaceCapabilities2EXT surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, uint32_t* pRectCount, Rect2D* pRects, Dispatch const &d) const
+  {
+    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( SurfaceKHR surface, Dispatch const &d ) const + { + std::vector rects; + uint32_t rectCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + rects.resize( rectCount ); + return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const + { + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast( samples ), reinterpret_cast( pMultisampleProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( SampleCountFlagBits samples, Dispatch const &d ) const + { + MultisamplePropertiesEXT multisampleProperties; + d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast( samples ), reinterpret_cast( &multisampleProperties ) ); + return multisampleProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pSurfaceCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + { + SurfaceCapabilities2KHR surfaceCapabilities; + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + { + StructureChain structureChain; + SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get(); + Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); + return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const 
PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const + { + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + { + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + surfaceFormats.resize( surfaceFormatCount ); + return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + struct CmdProcessCommandsInfoNVX + { + CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t indirectCommandsTokenCount_ = 0, const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, uint32_t maxSequencesCount_ = 0, CommandBuffer targetCommandBuffer_ = CommandBuffer(), Buffer sequencesCountBuffer_ = Buffer(), DeviceSize sequencesCountOffset_ = 0, Buffer sequencesIndexBuffer_ = Buffer(), DeviceSize sequencesIndexOffset_ = 0 ) + : objectTable( objectTable_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , indirectCommandsTokenCount( indirectCommandsTokenCount_ ) + , pIndirectCommandsTokens( pIndirectCommandsTokens_ ) + , maxSequencesCount( maxSequencesCount_ ) + , targetCommandBuffer( targetCommandBuffer_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + { + } + + CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) ); + } + + CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) + { + memcpy( this, &rhs, sizeof( CmdProcessCommandsInfoNVX ) ); + return *this; + } + CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ ) + { + objectTable = objectTable_; + return *this; + } + + CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ ) + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ ) + { + indirectCommandsTokenCount = indirectCommandsTokenCount_; + return *this; + } + + CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) + { + pIndirectCommandsTokens = 
pIndirectCommandsTokens_; + return *this; + } + + CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ ) + { + maxSequencesCount = maxSequencesCount_; + return *this; + } + + CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ ) + { + targetCommandBuffer = targetCommandBuffer_; + return *this; + } + + CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ ) + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ ) + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ ) + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ ) + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } + + operator const VkCmdProcessCommandsInfoNVX&() const + { + return *reinterpret_cast(this); + } + + bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectTable == rhs.objectTable ) + && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) + && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount ) + && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens ) + && ( maxSequencesCount == rhs.maxSequencesCount ) + && ( targetCommandBuffer == rhs.targetCommandBuffer ) + && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) + && ( sequencesCountOffset == rhs.sequencesCountOffset ) + && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) + && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); + } + + bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eCmdProcessCommandsInfoNVX; + + public: + const void* pNext = nullptr; + ObjectTableNVX objectTable; + IndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t indirectCommandsTokenCount; + const IndirectCommandsTokenNVX* pIndirectCommandsTokens; + uint32_t maxSequencesCount; + CommandBuffer targetCommandBuffer; + Buffer sequencesCountBuffer; + DeviceSize sequencesCountOffset; + Buffer sequencesIndexBuffer; + DeviceSize sequencesIndexOffset; + }; + static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" 
); + + struct PhysicalDeviceGroupProperties + { + operator const VkPhysicalDeviceGroupProperties&() const + { + return *reinterpret_cast(this); + } + + bool operator==( PhysicalDeviceGroupProperties const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( physicalDeviceCount == rhs.physicalDeviceCount ) + && ( memcmp( physicalDevices, rhs.physicalDevices, VK_MAX_DEVICE_GROUP_SIZE * sizeof( PhysicalDevice ) ) == 0 ) + && ( subsetAllocation == rhs.subsetAllocation ); + } + + bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::ePhysicalDeviceGroupProperties; + + public: + void* pNext = nullptr; + uint32_t physicalDeviceCount; + PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + Bool32 subsetAllocation; + }; + static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" ); + + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + class Instance; + + template <> class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueDebugReportCallbackEXT = UniqueHandle; + template <> class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueDebugUtilsMessengerEXT = UniqueHandle; + template <> class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueSurfaceKHR = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + class Instance + { + public: + VULKAN_HPP_CONSTEXPR Instance() + : m_instance(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) + : m_instance(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) + : m_instance( instance ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Instance & operator=(VkInstance instance) + { + m_instance = instance; + return *this; + } +#endif + + Instance & operator=( std::nullptr_t ) + { + m_instance = VK_NULL_HANDLE; + return *this; + } + + bool operator==( Instance const & rhs ) const + { + return m_instance == rhs.m_instance; + } + + bool operator!=(Instance const & rhs ) const + { + return m_instance != rhs.m_instance; + } + + bool operator<(Instance const & rhs ) const + { + return m_instance < rhs.m_instance; + } + + template + void destroy( const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDevices(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + Result createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, 
SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + Result createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + template + Result createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createMirSurfaceKHRUnique( const MirSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + + template + void destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySurfaceKHR( SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( SurfaceKHR surface, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_VI_NN + template + Result createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif 
/*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + Result createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + Result createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + Result createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + Result createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + + template + Result createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, 
Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DebugReportCallbackEXT callback, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDeviceGroups(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = DispatchLoaderStatic> + typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + template + Result createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + template + Result createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, 
Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + template + Result createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( DebugUtilsMessengerEXT messenger, Optional allocator = nullptr, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = Dispatch() ) const; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = Dispatch() ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const + { + return m_instance; + } + + explicit operator bool() const + { + return m_instance != VK_NULL_HANDLE; + } + + bool operator!() const + { + return m_instance == VK_NULL_HANDLE; + } + + private: + VkInstance m_instance; + }; + + static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" 
); + + template + VULKAN_HPP_INLINE void Instance::destroy( const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const &d ) const + { + d.vkDestroyInstance( m_instance, reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices, Dispatch const &d) const + { + return static_cast( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const + { + std::vector physicalDevices; + uint32_t physicalDeviceCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + physicalDevices.resize( physicalDeviceCount ); + return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const + { + return d.vkGetInstanceProcAddr( m_instance, pName ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const + { + return d.vkGetInstanceProcAddr( m_instance, name.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( 
allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + template + VULKAN_HPP_INLINE Result Instance::createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateMirSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateMirSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMirSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createMirSurfaceKHRUnique( const MirSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateMirSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMirSurfaceKHRUnique", deleter ); + } 
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( SurfaceKHR surface, Optional allocator, Dispatch const &d ) const + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_VI_NN + template + VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + template + VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + 
SurfaceKHR surface; + Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + template + VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( 
&surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + template + VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + + template + VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback, Dispatch const &d) const + { + return static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCallback ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + DebugReportCallbackEXT callback; + Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); + return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + 
VULKAN_HPP_INLINE ResultValueType::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + DebugReportCallbackEXT callback; + Result result = static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const + { + d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, pLayerPrefix, pMessage ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() ); +#else + if ( layerPrefix.size() != message.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Instance::debugReportMessageEXT: layerPrefix.size() != message.size()" ); + } +#endif // VULKAN_HPP_NO_EXCEPTIONS + d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const + { + return static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, 
reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const + { + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + template + VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" ); 
+ } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + template + VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface, Dispatch const &d) const + { + return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator, Dispatch const &d ) const + { + SurfaceKHR surface; + Result result = static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + template + VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const + { + return static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMessenger ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + DebugUtilsMessengerEXT messenger; + Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); + return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type Instance::createDebugUtilsMessengerEXTUnique( const 
DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator, Dispatch const &d ) const + { + DebugUtilsMessengerEXT messenger; + Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); + + ObjectDestroy deleter( *this, allocator ); + return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( DebugUtilsMessengerEXT messenger, const AllocationCallbacks* pAllocator, Dispatch const &d) const + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const + { + d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( pCallbackData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const + { + d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast( messageSeverity ), static_cast( messageTypes ), reinterpret_cast( &callbackData ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + struct DeviceGroupDeviceCreateInfo + { + DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0, const PhysicalDevice* pPhysicalDevices_ = nullptr ) + : physicalDeviceCount( physicalDeviceCount_ ) + , pPhysicalDevices( pPhysicalDevices_ ) + { + } + + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfo ) ); + } + + DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) + { + memcpy( this, &rhs, sizeof( DeviceGroupDeviceCreateInfo ) ); + return *this; + } + DeviceGroupDeviceCreateInfo& setPNext( const void* pNext_ ) + { + pNext = pNext_; + return *this; + } + + DeviceGroupDeviceCreateInfo& setPhysicalDeviceCount( uint32_t 
physicalDeviceCount_ ) + { + physicalDeviceCount = physicalDeviceCount_; + return *this; + } + + DeviceGroupDeviceCreateInfo& setPPhysicalDevices( const PhysicalDevice* pPhysicalDevices_ ) + { + pPhysicalDevices = pPhysicalDevices_; + return *this; + } + + operator const VkDeviceGroupDeviceCreateInfo&() const + { + return *reinterpret_cast(this); + } + + bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( physicalDeviceCount == rhs.physicalDeviceCount ) + && ( pPhysicalDevices == rhs.pPhysicalDevices ); + } + + bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const + { + return !operator==( rhs ); + } + + private: + StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + + public: + const void* pNext = nullptr; + uint32_t physicalDeviceCount; + const PhysicalDevice* pPhysicalDevices; + }; + static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" ); + + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + + template <> class UniqueHandleTraits {public: using deleter = ObjectDestroy; }; + using UniqueInstance = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + template + Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ); +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + ResultValueType::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator = nullptr, Dispatch const &d = Dispatch() ); +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance, Dispatch const &d) + { + return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pInstance ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE ResultValueType::type createInstance( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + { + Instance instance; + Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); + return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE ResultValueType::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional allocator, Dispatch const &d ) + { + Instance instance; + Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); + + ObjectDestroy deleter( allocator ); + return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct 
isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_NV + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_NV*/ +#ifdef VK_USE_PLATFORM_WIN32_NV + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true 
}; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_NV + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_NV*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + 
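The isStructureChainValid specializations in this region record which extension structures may legally extend which base structures; vk::StructureChain uses them to reject invalid pNext chains at compile time. The sketch below is illustrative only: physicalDevice stands for an application-owned vk::PhysicalDevice obtained from a Vulkan 1.1 instance, and the multiview feature structures are just one example of a valid chain.

    // Explicit pNext chaining: PhysicalDeviceMultiviewFeatures may extend PhysicalDeviceFeatures2.
    vk::PhysicalDeviceMultiviewFeatures multiviewFeatures;
    vk::PhysicalDeviceFeatures2 features2;
    features2.pNext = &multiviewFeatures;
    physicalDevice.getFeatures2( &features2 );

    // The same query through vk::StructureChain, which assembles the pNext chain and
    // checks it against isStructureChainValid at compile time.
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDeviceMultiviewFeatures>();
    vk::Bool32 multiview = chain.get<vk::PhysicalDeviceMultiviewFeatures>().multiview;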
template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string 
to_string(PipelineMultisampleStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(EventCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateCreateFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string 
to_string(DisplaySurfaceCreateFlagsKHR) + { + return "{}"; + } + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + +#ifdef VK_USE_PLATFORM_MIR_KHR + VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + +#ifdef VK_USE_PLATFORM_VI_NN + VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagBitsNN) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_VI_NN + VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagsNN) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagBitsMVK) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_IOS_MVK + VULKAN_HPP_INLINE std::string to_string(IOSSurfaceCreateFlagsMVK) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagBitsMVK) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +#ifdef VK_USE_PLATFORM_MACOS_MVK + VULKAN_HPP_INLINE std::string to_string(MacOSSurfaceCreateFlagsMVK) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagBits) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlags) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagBitsNV) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineViewportSwizzleStateCreateFlagsNV) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string 
to_string(PipelineDiscardRectangleStateCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineDiscardRectangleStateCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagBitsNV) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCoverageToColorStateCreateFlagsNV) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagBitsNV) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCoverageModulationStateCreateFlagsNV) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(ValidationCacheCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(ValidationCacheCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCallbackDataFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessengerCallbackDataFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationConservativeStateCreateFlagBitsEXT) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationConservativeStateCreateFlagsEXT) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageLayout value) + { + switch (value) + { + case ImageLayout::eUndefined: return "Undefined"; + case ImageLayout::eGeneral: return "General"; + case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal"; + case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal"; + case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal"; + case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal"; + case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal"; + case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal"; + case ImageLayout::ePreinitialized: return "Preinitialized"; + case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal"; + case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal"; + case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR"; + case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value) + { + switch (value) + { + case AttachmentLoadOp::eLoad: return "Load"; + case AttachmentLoadOp::eClear: return "Clear"; + case AttachmentLoadOp::eDontCare: return "DontCare"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value) + { + switch (value) + { + case AttachmentStoreOp::eStore: return "Store"; + case AttachmentStoreOp::eDontCare: return "DontCare"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageType value) + { + switch (value) + { + case ImageType::e1D: return "1D"; + case ImageType::e2D: return "2D"; + case ImageType::e3D: return "3D"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageTiling value) + { + switch (value) + { + case ImageTiling::eOptimal: return "Optimal"; + case 
ImageTiling::eLinear: return "Linear"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageViewType value) + { + switch (value) + { + case ImageViewType::e1D: return "1D"; + case ImageViewType::e2D: return "2D"; + case ImageViewType::e3D: return "3D"; + case ImageViewType::eCube: return "Cube"; + case ImageViewType::e1DArray: return "1DArray"; + case ImageViewType::e2DArray: return "2DArray"; + case ImageViewType::eCubeArray: return "CubeArray"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value) + { + switch (value) + { + case CommandBufferLevel::ePrimary: return "Primary"; + case CommandBufferLevel::eSecondary: return "Secondary"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value) + { + switch (value) + { + case ComponentSwizzle::eIdentity: return "Identity"; + case ComponentSwizzle::eZero: return "Zero"; + case ComponentSwizzle::eOne: return "One"; + case ComponentSwizzle::eR: return "R"; + case ComponentSwizzle::eG: return "G"; + case ComponentSwizzle::eB: return "B"; + case ComponentSwizzle::eA: return "A"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorType value) + { + switch (value) + { + case DescriptorType::eSampler: return "Sampler"; + case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler"; + case DescriptorType::eSampledImage: return "SampledImage"; + case DescriptorType::eStorageImage: return "StorageImage"; + case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer: return "UniformBuffer"; + case DescriptorType::eStorageBuffer: return "StorageBuffer"; + case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment: return "InputAttachment"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueryType value) + { + switch (value) + { + case QueryType::eOcclusion: return "Occlusion"; + case QueryType::ePipelineStatistics: return "PipelineStatistics"; + case QueryType::eTimestamp: return "Timestamp"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BorderColor value) + { + switch (value) + { + case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack: return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value) + { + switch (value) + { + case PipelineBindPoint::eGraphics: return "Graphics"; + case PipelineBindPoint::eCompute: return "Compute"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value) + { + switch (value) + { + case PipelineCacheHeaderVersion::eOne: return "One"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value) + { + switch (value) + { + case PrimitiveTopology::ePointList: return "PointList"; + case PrimitiveTopology::eLineList: return "LineList"; + case 
PrimitiveTopology::eLineStrip: return "LineStrip"; + case PrimitiveTopology::eTriangleList: return "TriangleList"; + case PrimitiveTopology::eTriangleStrip: return "TriangleStrip"; + case PrimitiveTopology::eTriangleFan: return "TriangleFan"; + case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency"; + case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency"; + case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency"; + case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency"; + case PrimitiveTopology::ePatchList: return "PatchList"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SharingMode value) + { + switch (value) + { + case SharingMode::eExclusive: return "Exclusive"; + case SharingMode::eConcurrent: return "Concurrent"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(IndexType value) + { + switch (value) + { + case IndexType::eUint16: return "Uint16"; + case IndexType::eUint32: return "Uint32"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(Filter value) + { + switch (value) + { + case Filter::eNearest: return "Nearest"; + case Filter::eLinear: return "Linear"; + case Filter::eCubicIMG: return "CubicIMG"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value) + { + switch (value) + { + case SamplerMipmapMode::eNearest: return "Nearest"; + case SamplerMipmapMode::eLinear: return "Linear"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value) + { + switch (value) + { + case SamplerAddressMode::eRepeat: return "Repeat"; + case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat"; + case SamplerAddressMode::eClampToEdge: return "ClampToEdge"; + case SamplerAddressMode::eClampToBorder: return "ClampToBorder"; + case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CompareOp value) + { + switch (value) + { + case CompareOp::eNever: return "Never"; + case CompareOp::eLess: return "Less"; + case CompareOp::eEqual: return "Equal"; + case CompareOp::eLessOrEqual: return "LessOrEqual"; + case CompareOp::eGreater: return "Greater"; + case CompareOp::eNotEqual: return "NotEqual"; + case CompareOp::eGreaterOrEqual: return "GreaterOrEqual"; + case CompareOp::eAlways: return "Always"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PolygonMode value) + { + switch (value) + { + case PolygonMode::eFill: return "Fill"; + case PolygonMode::eLine: return "Line"; + case PolygonMode::ePoint: return "Point"; + case PolygonMode::eFillRectangleNV: return "FillRectangleNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value) + { + switch (value) + { + case CullModeFlagBits::eNone: return "None"; + case CullModeFlagBits::eFront: return "Front"; + case CullModeFlagBits::eBack: return "Back"; + case CullModeFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CullModeFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CullModeFlagBits::eNone) result += "None | "; + if (value & CullModeFlagBits::eFront) result += "Front | "; + if (value & CullModeFlagBits::eBack) result += "Back | "; + if (value & 
CullModeFlagBits::eFrontAndBack) result += "FrontAndBack | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(FrontFace value) + { + switch (value) + { + case FrontFace::eCounterClockwise: return "CounterClockwise"; + case FrontFace::eClockwise: return "Clockwise"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BlendFactor value) + { + switch (value) + { + case BlendFactor::eZero: return "Zero"; + case BlendFactor::eOne: return "One"; + case BlendFactor::eSrcColor: return "SrcColor"; + case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor"; + case BlendFactor::eDstColor: return "DstColor"; + case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor"; + case BlendFactor::eSrcAlpha: return "SrcAlpha"; + case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha"; + case BlendFactor::eDstAlpha: return "DstAlpha"; + case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha"; + case BlendFactor::eConstantColor: return "ConstantColor"; + case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor"; + case BlendFactor::eConstantAlpha: return "ConstantAlpha"; + case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha"; + case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate"; + case BlendFactor::eSrc1Color: return "Src1Color"; + case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color"; + case BlendFactor::eSrc1Alpha: return "Src1Alpha"; + case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BlendOp value) + { + switch (value) + { + case BlendOp::eAdd: return "Add"; + case BlendOp::eSubtract: return "Subtract"; + case BlendOp::eReverseSubtract: return "ReverseSubtract"; + case BlendOp::eMin: return "Min"; + case BlendOp::eMax: return "Max"; + case BlendOp::eZeroEXT: return "ZeroEXT"; + case BlendOp::eSrcEXT: return "SrcEXT"; + case BlendOp::eDstEXT: return "DstEXT"; + case BlendOp::eSrcOverEXT: return "SrcOverEXT"; + case BlendOp::eDstOverEXT: return "DstOverEXT"; + case BlendOp::eSrcInEXT: return "SrcInEXT"; + case BlendOp::eDstInEXT: return "DstInEXT"; + case BlendOp::eSrcOutEXT: return "SrcOutEXT"; + case BlendOp::eDstOutEXT: return "DstOutEXT"; + case BlendOp::eSrcAtopEXT: return "SrcAtopEXT"; + case BlendOp::eDstAtopEXT: return "DstAtopEXT"; + case BlendOp::eXorEXT: return "XorEXT"; + case BlendOp::eMultiplyEXT: return "MultiplyEXT"; + case BlendOp::eScreenEXT: return "ScreenEXT"; + case BlendOp::eOverlayEXT: return "OverlayEXT"; + case BlendOp::eDarkenEXT: return "DarkenEXT"; + case BlendOp::eLightenEXT: return "LightenEXT"; + case BlendOp::eColordodgeEXT: return "ColordodgeEXT"; + case BlendOp::eColorburnEXT: return "ColorburnEXT"; + case BlendOp::eHardlightEXT: return "HardlightEXT"; + case BlendOp::eSoftlightEXT: return "SoftlightEXT"; + case BlendOp::eDifferenceEXT: return "DifferenceEXT"; + case BlendOp::eExclusionEXT: return "ExclusionEXT"; + case BlendOp::eInvertEXT: return "InvertEXT"; + case BlendOp::eInvertRgbEXT: return "InvertRgbEXT"; + case BlendOp::eLineardodgeEXT: return "LineardodgeEXT"; + case BlendOp::eLinearburnEXT: return "LinearburnEXT"; + case BlendOp::eVividlightEXT: return "VividlightEXT"; + case BlendOp::eLinearlightEXT: return "LinearlightEXT"; + case BlendOp::ePinlightEXT: return "PinlightEXT"; + case BlendOp::eHardmixEXT: return "HardmixEXT"; + case BlendOp::eHslHueEXT: return "HslHueEXT"; + case 
BlendOp::eHslSaturationEXT: return "HslSaturationEXT"; + case BlendOp::eHslColorEXT: return "HslColorEXT"; + case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT"; + case BlendOp::ePlusEXT: return "PlusEXT"; + case BlendOp::ePlusClampedEXT: return "PlusClampedEXT"; + case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT"; + case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT"; + case BlendOp::eMinusEXT: return "MinusEXT"; + case BlendOp::eMinusClampedEXT: return "MinusClampedEXT"; + case BlendOp::eContrastEXT: return "ContrastEXT"; + case BlendOp::eInvertOvgEXT: return "InvertOvgEXT"; + case BlendOp::eRedEXT: return "RedEXT"; + case BlendOp::eGreenEXT: return "GreenEXT"; + case BlendOp::eBlueEXT: return "BlueEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(StencilOp value) + { + switch (value) + { + case StencilOp::eKeep: return "Keep"; + case StencilOp::eZero: return "Zero"; + case StencilOp::eReplace: return "Replace"; + case StencilOp::eIncrementAndClamp: return "IncrementAndClamp"; + case StencilOp::eDecrementAndClamp: return "DecrementAndClamp"; + case StencilOp::eInvert: return "Invert"; + case StencilOp::eIncrementAndWrap: return "IncrementAndWrap"; + case StencilOp::eDecrementAndWrap: return "DecrementAndWrap"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(LogicOp value) + { + switch (value) + { + case LogicOp::eClear: return "Clear"; + case LogicOp::eAnd: return "And"; + case LogicOp::eAndReverse: return "AndReverse"; + case LogicOp::eCopy: return "Copy"; + case LogicOp::eAndInverted: return "AndInverted"; + case LogicOp::eNoOp: return "NoOp"; + case LogicOp::eXor: return "Xor"; + case LogicOp::eOr: return "Or"; + case LogicOp::eNor: return "Nor"; + case LogicOp::eEquivalent: return "Equivalent"; + case LogicOp::eInvert: return "Invert"; + case LogicOp::eOrReverse: return "OrReverse"; + case LogicOp::eCopyInverted: return "CopyInverted"; + case LogicOp::eOrInverted: return "OrInverted"; + case LogicOp::eNand: return "Nand"; + case LogicOp::eSet: return "Set"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value) + { + switch (value) + { + case InternalAllocationType::eExecutable: return "Executable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value) + { + switch (value) + { + case SystemAllocationScope::eCommand: return "Command"; + case SystemAllocationScope::eObject: return "Object"; + case SystemAllocationScope::eCache: return "Cache"; + case SystemAllocationScope::eDevice: return "Device"; + case SystemAllocationScope::eInstance: return "Instance"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value) + { + switch (value) + { + case PhysicalDeviceType::eOther: return "Other"; + case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu"; + case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu"; + case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu"; + case PhysicalDeviceType::eCpu: return "Cpu"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(VertexInputRate value) + { + switch (value) + { + case VertexInputRate::eVertex: return "Vertex"; + case VertexInputRate::eInstance: return "Instance"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(Format value) + { + switch (value) + { + case Format::eUndefined: return "Undefined"; + case 
Format::eR4G4UnormPack8: return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm: return "R8Unorm"; + case Format::eR8Snorm: return "R8Snorm"; + case Format::eR8Uscaled: return "R8Uscaled"; + case Format::eR8Sscaled: return "R8Sscaled"; + case Format::eR8Uint: return "R8Uint"; + case Format::eR8Sint: return "R8Sint"; + case Format::eR8Srgb: return "R8Srgb"; + case Format::eR8G8Unorm: return "R8G8Unorm"; + case Format::eR8G8Snorm: return "R8G8Snorm"; + case Format::eR8G8Uscaled: return "R8G8Uscaled"; + case Format::eR8G8Sscaled: return "R8G8Sscaled"; + case Format::eR8G8Uint: return "R8G8Uint"; + case Format::eR8G8Sint: return "R8G8Sint"; + case Format::eR8G8Srgb: return "R8G8Srgb"; + case Format::eR8G8B8Unorm: return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm: return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint: return "R8G8B8Uint"; + case Format::eR8G8B8Sint: return "R8G8B8Sint"; + case Format::eR8G8B8Srgb: return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm: return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm: return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint: return "B8G8R8Uint"; + case Format::eB8G8R8Sint: return "B8G8R8Sint"; + case Format::eB8G8R8Srgb: return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm"; + case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32"; + case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32"; + case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32"; + 
case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32"; + case Format::eR16Unorm: return "R16Unorm"; + case Format::eR16Snorm: return "R16Snorm"; + case Format::eR16Uscaled: return "R16Uscaled"; + case Format::eR16Sscaled: return "R16Sscaled"; + case Format::eR16Uint: return "R16Uint"; + case Format::eR16Sint: return "R16Sint"; + case Format::eR16Sfloat: return "R16Sfloat"; + case Format::eR16G16Unorm: return "R16G16Unorm"; + case Format::eR16G16Snorm: return "R16G16Snorm"; + case Format::eR16G16Uscaled: return "R16G16Uscaled"; + case Format::eR16G16Sscaled: return "R16G16Sscaled"; + case Format::eR16G16Uint: return "R16G16Uint"; + case Format::eR16G16Sint: return "R16G16Sint"; + case Format::eR16G16Sfloat: return "R16G16Sfloat"; + case Format::eR16G16B16Unorm: return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm: return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint: return "R16G16B16Uint"; + case Format::eR16G16B16Sint: return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat"; + case Format::eR32Uint: return "R32Uint"; + case Format::eR32Sint: return "R32Sint"; + case Format::eR32Sfloat: return "R32Sfloat"; + case Format::eR32G32Uint: return "R32G32Uint"; + case Format::eR32G32Sint: return "R32G32Sint"; + case Format::eR32G32Sfloat: return "R32G32Sfloat"; + case Format::eR32G32B32Uint: return "R32G32B32Uint"; + case Format::eR32G32B32Sint: return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat"; + case Format::eR64Uint: return "R64Uint"; + case Format::eR64Sint: return "R64Sint"; + case Format::eR64Sfloat: return "R64Sfloat"; + case Format::eR64G64Uint: return "R64G64Uint"; + case Format::eR64G64Sint: return "R64G64Sint"; + case Format::eR64G64Sfloat: return "R64G64Sfloat"; + case Format::eR64G64B64Uint: return "R64G64B64Uint"; + case Format::eR64G64B64Sint: return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint"; + case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm: return "D16Unorm"; + case Format::eX8D24UnormPack32: return "X8D24UnormPack32"; + case Format::eD32Sfloat: return "D32Sfloat"; + 
case Format::eS8Uint: return "S8Uint"; + case Format::eD16UnormS8Uint: return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint: return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock: return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock: return "Bc2SrgbBlock"; + case Format::eBc3UnormBlock: return "Bc3UnormBlock"; + case Format::eBc3SrgbBlock: return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock: return "Bc4UnormBlock"; + case Format::eBc4SnormBlock: return "Bc4SnormBlock"; + case Format::eBc5UnormBlock: return "Bc5UnormBlock"; + case Format::eBc5SnormBlock: return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock: return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock: return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock: return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock: return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock"; + case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock"; + case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock"; + case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock"; + case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock: return 
"Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16: return "R10X6UnormPack16"; + case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16"; + case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16"; + case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16: return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm"; + case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; + case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; + case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG"; + case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG"; + case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG"; + case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; + case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; + case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(StructureType 
value) + { + switch (value) + { + case StructureType::eApplicationInfo: return "ApplicationInfo"; + case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo"; + case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo"; + case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo"; + case StructureType::eSubmitInfo: return "SubmitInfo"; + case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo"; + case StructureType::eMappedMemoryRange: return "MappedMemoryRange"; + case StructureType::eBindSparseInfo: return "BindSparseInfo"; + case StructureType::eFenceCreateInfo: return "FenceCreateInfo"; + case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo"; + case StructureType::eEventCreateInfo: return "EventCreateInfo"; + case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo"; + case StructureType::eBufferCreateInfo: return "BufferCreateInfo"; + case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo"; + case StructureType::eImageCreateInfo: return "ImageCreateInfo"; + case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo"; + case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo"; + case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo"; + case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo"; + case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo"; + case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo"; + case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo"; + case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo"; + case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo"; + case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo"; + case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo"; + case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo"; + case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo"; + case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo"; + case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo"; + case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo"; + case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo"; + case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo"; + case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo"; + case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo"; + case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet"; + case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet"; + case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo"; + case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo"; + case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo"; + case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo"; + case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo"; + case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo"; 
+ case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo"; + case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier"; + case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier"; + case StructureType::eMemoryBarrier: return "MemoryBarrier"; + case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo"; + case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties"; + case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo"; + case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo"; + case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures"; + case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements"; + case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo"; + case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo"; + case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo"; + case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo"; + case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo"; + case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo"; + case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo"; + case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo"; + case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties"; + case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo"; + case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2"; + case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2"; + case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2"; + case StructureType::eMemoryRequirements2: return "MemoryRequirements2"; + case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2"; + case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2"; + case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2"; + case StructureType::eFormatProperties2: return "FormatProperties2"; + case StructureType::eImageFormatProperties2: return "ImageFormatProperties2"; + case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2"; + case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2"; + case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2"; + case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2"; + case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2"; + case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties"; + case StructureType::eRenderPassInputAttachmentAspectCreateInfo: return "RenderPassInputAttachmentAspectCreateInfo"; + case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo"; + case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: return "PipelineTessellationDomainOriginStateCreateInfo"; + case StructureType::eRenderPassMultiviewCreateInfo: 
return "RenderPassMultiviewCreateInfo"; + case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures"; + case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties"; + case StructureType::ePhysicalDeviceVariablePointerFeatures: return "PhysicalDeviceVariablePointerFeatures"; + case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo"; + case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures"; + case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties"; + case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2"; + case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo"; + case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo"; + case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo"; + case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo"; + case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: return "PhysicalDeviceSamplerYcbcrConversionFeatures"; + case StructureType::eSamplerYcbcrConversionImageFormatProperties: return "SamplerYcbcrConversionImageFormatProperties"; + case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo"; + case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo"; + case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties"; + case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo"; + case StructureType::eExternalBufferProperties: return "ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties: return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo"; + case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParameterFeatures: return "PhysicalDeviceShaderDrawParameterFeatures"; + case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; + case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; + case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR"; + case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR"; + case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR"; + case StructureType::eAcquireNextImageInfoKHR: return 
"AcquireNextImageInfoKHR"; + case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR"; + case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR"; + case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR"; + case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR"; + case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR"; + case StructureType::eMirSurfaceCreateInfoKHR: return "MirSurfaceCreateInfoKHR"; + case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR"; + case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR"; + case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; + case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; + case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; + case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV"; + case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV"; + case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT"; + case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN"; + case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; + case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; + case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR"; + case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR"; + case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR"; + case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR"; + case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR"; + case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR"; + case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR"; + case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR"; + case 
StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR"; + case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; + case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR"; + case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR"; + case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR"; + case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX"; + case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX"; + case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX"; + case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX"; + case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX"; + case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX"; + case StructureType::ePipelineViewportWScalingStateCreateInfoNV: return "PipelineViewportWScalingStateCreateInfoNV"; + case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT"; + case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT"; + case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT"; + case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT"; + case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; + case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; + case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; + case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT"; + case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: return "PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; + case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR"; + case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR"; + case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK"; + case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK"; + case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT: return 
"DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT"; + case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID"; + case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: return "AndroidHardwareBufferFormatPropertiesANDROID"; + case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID"; + case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: return "MemoryGetAndroidHardwareBufferInfoANDROID"; + case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID"; + case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT: return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT"; + case StructureType::eSamplerReductionModeCreateInfoEXT: return "SamplerReductionModeCreateInfoEXT"; + case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; + case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; + case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: return "PhysicalDeviceSampleLocationsPropertiesEXT"; + case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT"; + case StructureType::eImageFormatListCreateInfoKHR: return "ImageFormatListCreateInfoKHR"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; + case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: return "PipelineColorBlendAdvancedStateCreateInfoEXT"; + case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::ePipelineCoverageModulationStateCreateInfoNV: return "PipelineCoverageModulationStateCreateInfoNV"; + case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT"; + case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT: return "DescriptorSetLayoutBindingFlagsCreateInfoEXT"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT: return "PhysicalDeviceDescriptorIndexingFeaturesEXT"; + case StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT: return "PhysicalDeviceDescriptorIndexingPropertiesEXT"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT: return "DescriptorSetVariableDescriptorCountAllocateInfoEXT"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT: return "DescriptorSetVariableDescriptorCountLayoutSupportEXT"; + case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT: return "DeviceQueueGlobalPriorityCreateInfoEXT"; + case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT"; + case StructureType::eMemoryHostPointerPropertiesEXT: return 
"MemoryHostPointerPropertiesEXT"; + case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: return "PipelineVertexInputDivisorStateCreateInfoEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubpassContents value) + { + switch (value) + { + case SubpassContents::eInline: return "Inline"; + case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DynamicState value) + { + switch (value) + { + case DynamicState::eViewport: return "Viewport"; + case DynamicState::eScissor: return "Scissor"; + case DynamicState::eLineWidth: return "LineWidth"; + case DynamicState::eDepthBias: return "DepthBias"; + case DynamicState::eBlendConstants: return "BlendConstants"; + case DynamicState::eDepthBounds: return "DepthBounds"; + case DynamicState::eStencilCompareMask: return "StencilCompareMask"; + case DynamicState::eStencilWriteMask: return "StencilWriteMask"; + case DynamicState::eStencilReference: return "StencilReference"; + case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; + case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; + case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorUpdateTemplateType value) + { + switch (value) + { + case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; + case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ObjectType value) + { + switch (value) + { + case ObjectType::eUnknown: return "Unknown"; + case ObjectType::eInstance: return "Instance"; + case ObjectType::ePhysicalDevice: return "PhysicalDevice"; + case ObjectType::eDevice: return "Device"; + case ObjectType::eQueue: return "Queue"; + case ObjectType::eSemaphore: return "Semaphore"; + case ObjectType::eCommandBuffer: return "CommandBuffer"; + case ObjectType::eFence: return "Fence"; + case ObjectType::eDeviceMemory: return "DeviceMemory"; + case ObjectType::eBuffer: return "Buffer"; + case ObjectType::eImage: return "Image"; + case ObjectType::eEvent: return "Event"; + case ObjectType::eQueryPool: return "QueryPool"; + case ObjectType::eBufferView: return "BufferView"; + case ObjectType::eImageView: return "ImageView"; + case ObjectType::eShaderModule: return "ShaderModule"; + case ObjectType::ePipelineCache: return "PipelineCache"; + case ObjectType::ePipelineLayout: return "PipelineLayout"; + case ObjectType::eRenderPass: return "RenderPass"; + case ObjectType::ePipeline: return "Pipeline"; + case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout"; + case ObjectType::eSampler: return "Sampler"; + case ObjectType::eDescriptorPool: return "DescriptorPool"; + case ObjectType::eDescriptorSet: return "DescriptorSet"; + case ObjectType::eFramebuffer: return "Framebuffer"; + case ObjectType::eCommandPool: return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case 
ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case ObjectType::eSurfaceKHR: return "SurfaceKHR"; + case ObjectType::eSwapchainKHR: return "SwapchainKHR"; + case ObjectType::eDisplayKHR: return "DisplayKHR"; + case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case ObjectType::eObjectTableNVX: return "ObjectTableNVX"; + case ObjectType::eIndirectCommandsLayoutNVX: return "IndirectCommandsLayoutNVX"; + case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; + case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value) + { + switch (value) + { + case QueueFlagBits::eGraphics: return "Graphics"; + case QueueFlagBits::eCompute: return "Compute"; + case QueueFlagBits::eTransfer: return "Transfer"; + case QueueFlagBits::eSparseBinding: return "SparseBinding"; + case QueueFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueueFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & QueueFlagBits::eGraphics) result += "Graphics | "; + if (value & QueueFlagBits::eCompute) result += "Compute | "; + if (value & QueueFlagBits::eTransfer) result += "Transfer | "; + if (value & QueueFlagBits::eSparseBinding) result += "SparseBinding | "; + if (value & QueueFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits value) + { + switch (value) + { + case DeviceQueueCreateFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & DeviceQueueCreateFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value) + { + switch (value) + { + case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryPropertyFlagBits::eHostVisible: return "HostVisible"; + case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent"; + case MemoryPropertyFlagBits::eHostCached: return "HostCached"; + case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated"; + case MemoryPropertyFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & MemoryPropertyFlagBits::eDeviceLocal) result += "DeviceLocal | "; + if (value & MemoryPropertyFlagBits::eHostVisible) result += "HostVisible | "; + if (value & MemoryPropertyFlagBits::eHostCoherent) result += "HostCoherent | "; + if (value & MemoryPropertyFlagBits::eHostCached) result += "HostCached | "; + if (value & MemoryPropertyFlagBits::eLazilyAllocated) result += "LazilyAllocated | "; + if (value & MemoryPropertyFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value) + { + switch (value) + { + case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal"; + case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance"; + default: return "invalid"; + } 
+ } + + VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & MemoryHeapFlagBits::eDeviceLocal) result += "DeviceLocal | "; + if (value & MemoryHeapFlagBits::eMultiInstance) result += "MultiInstance | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value) + { + switch (value) + { + case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits::eIndexRead: return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead: return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead: return "ShaderRead"; + case AccessFlagBits::eShaderWrite: return "ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead: return "TransferRead"; + case AccessFlagBits::eTransferWrite: return "TransferWrite"; + case AccessFlagBits::eHostRead: return "HostRead"; + case AccessFlagBits::eHostWrite: return "HostWrite"; + case AccessFlagBits::eMemoryRead: return "MemoryRead"; + case AccessFlagBits::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX"; + case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AccessFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & AccessFlagBits::eIndirectCommandRead) result += "IndirectCommandRead | "; + if (value & AccessFlagBits::eIndexRead) result += "IndexRead | "; + if (value & AccessFlagBits::eVertexAttributeRead) result += "VertexAttributeRead | "; + if (value & AccessFlagBits::eUniformRead) result += "UniformRead | "; + if (value & AccessFlagBits::eInputAttachmentRead) result += "InputAttachmentRead | "; + if (value & AccessFlagBits::eShaderRead) result += "ShaderRead | "; + if (value & AccessFlagBits::eShaderWrite) result += "ShaderWrite | "; + if (value & AccessFlagBits::eColorAttachmentRead) result += "ColorAttachmentRead | "; + if (value & AccessFlagBits::eColorAttachmentWrite) result += "ColorAttachmentWrite | "; + if (value & AccessFlagBits::eDepthStencilAttachmentRead) result += "DepthStencilAttachmentRead | "; + if (value & AccessFlagBits::eDepthStencilAttachmentWrite) result += "DepthStencilAttachmentWrite | "; + if (value & AccessFlagBits::eTransferRead) result += "TransferRead | "; + if (value & AccessFlagBits::eTransferWrite) result += "TransferWrite | "; + if (value & AccessFlagBits::eHostRead) result += "HostRead | "; + if (value & AccessFlagBits::eHostWrite) result += "HostWrite | "; + if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | "; + if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | "; + if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | "; + if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | "; + if (value & 
AccessFlagBits::eColorAttachmentReadNoncoherentEXT) result += "ColorAttachmentReadNoncoherentEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value) + { + switch (value) + { + case BufferUsageFlagBits::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & BufferUsageFlagBits::eTransferSrc) result += "TransferSrc | "; + if (value & BufferUsageFlagBits::eTransferDst) result += "TransferDst | "; + if (value & BufferUsageFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | "; + if (value & BufferUsageFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | "; + if (value & BufferUsageFlagBits::eUniformBuffer) result += "UniformBuffer | "; + if (value & BufferUsageFlagBits::eStorageBuffer) result += "StorageBuffer | "; + if (value & BufferUsageFlagBits::eIndexBuffer) result += "IndexBuffer | "; + if (value & BufferUsageFlagBits::eVertexBuffer) result += "VertexBuffer | "; + if (value & BufferUsageFlagBits::eIndirectBuffer) result += "IndirectBuffer | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value) + { + switch (value) + { + case BufferCreateFlagBits::eSparseBinding: return "SparseBinding"; + case BufferCreateFlagBits::eSparseResidency: return "SparseResidency"; + case BufferCreateFlagBits::eSparseAliased: return "SparseAliased"; + case BufferCreateFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & BufferCreateFlagBits::eSparseBinding) result += "SparseBinding | "; + if (value & BufferCreateFlagBits::eSparseResidency) result += "SparseResidency | "; + if (value & BufferCreateFlagBits::eSparseAliased) result += "SparseAliased | "; + if (value & BufferCreateFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value) + { + switch (value) + { + case ShaderStageFlagBits::eVertex: return "Vertex"; + case ShaderStageFlagBits::eTessellationControl: return "TessellationControl"; + case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation"; + case ShaderStageFlagBits::eGeometry: return "Geometry"; + case ShaderStageFlagBits::eFragment: return "Fragment"; + case ShaderStageFlagBits::eCompute: return "Compute"; + case ShaderStageFlagBits::eAllGraphics: return "AllGraphics"; + case ShaderStageFlagBits::eAll: return "All"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ShaderStageFlagBits::eVertex) 
result += "Vertex | "; + if (value & ShaderStageFlagBits::eTessellationControl) result += "TessellationControl | "; + if (value & ShaderStageFlagBits::eTessellationEvaluation) result += "TessellationEvaluation | "; + if (value & ShaderStageFlagBits::eGeometry) result += "Geometry | "; + if (value & ShaderStageFlagBits::eFragment) result += "Fragment | "; + if (value & ShaderStageFlagBits::eCompute) result += "Compute | "; + if (value & ShaderStageFlagBits::eAllGraphics) result += "AllGraphics | "; + if (value & ShaderStageFlagBits::eAll) result += "All | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value) + { + switch (value) + { + case ImageUsageFlagBits::eTransferSrc: return "TransferSrc"; + case ImageUsageFlagBits::eTransferDst: return "TransferDst"; + case ImageUsageFlagBits::eSampled: return "Sampled"; + case ImageUsageFlagBits::eStorage: return "Storage"; + case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment"; + case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment"; + case ImageUsageFlagBits::eInputAttachment: return "InputAttachment"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ImageUsageFlagBits::eTransferSrc) result += "TransferSrc | "; + if (value & ImageUsageFlagBits::eTransferDst) result += "TransferDst | "; + if (value & ImageUsageFlagBits::eSampled) result += "Sampled | "; + if (value & ImageUsageFlagBits::eStorage) result += "Storage | "; + if (value & ImageUsageFlagBits::eColorAttachment) result += "ColorAttachment | "; + if (value & ImageUsageFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | "; + if (value & ImageUsageFlagBits::eTransientAttachment) result += "TransientAttachment | "; + if (value & ImageUsageFlagBits::eInputAttachment) result += "InputAttachment | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value) + { + switch (value) + { + case ImageCreateFlagBits::eSparseBinding: return "SparseBinding"; + case ImageCreateFlagBits::eSparseResidency: return "SparseResidency"; + case ImageCreateFlagBits::eSparseAliased: return "SparseAliased"; + case ImageCreateFlagBits::eMutableFormat: return "MutableFormat"; + case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible"; + case ImageCreateFlagBits::eAlias: return "Alias"; + case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible"; + case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible"; + case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage"; + case ImageCreateFlagBits::eProtected: return "Protected"; + case ImageCreateFlagBits::eDisjoint: return "Disjoint"; + case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ImageCreateFlagBits::eSparseBinding) result += "SparseBinding | "; + if (value & ImageCreateFlagBits::eSparseResidency) result += "SparseResidency | "; + if (value & ImageCreateFlagBits::eSparseAliased) result 
+= "SparseAliased | "; + if (value & ImageCreateFlagBits::eMutableFormat) result += "MutableFormat | "; + if (value & ImageCreateFlagBits::eCubeCompatible) result += "CubeCompatible | "; + if (value & ImageCreateFlagBits::eAlias) result += "Alias | "; + if (value & ImageCreateFlagBits::eSplitInstanceBindRegions) result += "SplitInstanceBindRegions | "; + if (value & ImageCreateFlagBits::e2DArrayCompatible) result += "2DArrayCompatible | "; + if (value & ImageCreateFlagBits::eBlockTexelViewCompatible) result += "BlockTexelViewCompatible | "; + if (value & ImageCreateFlagBits::eExtendedUsage) result += "ExtendedUsage | "; + if (value & ImageCreateFlagBits::eProtected) result += "Protected | "; + if (value & ImageCreateFlagBits::eDisjoint) result += "Disjoint | "; + if (value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) result += "SampleLocationsCompatibleDepthEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value) + { + switch (value) + { + case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative: return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & PipelineCreateFlagBits::eDisableOptimization) result += "DisableOptimization | "; + if (value & PipelineCreateFlagBits::eAllowDerivatives) result += "AllowDerivatives | "; + if (value & PipelineCreateFlagBits::eDerivative) result += "Derivative | "; + if (value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex) result += "ViewIndexFromDeviceIndex | "; + if (value & PipelineCreateFlagBits::eDispatchBase) result += "DispatchBase | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value) + { + switch (value) + { + case ColorComponentFlagBits::eR: return "R"; + case ColorComponentFlagBits::eG: return "G"; + case ColorComponentFlagBits::eB: return "B"; + case ColorComponentFlagBits::eA: return "A"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ColorComponentFlagBits::eR) result += "R | "; + if (value & ColorComponentFlagBits::eG) result += "G | "; + if (value & ColorComponentFlagBits::eB) result += "B | "; + if (value & ColorComponentFlagBits::eA) result += "A | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value) + { + switch (value) + { + case FenceCreateFlagBits::eSignaled: return "Signaled"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & FenceCreateFlagBits::eSignaled) result += "Signaled | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value) + { + switch (value) + { + case FormatFeatureFlagBits::eSampledImage: return "SampledImage"; + case FormatFeatureFlagBits::eStorageImage: return "StorageImage"; + case 
FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic"; + case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer"; + case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment"; + case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend"; + case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; + case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc"; + case FormatFeatureFlagBits::eBlitDst: return "BlitDst"; + case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear"; + case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc"; + case FormatFeatureFlagBits::eTransferDst: return "TransferDst"; + case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable: return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits::eDisjoint: return "Disjoint"; + case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples"; + case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG"; + case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT: return "SampledImageFilterMinmaxEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & FormatFeatureFlagBits::eSampledImage) result += "SampledImage | "; + if (value & FormatFeatureFlagBits::eStorageImage) result += "StorageImage | "; + if (value & FormatFeatureFlagBits::eStorageImageAtomic) result += "StorageImageAtomic | "; + if (value & FormatFeatureFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | "; + if (value & FormatFeatureFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | "; + if (value & FormatFeatureFlagBits::eStorageTexelBufferAtomic) result += "StorageTexelBufferAtomic | "; + if (value & FormatFeatureFlagBits::eVertexBuffer) result += "VertexBuffer | "; + if (value & FormatFeatureFlagBits::eColorAttachment) result += "ColorAttachment | "; + if (value & FormatFeatureFlagBits::eColorAttachmentBlend) result += "ColorAttachmentBlend | "; + if (value & FormatFeatureFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | "; + if (value & FormatFeatureFlagBits::eBlitSrc) result += "BlitSrc | "; + if (value & FormatFeatureFlagBits::eBlitDst) result += "BlitDst | "; + if (value & FormatFeatureFlagBits::eSampledImageFilterLinear) result += "SampledImageFilterLinear | "; + if (value & FormatFeatureFlagBits::eTransferSrc) result += "TransferSrc | "; + if (value & FormatFeatureFlagBits::eTransferDst) result += "TransferDst | "; + if (value & FormatFeatureFlagBits::eMidpointChromaSamples) result += 
"MidpointChromaSamples | "; + if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) result += "SampledImageYcbcrConversionLinearFilter | "; + if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if (value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if (value & FormatFeatureFlagBits::eDisjoint) result += "Disjoint | "; + if (value & FormatFeatureFlagBits::eCositedChromaSamples) result += "CositedChromaSamples | "; + if (value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG) result += "SampledImageFilterCubicIMG | "; + if (value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) result += "SampledImageFilterMinmaxEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value) + { + switch (value) + { + case QueryControlFlagBits::ePrecise: return "Precise"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & QueryControlFlagBits::ePrecise) result += "Precise | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value) + { + switch (value) + { + case QueryResultFlagBits::e64: return "64"; + case QueryResultFlagBits::eWait: return "Wait"; + case QueryResultFlagBits::eWithAvailability: return "WithAvailability"; + case QueryResultFlagBits::ePartial: return "Partial"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & QueryResultFlagBits::e64) result += "64 | "; + if (value & QueryResultFlagBits::eWait) result += "Wait | "; + if (value & QueryResultFlagBits::eWithAvailability) result += "WithAvailability | "; + if (value & QueryResultFlagBits::ePartial) result += "Partial | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value) + { + switch (value) + { + case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit"; + case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue"; + case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CommandBufferUsageFlagBits::eOneTimeSubmit) result += "OneTimeSubmit | "; + if (value & CommandBufferUsageFlagBits::eRenderPassContinue) result += "RenderPassContinue | "; + if (value & CommandBufferUsageFlagBits::eSimultaneousUse) result += "SimultaneousUse | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value) + { + switch (value) + { + case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices"; + case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives"; + case 
QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations"; + case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives"; + case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations"; + case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives"; + case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations"; + case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches"; + case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations"; + case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices) result += "InputAssemblyVertices | "; + if (value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) result += "InputAssemblyPrimitives | "; + if (value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations) result += "VertexShaderInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) result += "GeometryShaderInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) result += "GeometryShaderPrimitives | "; + if (value & QueryPipelineStatisticFlagBits::eClippingInvocations) result += "ClippingInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eClippingPrimitives) result += "ClippingPrimitives | "; + if (value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) result += "FragmentShaderInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) result += "TessellationControlShaderPatches | "; + if (value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) result += "TessellationEvaluationShaderInvocations | "; + if (value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations) result += "ComputeShaderInvocations | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value) + { + switch (value) + { + case ImageAspectFlagBits::eColor: return "Color"; + case ImageAspectFlagBits::eDepth: return "Depth"; + case ImageAspectFlagBits::eStencil: return "Stencil"; + case ImageAspectFlagBits::eMetadata: return "Metadata"; + case ImageAspectFlagBits::ePlane0: return "Plane0"; + case ImageAspectFlagBits::ePlane1: return "Plane1"; + case ImageAspectFlagBits::ePlane2: return "Plane2"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ImageAspectFlagBits::eColor) result += "Color | "; + if (value & ImageAspectFlagBits::eDepth) result += "Depth | "; + if (value & ImageAspectFlagBits::eStencil) result += "Stencil | "; + if (value & ImageAspectFlagBits::eMetadata) result += "Metadata | "; + if (value & ImageAspectFlagBits::ePlane0) result += "Plane0 | "; + if (value & ImageAspectFlagBits::ePlane1) result += "Plane1 | "; + if (value & ImageAspectFlagBits::ePlane2) result += "Plane2 | "; + return "{" + 
result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value) + { + switch (value) + { + case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail"; + case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize"; + case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SparseImageFormatFlagBits::eSingleMiptail) result += "SingleMiptail | "; + if (value & SparseImageFormatFlagBits::eAlignedMipSize) result += "AlignedMipSize | "; + if (value & SparseImageFormatFlagBits::eNonstandardBlockSize) result += "NonstandardBlockSize | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value) + { + switch (value) + { + case SparseMemoryBindFlagBits::eMetadata: return "Metadata"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SparseMemoryBindFlagBits::eMetadata) result += "Metadata | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value) + { + switch (value) + { + case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe"; + case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect"; + case PipelineStageFlagBits::eVertexInput: return "VertexInput"; + case PipelineStageFlagBits::eVertexShader: return "VertexShader"; + case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader"; + case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader"; + case PipelineStageFlagBits::eGeometryShader: return "GeometryShader"; + case PipelineStageFlagBits::eFragmentShader: return "FragmentShader"; + case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests"; + case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests"; + case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput"; + case PipelineStageFlagBits::eComputeShader: return "ComputeShader"; + case PipelineStageFlagBits::eTransfer: return "Transfer"; + case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe"; + case PipelineStageFlagBits::eHost: return "Host"; + case PipelineStageFlagBits::eAllGraphics: return "AllGraphics"; + case PipelineStageFlagBits::eAllCommands: return "AllCommands"; + case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & PipelineStageFlagBits::eTopOfPipe) result += "TopOfPipe | "; + if (value & PipelineStageFlagBits::eDrawIndirect) result += "DrawIndirect | "; + if (value & PipelineStageFlagBits::eVertexInput) result += "VertexInput | "; + if (value & PipelineStageFlagBits::eVertexShader) result += "VertexShader | "; + if (value & PipelineStageFlagBits::eTessellationControlShader) result += "TessellationControlShader | "; + if (value & PipelineStageFlagBits::eTessellationEvaluationShader) result += "TessellationEvaluationShader | "; + if (value & PipelineStageFlagBits::eGeometryShader) result += "GeometryShader | "; + if (value & 
PipelineStageFlagBits::eFragmentShader) result += "FragmentShader | "; + if (value & PipelineStageFlagBits::eEarlyFragmentTests) result += "EarlyFragmentTests | "; + if (value & PipelineStageFlagBits::eLateFragmentTests) result += "LateFragmentTests | "; + if (value & PipelineStageFlagBits::eColorAttachmentOutput) result += "ColorAttachmentOutput | "; + if (value & PipelineStageFlagBits::eComputeShader) result += "ComputeShader | "; + if (value & PipelineStageFlagBits::eTransfer) result += "Transfer | "; + if (value & PipelineStageFlagBits::eBottomOfPipe) result += "BottomOfPipe | "; + if (value & PipelineStageFlagBits::eHost) result += "Host | "; + if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | "; + if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | "; + if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value) + { + switch (value) + { + case CommandPoolCreateFlagBits::eTransient: return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CommandPoolCreateFlagBits::eTransient) result += "Transient | "; + if (value & CommandPoolCreateFlagBits::eResetCommandBuffer) result += "ResetCommandBuffer | "; + if (value & CommandPoolCreateFlagBits::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value) + { + switch (value) + { + case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CommandPoolResetFlagBits::eReleaseResources) result += "ReleaseResources | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value) + { + switch (value) + { + case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & CommandBufferResetFlagBits::eReleaseResources) result += "ReleaseResources | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value) + { + switch (value) + { + case SampleCountFlagBits::e1: return "1"; + case SampleCountFlagBits::e2: return "2"; + case SampleCountFlagBits::e4: return "4"; + case SampleCountFlagBits::e8: return "8"; + case SampleCountFlagBits::e16: return "16"; + case SampleCountFlagBits::e32: return "32"; + case SampleCountFlagBits::e64: return "64"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SampleCountFlagBits::e1) result += "1 | "; + if (value & SampleCountFlagBits::e2) result += "2 | "; + if (value & SampleCountFlagBits::e4) result += "4 | "; + if (value & SampleCountFlagBits::e8) result 
+= "8 | "; + if (value & SampleCountFlagBits::e16) result += "16 | "; + if (value & SampleCountFlagBits::e32) result += "32 | "; + if (value & SampleCountFlagBits::e64) result += "64 | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value) + { + switch (value) + { + case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & AttachmentDescriptionFlagBits::eMayAlias) result += "MayAlias | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value) + { + switch (value) + { + case StencilFaceFlagBits::eFront: return "Front"; + case StencilFaceFlagBits::eBack: return "Back"; + case StencilFaceFlagBits::eVkStencilFrontAndBack: return "VkStencilFrontAndBack"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & StencilFaceFlagBits::eFront) result += "Front | "; + if (value & StencilFaceFlagBits::eBack) result += "Back | "; + if (value & StencilFaceFlagBits::eVkStencilFrontAndBack) result += "VkStencilFrontAndBack | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value) + { + switch (value) + { + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT: return "UpdateAfterBindEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet) result += "FreeDescriptorSet | "; + if (value & DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT) result += "UpdateAfterBindEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value) + { + switch (value) + { + case DependencyFlagBits::eByRegion: return "ByRegion"; + case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; + case DependencyFlagBits::eViewLocal: return "ViewLocal"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DependencyFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & DependencyFlagBits::eByRegion) result += "ByRegion | "; + if (value & DependencyFlagBits::eDeviceGroup) result += "DeviceGroup | "; + if (value & DependencyFlagBits::eViewLocal) result += "ViewLocal | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value) + { + switch (value) + { + case PresentModeKHR::eImmediate: return "Immediate"; + case PresentModeKHR::eMailbox: return "Mailbox"; + case PresentModeKHR::eFifo: return "Fifo"; + case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed"; + case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh"; + case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value) + { + switch (value) + { + case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear"; + case 
ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT"; + case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT"; + case ColorSpaceKHR::eDciP3LinearEXT: return "DciP3LinearEXT"; + case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT"; + case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT"; + case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT"; + case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT"; + case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT"; + case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT"; + case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT"; + case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT"; + case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT"; + case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT"; + case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value) + { + switch (value) + { + case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & DisplayPlaneAlphaFlagBitsKHR::eOpaque) result += "Opaque | "; + if (value & DisplayPlaneAlphaFlagBitsKHR::eGlobal) result += "Global | "; + if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel) result += "PerPixel | "; + if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) result += "PerPixelPremultiplied | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value) + { + switch (value) + { + case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque"; + case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied"; + case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied"; + case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & CompositeAlphaFlagBitsKHR::eOpaque) result += "Opaque | "; + if (value & CompositeAlphaFlagBitsKHR::ePreMultiplied) result += "PreMultiplied | "; + if (value & CompositeAlphaFlagBitsKHR::ePostMultiplied) result += "PostMultiplied | "; + if (value & CompositeAlphaFlagBitsKHR::eInherit) result += "Inherit | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value) + { + switch (value) + { + case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity"; + case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90"; + case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180"; + case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180"; + case 
SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270"; + case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & SurfaceTransformFlagBitsKHR::eIdentity) result += "Identity | "; + if (value & SurfaceTransformFlagBitsKHR::eRotate90) result += "Rotate90 | "; + if (value & SurfaceTransformFlagBitsKHR::eRotate180) result += "Rotate180 | "; + if (value & SurfaceTransformFlagBitsKHR::eRotate270) result += "Rotate270 | "; + if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirror) result += "HorizontalMirror | "; + if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) result += "HorizontalMirrorRotate90 | "; + if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) result += "HorizontalMirrorRotate180 | "; + if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) result += "HorizontalMirrorRotate270 | "; + if (value & SurfaceTransformFlagBitsKHR::eInherit) result += "Inherit | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value) + { + switch (value) + { + case DebugReportFlagBitsEXT::eInformation: return "Information"; + case DebugReportFlagBitsEXT::eWarning: return "Warning"; + case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning"; + case DebugReportFlagBitsEXT::eError: return "Error"; + case DebugReportFlagBitsEXT::eDebug: return "Debug"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & DebugReportFlagBitsEXT::eInformation) result += "Information | "; + if (value & DebugReportFlagBitsEXT::eWarning) result += "Warning | "; + if (value & DebugReportFlagBitsEXT::ePerformanceWarning) result += "PerformanceWarning | "; + if (value & DebugReportFlagBitsEXT::eError) result += "Error | "; + if (value & DebugReportFlagBitsEXT::eDebug) result += "Debug | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value) + { + switch (value) + { + case DebugReportObjectTypeEXT::eUnknown: return "Unknown"; + case DebugReportObjectTypeEXT::eInstance: return "Instance"; + case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice"; + case DebugReportObjectTypeEXT::eDevice: return "Device"; + case DebugReportObjectTypeEXT::eQueue: return "Queue"; + case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore"; + case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer"; + case DebugReportObjectTypeEXT::eFence: return "Fence"; + case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory"; + case DebugReportObjectTypeEXT::eBuffer: return "Buffer"; + case DebugReportObjectTypeEXT::eImage: return "Image"; + case DebugReportObjectTypeEXT::eEvent: return "Event"; + case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool"; + case DebugReportObjectTypeEXT::eBufferView: return "BufferView"; + case DebugReportObjectTypeEXT::eImageView: return "ImageView"; + case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule"; + case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache"; + case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout"; + case DebugReportObjectTypeEXT::eRenderPass: 
return "RenderPass"; + case DebugReportObjectTypeEXT::ePipeline: return "Pipeline"; + case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout"; + case DebugReportObjectTypeEXT::eSampler: return "Sampler"; + case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool"; + case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet"; + case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer"; + case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool"; + case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr"; + case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr"; + case DebugReportObjectTypeEXT::eDebugReportCallbackExt: return "DebugReportCallbackExt"; + case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr"; + case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr"; + case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx"; + case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx"; + case DebugReportObjectTypeEXT::eValidationCacheExt: return "ValidationCacheExt"; + case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value) + { + switch (value) + { + case RasterizationOrderAMD::eStrict: return "Strict"; + case RasterizationOrderAMD::eRelaxed: return "Relaxed"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value) + { + switch (value) + { + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) result += "OpaqueWin32 | "; + if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | "; + if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) result += "D3D11Image | "; + if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) result += "D3D11ImageKmt | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value) + { + switch (value) + { + case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) result += "DedicatedOnly | "; + if (value & ExternalMemoryFeatureFlagBitsNV::eExportable) result += "Exportable | "; + if (value & ExternalMemoryFeatureFlagBitsNV::eImportable) result += "Importable | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string 
to_string(ValidationCheckEXT value) + { + switch (value) + { + case ValidationCheckEXT::eAll: return "All"; + case ValidationCheckEXT::eShaders: return "Shaders"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubgroupFeatureFlagBits value) + { + switch (value) + { + case SubgroupFeatureFlagBits::eBasic: return "Basic"; + case SubgroupFeatureFlagBits::eVote: return "Vote"; + case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic"; + case SubgroupFeatureFlagBits::eBallot: return "Ballot"; + case SubgroupFeatureFlagBits::eShuffle: return "Shuffle"; + case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; + case SubgroupFeatureFlagBits::eClustered: return "Clustered"; + case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubgroupFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SubgroupFeatureFlagBits::eBasic) result += "Basic | "; + if (value & SubgroupFeatureFlagBits::eVote) result += "Vote | "; + if (value & SubgroupFeatureFlagBits::eArithmetic) result += "Arithmetic | "; + if (value & SubgroupFeatureFlagBits::eBallot) result += "Ballot | "; + if (value & SubgroupFeatureFlagBits::eShuffle) result += "Shuffle | "; + if (value & SubgroupFeatureFlagBits::eShuffleRelative) result += "ShuffleRelative | "; + if (value & SubgroupFeatureFlagBits::eClustered) result += "Clustered | "; + if (value & SubgroupFeatureFlagBits::eQuad) result += "Quad | "; + if (value & SubgroupFeatureFlagBits::ePartitionedNV) result += "PartitionedNV | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value) + { + switch (value) + { + case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value) + { + if (!value) return "{}"; + std::string result; + if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | "; + if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | "; + if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | "; + if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value) + { + switch (value) + { + case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics"; + case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value) + { + if (!value) return "{}"; + std::string result; + if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | "; + if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE 
std::string to_string(IndirectCommandsTokenTypeNVX value) + { + switch (value) + { + case IndirectCommandsTokenTypeNVX::ePipeline: return "Pipeline"; + case IndirectCommandsTokenTypeNVX::eDescriptorSet: return "DescriptorSet"; + case IndirectCommandsTokenTypeNVX::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeNVX::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeNVX::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeNVX::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeNVX::eDraw: return "Draw"; + case IndirectCommandsTokenTypeNVX::eDispatch: return "Dispatch"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value) + { + switch (value) + { + case ObjectEntryTypeNVX::eDescriptorSet: return "DescriptorSet"; + case ObjectEntryTypeNVX::ePipeline: return "Pipeline"; + case ObjectEntryTypeNVX::eIndexBuffer: return "IndexBuffer"; + case ObjectEntryTypeNVX::eVertexBuffer: return "VertexBuffer"; + case ObjectEntryTypeNVX::ePushConstant: return "PushConstant"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits value) + { + switch (value) + { + case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT: return "UpdateAfterBindPoolEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) result += "PushDescriptorKHR | "; + if (value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT) result += "UpdateAfterBindPoolEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBits value) + { + switch (value) + { + case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture"; + case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource"; + case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT"; + case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID"; + case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT"; + case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | "; + if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | "; + if (value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | "; + if (value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture) result += "D3D11Texture | "; + if (value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) result += "D3D11TextureKmt | "; 
+ if (value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap) result += "D3D12Heap | "; + if (value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource) result += "D3D12Resource | "; + if (value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) result += "DmaBufEXT | "; + if (value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) result += "AndroidHardwareBufferANDROID | "; + if (value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) result += "HostAllocationEXT | "; + if (value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT) result += "HostMappedForeignMemoryEXT | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBits value) + { + switch (value) + { + case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalMemoryFeatureFlagBits::eDedicatedOnly) result += "DedicatedOnly | "; + if (value & ExternalMemoryFeatureFlagBits::eExportable) result += "Exportable | "; + if (value & ExternalMemoryFeatureFlagBits::eImportable) result += "Importable | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlagBits value) + { + switch (value) + { + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence"; + case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreHandleTypeFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | "; + if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | "; + if (value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | "; + if (value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) result += "D3D12Fence | "; + if (value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd) result += "SyncFd | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlagBits value) + { + switch (value) + { + case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable"; + case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalSemaphoreFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalSemaphoreFeatureFlagBits::eExportable) result += "Exportable | "; + if (value & ExternalSemaphoreFeatureFlagBits::eImportable) result += "Importable | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlagBits value) + { + switch (value) + { + case SemaphoreImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid"; + } + } + + 
VULKAN_HPP_INLINE std::string to_string(SemaphoreImportFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SemaphoreImportFlagBits::eTemporary) result += "Temporary | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlagBits value) + { + switch (value) + { + case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalFenceHandleTypeFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalFenceHandleTypeFlagBits::eOpaqueFd) result += "OpaqueFd | "; + if (value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32) result += "OpaqueWin32 | "; + if (value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | "; + if (value & ExternalFenceHandleTypeFlagBits::eSyncFd) result += "SyncFd | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlagBits value) + { + switch (value) + { + case ExternalFenceFeatureFlagBits::eExportable: return "Exportable"; + case ExternalFenceFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ExternalFenceFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & ExternalFenceFeatureFlagBits::eExportable) result += "Exportable | "; + if (value & ExternalFenceFeatureFlagBits::eImportable) result += "Importable | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(FenceImportFlagBits value) + { + switch (value) + { + case FenceImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(FenceImportFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & FenceImportFlagBits::eTemporary) result += "Temporary | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagBitsEXT value) + { + switch (value) + { + case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & SurfaceCounterFlagBitsEXT::eVblank) result += "Vblank | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DisplayPowerStateEXT value) + { + switch (value) + { + case DisplayPowerStateEXT::eOff: return "Off"; + case DisplayPowerStateEXT::eSuspend: return "Suspend"; + case DisplayPowerStateEXT::eOn: return "On"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DeviceEventTypeEXT value) + { + switch (value) + { + case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DisplayEventTypeEXT value) + { + switch (value) + { + case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlagBits 
value) + { + switch (value) + { + case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(PeerMemoryFeatureFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & PeerMemoryFeatureFlagBits::eCopySrc) result += "CopySrc | "; + if (value & PeerMemoryFeatureFlagBits::eCopyDst) result += "CopyDst | "; + if (value & PeerMemoryFeatureFlagBits::eGenericSrc) result += "GenericSrc | "; + if (value & PeerMemoryFeatureFlagBits::eGenericDst) result += "GenericDst | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlagBits value) + { + switch (value) + { + case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(MemoryAllocateFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & MemoryAllocateFlagBits::eDeviceMask) result += "DeviceMask | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagBitsKHR value) + { + switch (value) + { + case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local"; + case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote"; + case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum"; + case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DeviceGroupPresentModeFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & DeviceGroupPresentModeFlagBitsKHR::eLocal) result += "Local | "; + if (value & DeviceGroupPresentModeFlagBitsKHR::eRemote) result += "Remote | "; + if (value & DeviceGroupPresentModeFlagBitsKHR::eSum) result += "Sum | "; + if (value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice) result += "LocalMultiDevice | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR value) + { + switch (value) + { + case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; + case SwapchainCreateFlagBitsKHR::eProtected: return "Protected"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR value) + { + if (!value) return "{}"; + std::string result; + if (value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) result += "SplitInstanceBindRegions | "; + if (value & SwapchainCreateFlagBitsKHR::eProtected) result += "Protected | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ViewportCoordinateSwizzleNV value) + { + switch (value) + { + case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX"; + case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX"; + case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY"; + case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY"; + case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ"; + case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ"; + case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW"; + case 
ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DiscardRectangleModeEXT value) + { + switch (value) + { + case DiscardRectangleModeEXT::eInclusive: return "Inclusive"; + case DiscardRectangleModeEXT::eExclusive: return "Exclusive"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits value) + { + switch (value) + { + case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX"; + case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags value) + { + if (!value) return "{}"; + std::string result; + if (value & SubpassDescriptionFlagBits::ePerViewAttributesNVX) result += "PerViewAttributesNVX | "; + if (value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) result += "PerViewPositionXOnlyNVX | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(PointClippingBehavior value) + { + switch (value) + { + case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes"; + case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerReductionModeEXT value) + { + switch (value) + { + case SamplerReductionModeEXT::eWeightedAverage: return "WeightedAverage"; + case SamplerReductionModeEXT::eMin: return "Min"; + case SamplerReductionModeEXT::eMax: return "Max"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(TessellationDomainOrigin value) + { + switch (value) + { + case TessellationDomainOrigin::eUpperLeft: return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft: return "LowerLeft"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerYcbcrModelConversion value) + { + switch (value) + { + case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity"; + case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity"; + case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709"; + case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601"; + case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(SamplerYcbcrRange value) + { + switch (value) + { + case SamplerYcbcrRange::eItuFull: return "ItuFull"; + case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ChromaLocation value) + { + switch (value) + { + case ChromaLocation::eCositedEven: return "CositedEven"; + case ChromaLocation::eMidpoint: return "Midpoint"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(BlendOverlapEXT value) + { + switch (value) + { + case BlendOverlapEXT::eUncorrelated: return "Uncorrelated"; + case BlendOverlapEXT::eDisjoint: return "Disjoint"; + case BlendOverlapEXT::eConjoint: return "Conjoint"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(CoverageModulationModeNV value) + { + switch (value) + { + case CoverageModulationModeNV::eNone: return "None"; + case CoverageModulationModeNV::eRgb: return "Rgb"; + case CoverageModulationModeNV::eAlpha: return "Alpha"; + case CoverageModulationModeNV::eRgba: return "Rgba"; + 
default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ValidationCacheHeaderVersionEXT value) + { + switch (value) + { + case ValidationCacheHeaderVersionEXT::eOne: return "One"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(ShaderInfoTypeAMD value) + { + switch (value) + { + case ShaderInfoTypeAMD::eStatistics: return "Statistics"; + case ShaderInfoTypeAMD::eBinary: return "Binary"; + case ShaderInfoTypeAMD::eDisassembly: return "Disassembly"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(QueueGlobalPriorityEXT value) + { + switch (value) + { + case QueueGlobalPriorityEXT::eLow: return "Low"; + case QueueGlobalPriorityEXT::eMedium: return "Medium"; + case QueueGlobalPriorityEXT::eHigh: return "High"; + case QueueGlobalPriorityEXT::eRealtime: return "Realtime"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageSeverityFlagBitsEXT value) + { + switch (value) + { + case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose"; + case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info"; + case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning"; + case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageSeverityFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) result += "Verbose | "; + if (value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo) result += "Info | "; + if (value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning) result += "Warning | "; + if (value & DebugUtilsMessageSeverityFlagBitsEXT::eError) result += "Error | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageTypeFlagBitsEXT value) + { + switch (value) + { + case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General"; + case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation"; + case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DebugUtilsMessageTypeFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral) result += "General | "; + if (value & DebugUtilsMessageTypeFlagBitsEXT::eValidation) result += "Validation | "; + if (value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance) result += "Performance | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + VULKAN_HPP_INLINE std::string to_string(ConservativeRasterizationModeEXT value) + { + switch (value) + { + case ConservativeRasterizationModeEXT::eDisabled: return "Disabled"; + case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate"; + case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate"; + default: return "invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorBindingFlagBitsEXT value) + { + switch (value) + { + case DescriptorBindingFlagBitsEXT::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending"; + case DescriptorBindingFlagBitsEXT::ePartiallyBound: return "PartiallyBound"; + case DescriptorBindingFlagBitsEXT::eVariableDescriptorCount: return "VariableDescriptorCount"; + default: return 
"invalid"; + } + } + + VULKAN_HPP_INLINE std::string to_string(DescriptorBindingFlagsEXT value) + { + if (!value) return "{}"; + std::string result; + if (value & DescriptorBindingFlagBitsEXT::eUpdateAfterBind) result += "UpdateAfterBind | "; + if (value & DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) result += "UpdateUnusedWhilePending | "; + if (value & DescriptorBindingFlagBitsEXT::ePartiallyBound) result += "PartiallyBound | "; + if (value & DescriptorBindingFlagBitsEXT::eVariableDescriptorCount) result += "VariableDescriptorCount | "; + return "{" + result.substr(0, result.size() - 3) + "}"; + } + + class DispatchLoaderDynamic + { + public: + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + 
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; +#ifdef VK_USE_PLATFORM_ANDROID_KHR + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; +#ifdef VK_USE_PLATFORM_IOS_MVK + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0; + PFN_vkCreateInstance vkCreateInstance = 0; +#ifdef VK_USE_PLATFORM_MACOS_MVK + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +#ifdef VK_USE_PLATFORM_MIR_KHR + PFN_vkCreateMirSurfaceKHR vkCreateMirSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkCreateSamplerYcbcrConversionKHR 
vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; +#ifdef VK_USE_PLATFORM_VI_NN + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#endif /*VK_USE_PLATFORM_VI_NN*/ +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0; + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkEnumeratePhysicalDeviceGroups 
vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkFreeMemory vkFreeMemory = 0; +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_NV + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#endif /*VK_USE_PLATFORM_WIN32_NV*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR 
vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; +#ifdef VK_USE_PLATFORM_MIR_KHR + PFN_vkGetPhysicalDeviceMirPresentationSupportKHR vkGetPhysicalDeviceMirPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR 
vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0; + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkSetEvent vkSetEvent = 0; + 
PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + public: + DispatchLoaderDynamic(Instance instance = Instance(), Device device = Device()) + { + if (instance) + { + init(instance, device); + } + } + + void init(Instance instance, Device device = Device()) + { + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR(device ? device.getProcAddr( "vkAcquireNextImage2KHR") : instance.getProcAddr( "vkAcquireNextImage2KHR")); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR(device ? device.getProcAddr( "vkAcquireNextImageKHR") : instance.getProcAddr( "vkAcquireNextImageKHR")); +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT(device ? device.getProcAddr( "vkAcquireXlibDisplayEXT") : instance.getProcAddr( "vkAcquireXlibDisplayEXT")); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers(device ? device.getProcAddr( "vkAllocateCommandBuffers") : instance.getProcAddr( "vkAllocateCommandBuffers")); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets(device ? device.getProcAddr( "vkAllocateDescriptorSets") : instance.getProcAddr( "vkAllocateDescriptorSets")); + vkAllocateMemory = PFN_vkAllocateMemory(device ? device.getProcAddr( "vkAllocateMemory") : instance.getProcAddr( "vkAllocateMemory")); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer(device ? device.getProcAddr( "vkBeginCommandBuffer") : instance.getProcAddr( "vkBeginCommandBuffer")); + vkBindBufferMemory = PFN_vkBindBufferMemory(device ? device.getProcAddr( "vkBindBufferMemory") : instance.getProcAddr( "vkBindBufferMemory")); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2(device ? device.getProcAddr( "vkBindBufferMemory2") : instance.getProcAddr( "vkBindBufferMemory2")); + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR(device ? device.getProcAddr( "vkBindBufferMemory2KHR") : instance.getProcAddr( "vkBindBufferMemory2KHR")); + vkBindImageMemory = PFN_vkBindImageMemory(device ? device.getProcAddr( "vkBindImageMemory") : instance.getProcAddr( "vkBindImageMemory")); + vkBindImageMemory2 = PFN_vkBindImageMemory2(device ? device.getProcAddr( "vkBindImageMemory2") : instance.getProcAddr( "vkBindImageMemory2")); + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR(device ? device.getProcAddr( "vkBindImageMemory2KHR") : instance.getProcAddr( "vkBindImageMemory2KHR")); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdBeginDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdBeginDebugUtilsLabelEXT")); + vkCmdBeginQuery = PFN_vkCmdBeginQuery(device ? device.getProcAddr( "vkCmdBeginQuery") : instance.getProcAddr( "vkCmdBeginQuery")); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass(device ? device.getProcAddr( "vkCmdBeginRenderPass") : instance.getProcAddr( "vkCmdBeginRenderPass")); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets(device ? 
device.getProcAddr( "vkCmdBindDescriptorSets") : instance.getProcAddr( "vkCmdBindDescriptorSets")); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer(device ? device.getProcAddr( "vkCmdBindIndexBuffer") : instance.getProcAddr( "vkCmdBindIndexBuffer")); + vkCmdBindPipeline = PFN_vkCmdBindPipeline(device ? device.getProcAddr( "vkCmdBindPipeline") : instance.getProcAddr( "vkCmdBindPipeline")); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers(device ? device.getProcAddr( "vkCmdBindVertexBuffers") : instance.getProcAddr( "vkCmdBindVertexBuffers")); + vkCmdBlitImage = PFN_vkCmdBlitImage(device ? device.getProcAddr( "vkCmdBlitImage") : instance.getProcAddr( "vkCmdBlitImage")); + vkCmdClearAttachments = PFN_vkCmdClearAttachments(device ? device.getProcAddr( "vkCmdClearAttachments") : instance.getProcAddr( "vkCmdClearAttachments")); + vkCmdClearColorImage = PFN_vkCmdClearColorImage(device ? device.getProcAddr( "vkCmdClearColorImage") : instance.getProcAddr( "vkCmdClearColorImage")); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage(device ? device.getProcAddr( "vkCmdClearDepthStencilImage") : instance.getProcAddr( "vkCmdClearDepthStencilImage")); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer(device ? device.getProcAddr( "vkCmdCopyBuffer") : instance.getProcAddr( "vkCmdCopyBuffer")); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage(device ? device.getProcAddr( "vkCmdCopyBufferToImage") : instance.getProcAddr( "vkCmdCopyBufferToImage")); + vkCmdCopyImage = PFN_vkCmdCopyImage(device ? device.getProcAddr( "vkCmdCopyImage") : instance.getProcAddr( "vkCmdCopyImage")); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer(device ? device.getProcAddr( "vkCmdCopyImageToBuffer") : instance.getProcAddr( "vkCmdCopyImageToBuffer")); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults(device ? device.getProcAddr( "vkCmdCopyQueryPoolResults") : instance.getProcAddr( "vkCmdCopyQueryPoolResults")); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT(device ? device.getProcAddr( "vkCmdDebugMarkerBeginEXT") : instance.getProcAddr( "vkCmdDebugMarkerBeginEXT")); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT(device ? device.getProcAddr( "vkCmdDebugMarkerEndEXT") : instance.getProcAddr( "vkCmdDebugMarkerEndEXT")); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT(device ? device.getProcAddr( "vkCmdDebugMarkerInsertEXT") : instance.getProcAddr( "vkCmdDebugMarkerInsertEXT")); + vkCmdDispatch = PFN_vkCmdDispatch(device ? device.getProcAddr( "vkCmdDispatch") : instance.getProcAddr( "vkCmdDispatch")); + vkCmdDispatchBase = PFN_vkCmdDispatchBase(device ? device.getProcAddr( "vkCmdDispatchBase") : instance.getProcAddr( "vkCmdDispatchBase")); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR(device ? device.getProcAddr( "vkCmdDispatchBaseKHR") : instance.getProcAddr( "vkCmdDispatchBaseKHR")); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect(device ? device.getProcAddr( "vkCmdDispatchIndirect") : instance.getProcAddr( "vkCmdDispatchIndirect")); + vkCmdDraw = PFN_vkCmdDraw(device ? device.getProcAddr( "vkCmdDraw") : instance.getProcAddr( "vkCmdDraw")); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed(device ? device.getProcAddr( "vkCmdDrawIndexed") : instance.getProcAddr( "vkCmdDrawIndexed")); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect(device ? device.getProcAddr( "vkCmdDrawIndexedIndirect") : instance.getProcAddr( "vkCmdDrawIndexedIndirect")); + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD(device ? 
device.getProcAddr( "vkCmdDrawIndexedIndirectCountAMD") : instance.getProcAddr( "vkCmdDrawIndexedIndirectCountAMD")); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect(device ? device.getProcAddr( "vkCmdDrawIndirect") : instance.getProcAddr( "vkCmdDrawIndirect")); + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD(device ? device.getProcAddr( "vkCmdDrawIndirectCountAMD") : instance.getProcAddr( "vkCmdDrawIndirectCountAMD")); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdEndDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdEndDebugUtilsLabelEXT")); + vkCmdEndQuery = PFN_vkCmdEndQuery(device ? device.getProcAddr( "vkCmdEndQuery") : instance.getProcAddr( "vkCmdEndQuery")); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass(device ? device.getProcAddr( "vkCmdEndRenderPass") : instance.getProcAddr( "vkCmdEndRenderPass")); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands(device ? device.getProcAddr( "vkCmdExecuteCommands") : instance.getProcAddr( "vkCmdExecuteCommands")); + vkCmdFillBuffer = PFN_vkCmdFillBuffer(device ? device.getProcAddr( "vkCmdFillBuffer") : instance.getProcAddr( "vkCmdFillBuffer")); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT(device ? device.getProcAddr( "vkCmdInsertDebugUtilsLabelEXT") : instance.getProcAddr( "vkCmdInsertDebugUtilsLabelEXT")); + vkCmdNextSubpass = PFN_vkCmdNextSubpass(device ? device.getProcAddr( "vkCmdNextSubpass") : instance.getProcAddr( "vkCmdNextSubpass")); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier(device ? device.getProcAddr( "vkCmdPipelineBarrier") : instance.getProcAddr( "vkCmdPipelineBarrier")); + vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX(device ? device.getProcAddr( "vkCmdProcessCommandsNVX") : instance.getProcAddr( "vkCmdProcessCommandsNVX")); + vkCmdPushConstants = PFN_vkCmdPushConstants(device ? device.getProcAddr( "vkCmdPushConstants") : instance.getProcAddr( "vkCmdPushConstants")); + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR(device ? device.getProcAddr( "vkCmdPushDescriptorSetKHR") : instance.getProcAddr( "vkCmdPushDescriptorSetKHR")); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(device ? device.getProcAddr( "vkCmdPushDescriptorSetWithTemplateKHR") : instance.getProcAddr( "vkCmdPushDescriptorSetWithTemplateKHR")); + vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX(device ? device.getProcAddr( "vkCmdReserveSpaceForCommandsNVX") : instance.getProcAddr( "vkCmdReserveSpaceForCommandsNVX")); + vkCmdResetEvent = PFN_vkCmdResetEvent(device ? device.getProcAddr( "vkCmdResetEvent") : instance.getProcAddr( "vkCmdResetEvent")); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool(device ? device.getProcAddr( "vkCmdResetQueryPool") : instance.getProcAddr( "vkCmdResetQueryPool")); + vkCmdResolveImage = PFN_vkCmdResolveImage(device ? device.getProcAddr( "vkCmdResolveImage") : instance.getProcAddr( "vkCmdResolveImage")); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants(device ? device.getProcAddr( "vkCmdSetBlendConstants") : instance.getProcAddr( "vkCmdSetBlendConstants")); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias(device ? device.getProcAddr( "vkCmdSetDepthBias") : instance.getProcAddr( "vkCmdSetDepthBias")); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds(device ? device.getProcAddr( "vkCmdSetDepthBounds") : instance.getProcAddr( "vkCmdSetDepthBounds")); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask(device ? 
device.getProcAddr( "vkCmdSetDeviceMask") : instance.getProcAddr( "vkCmdSetDeviceMask")); + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR(device ? device.getProcAddr( "vkCmdSetDeviceMaskKHR") : instance.getProcAddr( "vkCmdSetDeviceMaskKHR")); + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT(device ? device.getProcAddr( "vkCmdSetDiscardRectangleEXT") : instance.getProcAddr( "vkCmdSetDiscardRectangleEXT")); + vkCmdSetEvent = PFN_vkCmdSetEvent(device ? device.getProcAddr( "vkCmdSetEvent") : instance.getProcAddr( "vkCmdSetEvent")); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth(device ? device.getProcAddr( "vkCmdSetLineWidth") : instance.getProcAddr( "vkCmdSetLineWidth")); + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT(device ? device.getProcAddr( "vkCmdSetSampleLocationsEXT") : instance.getProcAddr( "vkCmdSetSampleLocationsEXT")); + vkCmdSetScissor = PFN_vkCmdSetScissor(device ? device.getProcAddr( "vkCmdSetScissor") : instance.getProcAddr( "vkCmdSetScissor")); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask(device ? device.getProcAddr( "vkCmdSetStencilCompareMask") : instance.getProcAddr( "vkCmdSetStencilCompareMask")); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference(device ? device.getProcAddr( "vkCmdSetStencilReference") : instance.getProcAddr( "vkCmdSetStencilReference")); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask(device ? device.getProcAddr( "vkCmdSetStencilWriteMask") : instance.getProcAddr( "vkCmdSetStencilWriteMask")); + vkCmdSetViewport = PFN_vkCmdSetViewport(device ? device.getProcAddr( "vkCmdSetViewport") : instance.getProcAddr( "vkCmdSetViewport")); + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV(device ? device.getProcAddr( "vkCmdSetViewportWScalingNV") : instance.getProcAddr( "vkCmdSetViewportWScalingNV")); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer(device ? device.getProcAddr( "vkCmdUpdateBuffer") : instance.getProcAddr( "vkCmdUpdateBuffer")); + vkCmdWaitEvents = PFN_vkCmdWaitEvents(device ? device.getProcAddr( "vkCmdWaitEvents") : instance.getProcAddr( "vkCmdWaitEvents")); + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD(device ? device.getProcAddr( "vkCmdWriteBufferMarkerAMD") : instance.getProcAddr( "vkCmdWriteBufferMarkerAMD")); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp(device ? device.getProcAddr( "vkCmdWriteTimestamp") : instance.getProcAddr( "vkCmdWriteTimestamp")); +#ifdef VK_USE_PLATFORM_ANDROID_KHR + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR(instance.getProcAddr( "vkCreateAndroidSurfaceKHR")); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkCreateBuffer = PFN_vkCreateBuffer(device ? device.getProcAddr( "vkCreateBuffer") : instance.getProcAddr( "vkCreateBuffer")); + vkCreateBufferView = PFN_vkCreateBufferView(device ? device.getProcAddr( "vkCreateBufferView") : instance.getProcAddr( "vkCreateBufferView")); + vkCreateCommandPool = PFN_vkCreateCommandPool(device ? device.getProcAddr( "vkCreateCommandPool") : instance.getProcAddr( "vkCreateCommandPool")); + vkCreateComputePipelines = PFN_vkCreateComputePipelines(device ? device.getProcAddr( "vkCreateComputePipelines") : instance.getProcAddr( "vkCreateComputePipelines")); + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT(instance.getProcAddr( "vkCreateDebugReportCallbackEXT")); + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT(instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT")); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool(device ? 
device.getProcAddr( "vkCreateDescriptorPool") : instance.getProcAddr( "vkCreateDescriptorPool")); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout(device ? device.getProcAddr( "vkCreateDescriptorSetLayout") : instance.getProcAddr( "vkCreateDescriptorSetLayout")); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate(device ? device.getProcAddr( "vkCreateDescriptorUpdateTemplate") : instance.getProcAddr( "vkCreateDescriptorUpdateTemplate")); + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR(device ? device.getProcAddr( "vkCreateDescriptorUpdateTemplateKHR") : instance.getProcAddr( "vkCreateDescriptorUpdateTemplateKHR")); + vkCreateDevice = PFN_vkCreateDevice(device ? device.getProcAddr( "vkCreateDevice") : instance.getProcAddr( "vkCreateDevice")); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR(device ? device.getProcAddr( "vkCreateDisplayModeKHR") : instance.getProcAddr( "vkCreateDisplayModeKHR")); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR(instance.getProcAddr( "vkCreateDisplayPlaneSurfaceKHR")); + vkCreateEvent = PFN_vkCreateEvent(device ? device.getProcAddr( "vkCreateEvent") : instance.getProcAddr( "vkCreateEvent")); + vkCreateFence = PFN_vkCreateFence(device ? device.getProcAddr( "vkCreateFence") : instance.getProcAddr( "vkCreateFence")); + vkCreateFramebuffer = PFN_vkCreateFramebuffer(device ? device.getProcAddr( "vkCreateFramebuffer") : instance.getProcAddr( "vkCreateFramebuffer")); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines(device ? device.getProcAddr( "vkCreateGraphicsPipelines") : instance.getProcAddr( "vkCreateGraphicsPipelines")); +#ifdef VK_USE_PLATFORM_IOS_MVK + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK(instance.getProcAddr( "vkCreateIOSSurfaceMVK")); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + vkCreateImage = PFN_vkCreateImage(device ? device.getProcAddr( "vkCreateImage") : instance.getProcAddr( "vkCreateImage")); + vkCreateImageView = PFN_vkCreateImageView(device ? device.getProcAddr( "vkCreateImageView") : instance.getProcAddr( "vkCreateImageView")); + vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX(device ? device.getProcAddr( "vkCreateIndirectCommandsLayoutNVX") : instance.getProcAddr( "vkCreateIndirectCommandsLayoutNVX")); + vkCreateInstance = PFN_vkCreateInstance(instance.getProcAddr( "vkCreateInstance")); +#ifdef VK_USE_PLATFORM_MACOS_MVK + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK(instance.getProcAddr( "vkCreateMacOSSurfaceMVK")); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +#ifdef VK_USE_PLATFORM_MIR_KHR + vkCreateMirSurfaceKHR = PFN_vkCreateMirSurfaceKHR(instance.getProcAddr( "vkCreateMirSurfaceKHR")); +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX(device ? device.getProcAddr( "vkCreateObjectTableNVX") : instance.getProcAddr( "vkCreateObjectTableNVX")); + vkCreatePipelineCache = PFN_vkCreatePipelineCache(device ? device.getProcAddr( "vkCreatePipelineCache") : instance.getProcAddr( "vkCreatePipelineCache")); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout(device ? device.getProcAddr( "vkCreatePipelineLayout") : instance.getProcAddr( "vkCreatePipelineLayout")); + vkCreateQueryPool = PFN_vkCreateQueryPool(device ? device.getProcAddr( "vkCreateQueryPool") : instance.getProcAddr( "vkCreateQueryPool")); + vkCreateRenderPass = PFN_vkCreateRenderPass(device ? 
device.getProcAddr( "vkCreateRenderPass") : instance.getProcAddr( "vkCreateRenderPass")); + vkCreateSampler = PFN_vkCreateSampler(device ? device.getProcAddr( "vkCreateSampler") : instance.getProcAddr( "vkCreateSampler")); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion(device ? device.getProcAddr( "vkCreateSamplerYcbcrConversion") : instance.getProcAddr( "vkCreateSamplerYcbcrConversion")); + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR(device ? device.getProcAddr( "vkCreateSamplerYcbcrConversionKHR") : instance.getProcAddr( "vkCreateSamplerYcbcrConversionKHR")); + vkCreateSemaphore = PFN_vkCreateSemaphore(device ? device.getProcAddr( "vkCreateSemaphore") : instance.getProcAddr( "vkCreateSemaphore")); + vkCreateShaderModule = PFN_vkCreateShaderModule(device ? device.getProcAddr( "vkCreateShaderModule") : instance.getProcAddr( "vkCreateShaderModule")); + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR(device ? device.getProcAddr( "vkCreateSharedSwapchainsKHR") : instance.getProcAddr( "vkCreateSharedSwapchainsKHR")); + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR(device ? device.getProcAddr( "vkCreateSwapchainKHR") : instance.getProcAddr( "vkCreateSwapchainKHR")); + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT(device ? device.getProcAddr( "vkCreateValidationCacheEXT") : instance.getProcAddr( "vkCreateValidationCacheEXT")); +#ifdef VK_USE_PLATFORM_VI_NN + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN(instance.getProcAddr( "vkCreateViSurfaceNN")); +#endif /*VK_USE_PLATFORM_VI_NN*/ +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR(instance.getProcAddr( "vkCreateWaylandSurfaceKHR")); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR(instance.getProcAddr( "vkCreateWin32SurfaceKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR(instance.getProcAddr( "vkCreateXcbSurfaceKHR")); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR(instance.getProcAddr( "vkCreateXlibSurfaceKHR")); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT(device ? device.getProcAddr( "vkDebugMarkerSetObjectNameEXT") : instance.getProcAddr( "vkDebugMarkerSetObjectNameEXT")); + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT(device ? device.getProcAddr( "vkDebugMarkerSetObjectTagEXT") : instance.getProcAddr( "vkDebugMarkerSetObjectTagEXT")); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT(instance.getProcAddr( "vkDebugReportMessageEXT")); + vkDestroyBuffer = PFN_vkDestroyBuffer(device ? device.getProcAddr( "vkDestroyBuffer") : instance.getProcAddr( "vkDestroyBuffer")); + vkDestroyBufferView = PFN_vkDestroyBufferView(device ? device.getProcAddr( "vkDestroyBufferView") : instance.getProcAddr( "vkDestroyBufferView")); + vkDestroyCommandPool = PFN_vkDestroyCommandPool(device ? device.getProcAddr( "vkDestroyCommandPool") : instance.getProcAddr( "vkDestroyCommandPool")); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT(instance.getProcAddr( "vkDestroyDebugReportCallbackEXT")); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT(instance.getProcAddr( "vkDestroyDebugUtilsMessengerEXT")); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool(device ? 
device.getProcAddr( "vkDestroyDescriptorPool") : instance.getProcAddr( "vkDestroyDescriptorPool")); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout(device ? device.getProcAddr( "vkDestroyDescriptorSetLayout") : instance.getProcAddr( "vkDestroyDescriptorSetLayout")); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate(device ? device.getProcAddr( "vkDestroyDescriptorUpdateTemplate") : instance.getProcAddr( "vkDestroyDescriptorUpdateTemplate")); + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(device ? device.getProcAddr( "vkDestroyDescriptorUpdateTemplateKHR") : instance.getProcAddr( "vkDestroyDescriptorUpdateTemplateKHR")); + vkDestroyDevice = PFN_vkDestroyDevice(device ? device.getProcAddr( "vkDestroyDevice") : instance.getProcAddr( "vkDestroyDevice")); + vkDestroyEvent = PFN_vkDestroyEvent(device ? device.getProcAddr( "vkDestroyEvent") : instance.getProcAddr( "vkDestroyEvent")); + vkDestroyFence = PFN_vkDestroyFence(device ? device.getProcAddr( "vkDestroyFence") : instance.getProcAddr( "vkDestroyFence")); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer(device ? device.getProcAddr( "vkDestroyFramebuffer") : instance.getProcAddr( "vkDestroyFramebuffer")); + vkDestroyImage = PFN_vkDestroyImage(device ? device.getProcAddr( "vkDestroyImage") : instance.getProcAddr( "vkDestroyImage")); + vkDestroyImageView = PFN_vkDestroyImageView(device ? device.getProcAddr( "vkDestroyImageView") : instance.getProcAddr( "vkDestroyImageView")); + vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX(device ? device.getProcAddr( "vkDestroyIndirectCommandsLayoutNVX") : instance.getProcAddr( "vkDestroyIndirectCommandsLayoutNVX")); + vkDestroyInstance = PFN_vkDestroyInstance(instance.getProcAddr( "vkDestroyInstance")); + vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX(device ? device.getProcAddr( "vkDestroyObjectTableNVX") : instance.getProcAddr( "vkDestroyObjectTableNVX")); + vkDestroyPipeline = PFN_vkDestroyPipeline(device ? device.getProcAddr( "vkDestroyPipeline") : instance.getProcAddr( "vkDestroyPipeline")); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache(device ? device.getProcAddr( "vkDestroyPipelineCache") : instance.getProcAddr( "vkDestroyPipelineCache")); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout(device ? device.getProcAddr( "vkDestroyPipelineLayout") : instance.getProcAddr( "vkDestroyPipelineLayout")); + vkDestroyQueryPool = PFN_vkDestroyQueryPool(device ? device.getProcAddr( "vkDestroyQueryPool") : instance.getProcAddr( "vkDestroyQueryPool")); + vkDestroyRenderPass = PFN_vkDestroyRenderPass(device ? device.getProcAddr( "vkDestroyRenderPass") : instance.getProcAddr( "vkDestroyRenderPass")); + vkDestroySampler = PFN_vkDestroySampler(device ? device.getProcAddr( "vkDestroySampler") : instance.getProcAddr( "vkDestroySampler")); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion(device ? device.getProcAddr( "vkDestroySamplerYcbcrConversion") : instance.getProcAddr( "vkDestroySamplerYcbcrConversion")); + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR(device ? device.getProcAddr( "vkDestroySamplerYcbcrConversionKHR") : instance.getProcAddr( "vkDestroySamplerYcbcrConversionKHR")); + vkDestroySemaphore = PFN_vkDestroySemaphore(device ? device.getProcAddr( "vkDestroySemaphore") : instance.getProcAddr( "vkDestroySemaphore")); + vkDestroyShaderModule = PFN_vkDestroyShaderModule(device ? 
device.getProcAddr( "vkDestroyShaderModule") : instance.getProcAddr( "vkDestroyShaderModule")); + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR(instance.getProcAddr( "vkDestroySurfaceKHR")); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR(device ? device.getProcAddr( "vkDestroySwapchainKHR") : instance.getProcAddr( "vkDestroySwapchainKHR")); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT(device ? device.getProcAddr( "vkDestroyValidationCacheEXT") : instance.getProcAddr( "vkDestroyValidationCacheEXT")); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle(device ? device.getProcAddr( "vkDeviceWaitIdle") : instance.getProcAddr( "vkDeviceWaitIdle")); + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT(device ? device.getProcAddr( "vkDisplayPowerControlEXT") : instance.getProcAddr( "vkDisplayPowerControlEXT")); + vkEndCommandBuffer = PFN_vkEndCommandBuffer(device ? device.getProcAddr( "vkEndCommandBuffer") : instance.getProcAddr( "vkEndCommandBuffer")); + vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties(device ? device.getProcAddr( "vkEnumerateDeviceExtensionProperties") : instance.getProcAddr( "vkEnumerateDeviceExtensionProperties")); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties(device ? device.getProcAddr( "vkEnumerateDeviceLayerProperties") : instance.getProcAddr( "vkEnumerateDeviceLayerProperties")); + vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties(instance.getProcAddr( "vkEnumerateInstanceExtensionProperties")); + vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties(instance.getProcAddr( "vkEnumerateInstanceLayerProperties")); + vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion(instance.getProcAddr( "vkEnumerateInstanceVersion")); + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups(instance.getProcAddr( "vkEnumeratePhysicalDeviceGroups")); + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR(instance.getProcAddr( "vkEnumeratePhysicalDeviceGroupsKHR")); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices(instance.getProcAddr( "vkEnumeratePhysicalDevices")); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges(device ? device.getProcAddr( "vkFlushMappedMemoryRanges") : instance.getProcAddr( "vkFlushMappedMemoryRanges")); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers(device ? device.getProcAddr( "vkFreeCommandBuffers") : instance.getProcAddr( "vkFreeCommandBuffers")); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets(device ? device.getProcAddr( "vkFreeDescriptorSets") : instance.getProcAddr( "vkFreeDescriptorSets")); + vkFreeMemory = PFN_vkFreeMemory(device ? device.getProcAddr( "vkFreeMemory") : instance.getProcAddr( "vkFreeMemory")); +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(device ? device.getProcAddr( "vkGetAndroidHardwareBufferPropertiesANDROID") : instance.getProcAddr( "vkGetAndroidHardwareBufferPropertiesANDROID")); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements(device ? device.getProcAddr( "vkGetBufferMemoryRequirements") : instance.getProcAddr( "vkGetBufferMemoryRequirements")); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2(device ? 
device.getProcAddr( "vkGetBufferMemoryRequirements2") : instance.getProcAddr( "vkGetBufferMemoryRequirements2")); + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetBufferMemoryRequirements2KHR") : instance.getProcAddr( "vkGetBufferMemoryRequirements2KHR")); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport(device ? device.getProcAddr( "vkGetDescriptorSetLayoutSupport") : instance.getProcAddr( "vkGetDescriptorSetLayoutSupport")); + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR(device ? device.getProcAddr( "vkGetDescriptorSetLayoutSupportKHR") : instance.getProcAddr( "vkGetDescriptorSetLayoutSupportKHR")); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures(device ? device.getProcAddr( "vkGetDeviceGroupPeerMemoryFeatures") : instance.getProcAddr( "vkGetDeviceGroupPeerMemoryFeatures")); + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(device ? device.getProcAddr( "vkGetDeviceGroupPeerMemoryFeaturesKHR") : instance.getProcAddr( "vkGetDeviceGroupPeerMemoryFeaturesKHR")); + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(device ? device.getProcAddr( "vkGetDeviceGroupPresentCapabilitiesKHR") : instance.getProcAddr( "vkGetDeviceGroupPresentCapabilitiesKHR")); + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(device ? device.getProcAddr( "vkGetDeviceGroupSurfacePresentModesKHR") : instance.getProcAddr( "vkGetDeviceGroupSurfacePresentModesKHR")); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment(device ? device.getProcAddr( "vkGetDeviceMemoryCommitment") : instance.getProcAddr( "vkGetDeviceMemoryCommitment")); + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr(device ? device.getProcAddr( "vkGetDeviceProcAddr") : instance.getProcAddr( "vkGetDeviceProcAddr")); + vkGetDeviceQueue = PFN_vkGetDeviceQueue(device ? device.getProcAddr( "vkGetDeviceQueue") : instance.getProcAddr( "vkGetDeviceQueue")); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2(device ? device.getProcAddr( "vkGetDeviceQueue2") : instance.getProcAddr( "vkGetDeviceQueue2")); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR(device ? device.getProcAddr( "vkGetDisplayModePropertiesKHR") : instance.getProcAddr( "vkGetDisplayModePropertiesKHR")); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR(device ? device.getProcAddr( "vkGetDisplayPlaneCapabilitiesKHR") : instance.getProcAddr( "vkGetDisplayPlaneCapabilitiesKHR")); + vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR(device ? device.getProcAddr( "vkGetDisplayPlaneSupportedDisplaysKHR") : instance.getProcAddr( "vkGetDisplayPlaneSupportedDisplaysKHR")); + vkGetEventStatus = PFN_vkGetEventStatus(device ? device.getProcAddr( "vkGetEventStatus") : instance.getProcAddr( "vkGetEventStatus")); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR(device ? device.getProcAddr( "vkGetFenceFdKHR") : instance.getProcAddr( "vkGetFenceFdKHR")); + vkGetFenceStatus = PFN_vkGetFenceStatus(device ? device.getProcAddr( "vkGetFenceStatus") : instance.getProcAddr( "vkGetFenceStatus")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR(device ? device.getProcAddr( "vkGetFenceWin32HandleKHR") : instance.getProcAddr( "vkGetFenceWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements(device ? 
device.getProcAddr( "vkGetImageMemoryRequirements") : instance.getProcAddr( "vkGetImageMemoryRequirements")); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2(device ? device.getProcAddr( "vkGetImageMemoryRequirements2") : instance.getProcAddr( "vkGetImageMemoryRequirements2")); + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetImageMemoryRequirements2KHR") : instance.getProcAddr( "vkGetImageMemoryRequirements2KHR")); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements")); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements2") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements2")); + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(device ? device.getProcAddr( "vkGetImageSparseMemoryRequirements2KHR") : instance.getProcAddr( "vkGetImageSparseMemoryRequirements2KHR")); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout(device ? device.getProcAddr( "vkGetImageSubresourceLayout") : instance.getProcAddr( "vkGetImageSubresourceLayout")); + vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr(instance.getProcAddr( "vkGetInstanceProcAddr")); +#ifdef VK_USE_PLATFORM_ANDROID_ANDROID + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(device ? device.getProcAddr( "vkGetMemoryAndroidHardwareBufferANDROID") : instance.getProcAddr( "vkGetMemoryAndroidHardwareBufferANDROID")); +#endif /*VK_USE_PLATFORM_ANDROID_ANDROID*/ + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR(device ? device.getProcAddr( "vkGetMemoryFdKHR") : instance.getProcAddr( "vkGetMemoryFdKHR")); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR(device ? device.getProcAddr( "vkGetMemoryFdPropertiesKHR") : instance.getProcAddr( "vkGetMemoryFdPropertiesKHR")); + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT(device ? device.getProcAddr( "vkGetMemoryHostPointerPropertiesEXT") : instance.getProcAddr( "vkGetMemoryHostPointerPropertiesEXT")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR(device ? device.getProcAddr( "vkGetMemoryWin32HandleKHR") : instance.getProcAddr( "vkGetMemoryWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_NV + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV(device ? device.getProcAddr( "vkGetMemoryWin32HandleNV") : instance.getProcAddr( "vkGetMemoryWin32HandleNV")); +#endif /*VK_USE_PLATFORM_WIN32_NV*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR(device ? device.getProcAddr( "vkGetMemoryWin32HandlePropertiesKHR") : instance.getProcAddr( "vkGetMemoryWin32HandlePropertiesKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE(device ? device.getProcAddr( "vkGetPastPresentationTimingGOOGLE") : instance.getProcAddr( "vkGetPastPresentationTimingGOOGLE")); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(device ? 
device.getProcAddr( "vkGetPhysicalDeviceDisplayPlanePropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceDisplayPlanePropertiesKHR")); + vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceDisplayPropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceDisplayPropertiesKHR")); + vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalBufferProperties") : instance.getProcAddr( "vkGetPhysicalDeviceExternalBufferProperties")); + vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalBufferPropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceExternalBufferPropertiesKHR")); + vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalFenceProperties") : instance.getProcAddr( "vkGetPhysicalDeviceExternalFenceProperties")); + vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalFencePropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceExternalFencePropertiesKHR")); + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalImageFormatPropertiesNV") : instance.getProcAddr( "vkGetPhysicalDeviceExternalImageFormatPropertiesNV")); + vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalSemaphoreProperties") : instance.getProcAddr( "vkGetPhysicalDeviceExternalSemaphoreProperties")); + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR")); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures(device ? device.getProcAddr( "vkGetPhysicalDeviceFeatures") : instance.getProcAddr( "vkGetPhysicalDeviceFeatures")); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2(device ? device.getProcAddr( "vkGetPhysicalDeviceFeatures2") : instance.getProcAddr( "vkGetPhysicalDeviceFeatures2")); + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceFeatures2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceFeatures2KHR")); + vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceFormatProperties") : instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties")); + vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceFormatProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties2")); + vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceFormatProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceFormatProperties2KHR")); + vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX(device ? 
device.getProcAddr( "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX") : instance.getProcAddr( "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX")); + vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties") : instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties")); + vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2")); + vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceImageFormatProperties2KHR")); + vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceMemoryProperties") : instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties")); + vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2")); + vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceMemoryProperties2KHR")); +#ifdef VK_USE_PLATFORM_MIR_KHR + vkGetPhysicalDeviceMirPresentationSupportKHR = PFN_vkGetPhysicalDeviceMirPresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceMirPresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceMirPresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_MIR_KHR*/ + vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT(device ? device.getProcAddr( "vkGetPhysicalDeviceMultisamplePropertiesEXT") : instance.getProcAddr( "vkGetPhysicalDeviceMultisamplePropertiesEXT")); + vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR(device ? device.getProcAddr( "vkGetPhysicalDevicePresentRectanglesKHR") : instance.getProcAddr( "vkGetPhysicalDevicePresentRectanglesKHR")); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceProperties") : instance.getProcAddr( "vkGetPhysicalDeviceProperties")); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceProperties2")); + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceProperties2KHR")); + vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties") : instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties")); + vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2")); + vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR(device ? 
device.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceQueueFamilyProperties2KHR")); + vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties(device ? device.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties") : instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties")); + vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2(device ? device.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2") : instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2")); + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceSparseImageFormatProperties2KHR")); + vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2EXT") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2EXT")); + vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilities2KHR")); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilitiesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceCapabilitiesKHR")); + vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceFormats2KHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceFormats2KHR")); + vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceFormatsKHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceFormatsKHR")); + vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfacePresentModesKHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfacePresentModesKHR")); + vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceSurfaceSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceSurfaceSupportKHR")); +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceWaylandPresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceWaylandPresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceWin32PresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceWin32PresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_XCB_KHR + vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR(device ? 
device.getProcAddr( "vkGetPhysicalDeviceXcbPresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceXcbPresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR(device ? device.getProcAddr( "vkGetPhysicalDeviceXlibPresentationSupportKHR") : instance.getProcAddr( "vkGetPhysicalDeviceXlibPresentationSupportKHR")); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData(device ? device.getProcAddr( "vkGetPipelineCacheData") : instance.getProcAddr( "vkGetPipelineCacheData")); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults(device ? device.getProcAddr( "vkGetQueryPoolResults") : instance.getProcAddr( "vkGetQueryPoolResults")); +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_NV + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT(device ? device.getProcAddr( "vkGetRandROutputDisplayEXT") : instance.getProcAddr( "vkGetRandROutputDisplayEXT")); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_NV*/ + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE(device ? device.getProcAddr( "vkGetRefreshCycleDurationGOOGLE") : instance.getProcAddr( "vkGetRefreshCycleDurationGOOGLE")); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity(device ? device.getProcAddr( "vkGetRenderAreaGranularity") : instance.getProcAddr( "vkGetRenderAreaGranularity")); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR(device ? device.getProcAddr( "vkGetSemaphoreFdKHR") : instance.getProcAddr( "vkGetSemaphoreFdKHR")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR(device ? device.getProcAddr( "vkGetSemaphoreWin32HandleKHR") : instance.getProcAddr( "vkGetSemaphoreWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD(device ? device.getProcAddr( "vkGetShaderInfoAMD") : instance.getProcAddr( "vkGetShaderInfoAMD")); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT(device ? device.getProcAddr( "vkGetSwapchainCounterEXT") : instance.getProcAddr( "vkGetSwapchainCounterEXT")); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR(device ? device.getProcAddr( "vkGetSwapchainImagesKHR") : instance.getProcAddr( "vkGetSwapchainImagesKHR")); + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR(device ? device.getProcAddr( "vkGetSwapchainStatusKHR") : instance.getProcAddr( "vkGetSwapchainStatusKHR")); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT(device ? device.getProcAddr( "vkGetValidationCacheDataEXT") : instance.getProcAddr( "vkGetValidationCacheDataEXT")); + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR(device ? device.getProcAddr( "vkImportFenceFdKHR") : instance.getProcAddr( "vkImportFenceFdKHR")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR(device ? device.getProcAddr( "vkImportFenceWin32HandleKHR") : instance.getProcAddr( "vkImportFenceWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR(device ? device.getProcAddr( "vkImportSemaphoreFdKHR") : instance.getProcAddr( "vkImportSemaphoreFdKHR")); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR(device ? 
device.getProcAddr( "vkImportSemaphoreWin32HandleKHR") : instance.getProcAddr( "vkImportSemaphoreWin32HandleKHR")); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges(device ? device.getProcAddr( "vkInvalidateMappedMemoryRanges") : instance.getProcAddr( "vkInvalidateMappedMemoryRanges")); + vkMapMemory = PFN_vkMapMemory(device ? device.getProcAddr( "vkMapMemory") : instance.getProcAddr( "vkMapMemory")); + vkMergePipelineCaches = PFN_vkMergePipelineCaches(device ? device.getProcAddr( "vkMergePipelineCaches") : instance.getProcAddr( "vkMergePipelineCaches")); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT(device ? device.getProcAddr( "vkMergeValidationCachesEXT") : instance.getProcAddr( "vkMergeValidationCachesEXT")); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueBeginDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueBeginDebugUtilsLabelEXT")); + vkQueueBindSparse = PFN_vkQueueBindSparse(device ? device.getProcAddr( "vkQueueBindSparse") : instance.getProcAddr( "vkQueueBindSparse")); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueEndDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueEndDebugUtilsLabelEXT")); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT(device ? device.getProcAddr( "vkQueueInsertDebugUtilsLabelEXT") : instance.getProcAddr( "vkQueueInsertDebugUtilsLabelEXT")); + vkQueuePresentKHR = PFN_vkQueuePresentKHR(device ? device.getProcAddr( "vkQueuePresentKHR") : instance.getProcAddr( "vkQueuePresentKHR")); + vkQueueSubmit = PFN_vkQueueSubmit(device ? device.getProcAddr( "vkQueueSubmit") : instance.getProcAddr( "vkQueueSubmit")); + vkQueueWaitIdle = PFN_vkQueueWaitIdle(device ? device.getProcAddr( "vkQueueWaitIdle") : instance.getProcAddr( "vkQueueWaitIdle")); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT(device ? device.getProcAddr( "vkRegisterDeviceEventEXT") : instance.getProcAddr( "vkRegisterDeviceEventEXT")); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT(device ? device.getProcAddr( "vkRegisterDisplayEventEXT") : instance.getProcAddr( "vkRegisterDisplayEventEXT")); + vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX(device ? device.getProcAddr( "vkRegisterObjectsNVX") : instance.getProcAddr( "vkRegisterObjectsNVX")); + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT(device ? device.getProcAddr( "vkReleaseDisplayEXT") : instance.getProcAddr( "vkReleaseDisplayEXT")); + vkResetCommandBuffer = PFN_vkResetCommandBuffer(device ? device.getProcAddr( "vkResetCommandBuffer") : instance.getProcAddr( "vkResetCommandBuffer")); + vkResetCommandPool = PFN_vkResetCommandPool(device ? device.getProcAddr( "vkResetCommandPool") : instance.getProcAddr( "vkResetCommandPool")); + vkResetDescriptorPool = PFN_vkResetDescriptorPool(device ? device.getProcAddr( "vkResetDescriptorPool") : instance.getProcAddr( "vkResetDescriptorPool")); + vkResetEvent = PFN_vkResetEvent(device ? device.getProcAddr( "vkResetEvent") : instance.getProcAddr( "vkResetEvent")); + vkResetFences = PFN_vkResetFences(device ? device.getProcAddr( "vkResetFences") : instance.getProcAddr( "vkResetFences")); + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT(device ? device.getProcAddr( "vkSetDebugUtilsObjectNameEXT") : instance.getProcAddr( "vkSetDebugUtilsObjectNameEXT")); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT(device ? 
device.getProcAddr( "vkSetDebugUtilsObjectTagEXT") : instance.getProcAddr( "vkSetDebugUtilsObjectTagEXT")); + vkSetEvent = PFN_vkSetEvent(device ? device.getProcAddr( "vkSetEvent") : instance.getProcAddr( "vkSetEvent")); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT(device ? device.getProcAddr( "vkSetHdrMetadataEXT") : instance.getProcAddr( "vkSetHdrMetadataEXT")); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT(instance.getProcAddr( "vkSubmitDebugUtilsMessageEXT")); + vkTrimCommandPool = PFN_vkTrimCommandPool(device ? device.getProcAddr( "vkTrimCommandPool") : instance.getProcAddr( "vkTrimCommandPool")); + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR(device ? device.getProcAddr( "vkTrimCommandPoolKHR") : instance.getProcAddr( "vkTrimCommandPoolKHR")); + vkUnmapMemory = PFN_vkUnmapMemory(device ? device.getProcAddr( "vkUnmapMemory") : instance.getProcAddr( "vkUnmapMemory")); + vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX(device ? device.getProcAddr( "vkUnregisterObjectsNVX") : instance.getProcAddr( "vkUnregisterObjectsNVX")); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate(device ? device.getProcAddr( "vkUpdateDescriptorSetWithTemplate") : instance.getProcAddr( "vkUpdateDescriptorSetWithTemplate")); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(device ? device.getProcAddr( "vkUpdateDescriptorSetWithTemplateKHR") : instance.getProcAddr( "vkUpdateDescriptorSetWithTemplateKHR")); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets(device ? device.getProcAddr( "vkUpdateDescriptorSets") : instance.getProcAddr( "vkUpdateDescriptorSets")); + vkWaitForFences = PFN_vkWaitForFences(device ? device.getProcAddr( "vkWaitForFences") : instance.getProcAddr( "vkWaitForFences")); + } + }; +} // namespace VULKAN_HPP_NAMESPACE + +#endif diff --git a/include/vulkan/vulkan_android.h b/include/vulkan/vulkan_android.h new file mode 100644 index 0000000..07aaeda --- /dev/null +++ b/include/vulkan/vulkan_android.h @@ -0,0 +1,126 @@ +#ifndef VULKAN_ANDROID_H_ +#define VULKAN_ANDROID_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_KHR_android_surface 1 +struct ANativeWindow; + +#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 +#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface" + +typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; + +typedef struct VkAndroidSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAndroidSurfaceCreateFlagsKHR flags; + struct ANativeWindow* window; +} VkAndroidSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( + VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#define VK_ANDROID_external_memory_android_hardware_buffer 1 +struct AHardwareBuffer; + +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 3 +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer" + +typedef struct VkAndroidHardwareBufferUsageANDROID { + VkStructureType sType; + void* pNext; + uint64_t androidHardwareBufferUsage; +} VkAndroidHardwareBufferUsageANDROID; + +typedef struct VkAndroidHardwareBufferPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkAndroidHardwareBufferPropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatPropertiesANDROID; + +typedef struct VkImportAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + struct AHardwareBuffer* buffer; +} VkImportAndroidHardwareBufferInfoANDROID; + +typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkMemoryGetAndroidHardwareBufferInfoANDROID; + +typedef struct VkExternalFormatANDROID { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatANDROID; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID( + VkDevice device, + const struct AHardwareBuffer* buffer, + VkAndroidHardwareBufferPropertiesANDROID* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID( + VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, + struct AHardwareBuffer** pBuffer); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h new file mode 100644 index 0000000..9fefb43 --- /dev/null +++ b/include/vulkan/vulkan_core.h 
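The vulkan_android.h file added above declares the VK_KHR_android_surface entry point (vkCreateAndroidSurfaceKHR) together with its create-info structure, and the vulkan.hpp dispatch loader just before it shows the getProcAddr-style function loading these headers are built around. The following C sketch is illustration only and not part of the patch: it resolves vkCreateAndroidSurfaceKHR through vkGetInstanceProcAddr (mirroring the loader's pattern) and fills VkAndroidSurfaceCreateInfoKHR. The helper name, the VK_USE_PLATFORM_ANDROID_KHR build setup, and the application-owned VkInstance and ANativeWindow are all hypothetical assumptions about surrounding application code.

/*
 * Illustrative sketch only -- not part of this patch. Assumes the application
 * targets Android and already owns a valid VkInstance and ANativeWindow.
 */
#define VK_USE_PLATFORM_ANDROID_KHR   /* pulls vulkan_android.h in via vulkan.h */
#include <vulkan/vulkan.h>
#include <stddef.h>

static VkSurfaceKHR create_android_surface(VkInstance instance,
                                           struct ANativeWindow *window)
{
    /* Resolve the extension entry point at run time, in the same spirit as the
     * getProcAddr-based loading done by the C++ dispatch loader above. */
    PFN_vkCreateAndroidSurfaceKHR pfnCreateAndroidSurfaceKHR =
        (PFN_vkCreateAndroidSurfaceKHR)vkGetInstanceProcAddr(
            instance, "vkCreateAndroidSurfaceKHR");
    if (pfnCreateAndroidSurfaceKHR == NULL) {
        return VK_NULL_HANDLE;  /* VK_KHR_android_surface not available */
    }

    /* Fill the create-info structure declared in vulkan_android.h. */
    VkAndroidSurfaceCreateInfoKHR create_info = {0};
    create_info.sType  = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
    create_info.pNext  = NULL;
    create_info.flags  = 0;
    create_info.window = window;

    VkSurfaceKHR surface = VK_NULL_HANDLE;
    if (pfnCreateAndroidSurfaceKHR(instance, &create_info, NULL, &surface) != VK_SUCCESS) {
        return VK_NULL_HANDLE;
    }
    return surface;
}

Loading through vkGetInstanceProcAddr rather than relying on the static prototype keeps the sketch usable even when VK_NO_PROTOTYPES is defined, which is the situation the vulkan.hpp dispatch loader above is designed for.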
@@ -0,0 +1,7470 @@ +#ifndef VULKAN_CORE_H_ +#define VULKAN_CORE_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_VERSION_1_0 1 +#include "vk_platform.h" + +#define VK_MAKE_VERSION(major, minor, patch) \ + (((major) << 22) | ((minor) << 12) | (patch)) + +// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. +//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 + +// Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 + +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) +#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) +// Version of this file +#define VK_HEADER_VERSION 74 + + +#define VK_NULL_HANDLE 0 + + + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE) +#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; +#else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; +#endif +#endif + + + +typedef uint32_t VkFlags; +typedef uint32_t VkBool32; +typedef uint64_t VkDeviceSize; +typedef uint32_t VkSampleMask; + +VK_DEFINE_HANDLE(VkInstance) +VK_DEFINE_HANDLE(VkPhysicalDevice) +VK_DEFINE_HANDLE(VkDevice) +VK_DEFINE_HANDLE(VkQueue) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) +VK_DEFINE_HANDLE(VkCommandBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) + +#define VK_LOD_CLAMP_NONE 1000.0f +#define VK_REMAINING_MIP_LEVELS (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define 
VK_WHOLE_SIZE (~0ULL) +#define VK_ATTACHMENT_UNUSED (~0U) +#define VK_TRUE 1 +#define VK_FALSE 0 +#define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_SUBPASS_EXTERNAL (~0U) +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 +#define VK_UUID_SIZE 16 +#define VK_MAX_MEMORY_TYPES 32 +#define VK_MAX_MEMORY_HEAPS 16 +#define VK_MAX_EXTENSION_NAME_SIZE 256 +#define VK_MAX_DESCRIPTION_SIZE 256 + + +typedef enum VkPipelineCacheHeaderVersion { + VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, + VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, + VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, + VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1), + VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheHeaderVersion; + +typedef enum VkResult { + VK_SUCCESS = 0, + VK_NOT_READY = 1, + VK_TIMEOUT = 2, + VK_EVENT_SET = 3, + VK_EVENT_RESET = 4, + VK_INCOMPLETE = 5, + VK_ERROR_OUT_OF_HOST_MEMORY = -1, + VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, + VK_ERROR_INITIALIZATION_FAILED = -3, + VK_ERROR_DEVICE_LOST = -4, + VK_ERROR_MEMORY_MAP_FAILED = -5, + VK_ERROR_LAYER_NOT_PRESENT = -6, + VK_ERROR_EXTENSION_NOT_PRESENT = -7, + VK_ERROR_FEATURE_NOT_PRESENT = -8, + VK_ERROR_INCOMPATIBLE_DRIVER = -9, + VK_ERROR_TOO_MANY_OBJECTS = -10, + VK_ERROR_FORMAT_NOT_SUPPORTED = -11, + VK_ERROR_FRAGMENTED_POOL = -12, + VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000, + VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003, + VK_ERROR_SURFACE_LOST_KHR = -1000000000, + VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, + VK_SUBOPTIMAL_KHR = 1000001003, + VK_ERROR_OUT_OF_DATE_KHR = -1000001004, + VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, + VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, + VK_ERROR_INVALID_SHADER_NV = -1000012000, + VK_ERROR_FRAGMENTATION_EXT = -1000161000, + VK_ERROR_NOT_PERMITTED_EXT = -1000174001, + VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, + VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL, + VK_RESULT_END_RANGE = VK_INCOMPLETE, + VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1), + VK_RESULT_MAX_ENUM = 0x7FFFFFFF +} VkResult; + +typedef enum VkStructureType { + VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, + VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, + VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, + VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, + VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, + VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, + VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, + VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, + VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, + VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, + 
VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, + VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, + VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, + VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, + VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, + VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, + VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, + VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, + VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, + VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003, + 
VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = 1000120000, + VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 1000156002, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = 1000063000, + VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, + VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, + VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009, + VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012, + VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, + VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, + VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, + VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR = 1000007000, + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, + 
VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, + VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, + VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, + VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, + VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, + VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, + VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, + VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, + VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, + VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, + VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, + VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001, + VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002, + VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003, + VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004, + VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, + VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, + VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, + VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, + VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, + VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, + VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001, + VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, + VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, + VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, + 
VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, + VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, + VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, + VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, + VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, + VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, + VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001, + VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002, + VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003, + VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001, + VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, + VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, + VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, + VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = 1000147000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, + VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, + VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = 1000161000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = 1000161001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = 1000161002, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = 1000161003, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = 1000161004, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = 1000174000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, + VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001, + 
VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, + VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), + VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkStructureType; + +typedef enum VkSystemAllocationScope { + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, + 
VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, + VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, + VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, + VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, + VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), + VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF +} VkSystemAllocationScope; + +typedef enum VkInternalAllocationType { + VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, + VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, + VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, + VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), + VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkInternalAllocationType; + +typedef enum VkFormat { + VK_FORMAT_UNDEFINED = 0, + VK_FORMAT_R4G4_UNORM_PACK8 = 1, + VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, + VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, + VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, + VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, + VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, + VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, + VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, + VK_FORMAT_R8_UNORM = 9, + VK_FORMAT_R8_SNORM = 10, + VK_FORMAT_R8_USCALED = 11, + VK_FORMAT_R8_SSCALED = 12, + VK_FORMAT_R8_UINT = 13, + VK_FORMAT_R8_SINT = 14, + VK_FORMAT_R8_SRGB = 15, + VK_FORMAT_R8G8_UNORM = 16, + VK_FORMAT_R8G8_SNORM = 17, + VK_FORMAT_R8G8_USCALED = 18, + VK_FORMAT_R8G8_SSCALED = 19, + VK_FORMAT_R8G8_UINT = 20, + VK_FORMAT_R8G8_SINT = 21, + VK_FORMAT_R8G8_SRGB = 22, + VK_FORMAT_R8G8B8_UNORM = 23, + VK_FORMAT_R8G8B8_SNORM = 24, + VK_FORMAT_R8G8B8_USCALED = 25, + VK_FORMAT_R8G8B8_SSCALED = 26, + VK_FORMAT_R8G8B8_UINT = 27, + VK_FORMAT_R8G8B8_SINT = 28, + VK_FORMAT_R8G8B8_SRGB = 29, + VK_FORMAT_B8G8R8_UNORM = 30, + VK_FORMAT_B8G8R8_SNORM = 31, + VK_FORMAT_B8G8R8_USCALED = 32, + VK_FORMAT_B8G8R8_SSCALED = 33, + VK_FORMAT_B8G8R8_UINT = 34, + VK_FORMAT_B8G8R8_SINT = 35, + VK_FORMAT_B8G8R8_SRGB = 36, + VK_FORMAT_R8G8B8A8_UNORM = 37, + VK_FORMAT_R8G8B8A8_SNORM = 38, + VK_FORMAT_R8G8B8A8_USCALED = 39, + VK_FORMAT_R8G8B8A8_SSCALED = 40, + VK_FORMAT_R8G8B8A8_UINT = 41, + VK_FORMAT_R8G8B8A8_SINT = 42, + VK_FORMAT_R8G8B8A8_SRGB = 43, + VK_FORMAT_B8G8R8A8_UNORM = 44, + VK_FORMAT_B8G8R8A8_SNORM = 45, + VK_FORMAT_B8G8R8A8_USCALED = 46, + VK_FORMAT_B8G8R8A8_SSCALED = 47, + VK_FORMAT_B8G8R8A8_UINT = 48, + VK_FORMAT_B8G8R8A8_SINT = 49, + VK_FORMAT_B8G8R8A8_SRGB = 50, + VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, + VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, + VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, + VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, + VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, + VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, + VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, + VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, + VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, + VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, + VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, + VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, + VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, + VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, + VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, + VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, + VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, + VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, + VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, + VK_FORMAT_R16_UNORM = 70, + VK_FORMAT_R16_SNORM = 71, + VK_FORMAT_R16_USCALED = 72, + VK_FORMAT_R16_SSCALED = 73, 
+ VK_FORMAT_R16_UINT = 74, + VK_FORMAT_R16_SINT = 75, + VK_FORMAT_R16_SFLOAT = 76, + VK_FORMAT_R16G16_UNORM = 77, + VK_FORMAT_R16G16_SNORM = 78, + VK_FORMAT_R16G16_USCALED = 79, + VK_FORMAT_R16G16_SSCALED = 80, + VK_FORMAT_R16G16_UINT = 81, + VK_FORMAT_R16G16_SINT = 82, + VK_FORMAT_R16G16_SFLOAT = 83, + VK_FORMAT_R16G16B16_UNORM = 84, + VK_FORMAT_R16G16B16_SNORM = 85, + VK_FORMAT_R16G16B16_USCALED = 86, + VK_FORMAT_R16G16B16_SSCALED = 87, + VK_FORMAT_R16G16B16_UINT = 88, + VK_FORMAT_R16G16B16_SINT = 89, + VK_FORMAT_R16G16B16_SFLOAT = 90, + VK_FORMAT_R16G16B16A16_UNORM = 91, + VK_FORMAT_R16G16B16A16_SNORM = 92, + VK_FORMAT_R16G16B16A16_USCALED = 93, + VK_FORMAT_R16G16B16A16_SSCALED = 94, + VK_FORMAT_R16G16B16A16_UINT = 95, + VK_FORMAT_R16G16B16A16_SINT = 96, + VK_FORMAT_R16G16B16A16_SFLOAT = 97, + VK_FORMAT_R32_UINT = 98, + VK_FORMAT_R32_SINT = 99, + VK_FORMAT_R32_SFLOAT = 100, + VK_FORMAT_R32G32_UINT = 101, + VK_FORMAT_R32G32_SINT = 102, + VK_FORMAT_R32G32_SFLOAT = 103, + VK_FORMAT_R32G32B32_UINT = 104, + VK_FORMAT_R32G32B32_SINT = 105, + VK_FORMAT_R32G32B32_SFLOAT = 106, + VK_FORMAT_R32G32B32A32_UINT = 107, + VK_FORMAT_R32G32B32A32_SINT = 108, + VK_FORMAT_R32G32B32A32_SFLOAT = 109, + VK_FORMAT_R64_UINT = 110, + VK_FORMAT_R64_SINT = 111, + VK_FORMAT_R64_SFLOAT = 112, + VK_FORMAT_R64G64_UINT = 113, + VK_FORMAT_R64G64_SINT = 114, + VK_FORMAT_R64G64_SFLOAT = 115, + VK_FORMAT_R64G64B64_UINT = 116, + VK_FORMAT_R64G64B64_SINT = 117, + VK_FORMAT_R64G64B64_SFLOAT = 118, + VK_FORMAT_R64G64B64A64_UINT = 119, + VK_FORMAT_R64G64B64A64_SINT = 120, + VK_FORMAT_R64G64B64A64_SFLOAT = 121, + VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, + VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, + VK_FORMAT_D16_UNORM = 124, + VK_FORMAT_X8_D24_UNORM_PACK32 = 125, + VK_FORMAT_D32_SFLOAT = 126, + VK_FORMAT_S8_UINT = 127, + VK_FORMAT_D16_UNORM_S8_UINT = 128, + VK_FORMAT_D24_UNORM_S8_UINT = 129, + VK_FORMAT_D32_SFLOAT_S8_UINT = 130, + VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, + VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, + VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, + VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, + VK_FORMAT_BC2_UNORM_BLOCK = 135, + VK_FORMAT_BC2_SRGB_BLOCK = 136, + VK_FORMAT_BC3_UNORM_BLOCK = 137, + VK_FORMAT_BC3_SRGB_BLOCK = 138, + VK_FORMAT_BC4_UNORM_BLOCK = 139, + VK_FORMAT_BC4_SNORM_BLOCK = 140, + VK_FORMAT_BC5_UNORM_BLOCK = 141, + VK_FORMAT_BC5_SNORM_BLOCK = 142, + VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, + VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, + VK_FORMAT_BC7_UNORM_BLOCK = 145, + VK_FORMAT_BC7_SRGB_BLOCK = 146, + VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, + VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, + VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, + VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, + VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, + VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, + VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, + VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, + VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, + VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, + VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, + VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, + VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, + VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, + VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, + VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, + VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 
172, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, + VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, + VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, + VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, + VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, + VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, + VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, + VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000, + VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006, + VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016, + VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026, + VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027, + VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033, + VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, + VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, + VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, + VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, + VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, + VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, + VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, + VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, + VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, + VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = 
VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM, + VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, + VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), + VK_FORMAT_MAX_ENUM = 0x7FFFFFFF +} VkFormat; + +typedef enum VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, + VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, + VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + +typedef enum VkImageTiling { + VK_IMAGE_TILING_OPTIMAL = 0, + VK_IMAGE_TILING_LINEAR = 1, + VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, + VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, + VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), + VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF +} VkImageTiling; + +typedef enum VkPhysicalDeviceType { + VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, + 
VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, + VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, + VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, + VK_PHYSICAL_DEVICE_TYPE_CPU = 4, + VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, + VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, + VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), + VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkPhysicalDeviceType; + +typedef enum VkQueryType { + VK_QUERY_TYPE_OCCLUSION = 0, + VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, + VK_QUERY_TYPE_TIMESTAMP = 2, + VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, + VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, + VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), + VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkQueryType; + +typedef enum VkSharingMode { + VK_SHARING_MODE_EXCLUSIVE = 0, + VK_SHARING_MODE_CONCURRENT = 1, + VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, + VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, + VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1), + VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSharingMode; + +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, + VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, + VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, + VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, + VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, + VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; + +typedef enum VkComponentSwizzle { + VK_COMPONENT_SWIZZLE_IDENTITY = 0, + VK_COMPONENT_SWIZZLE_ZERO = 1, + VK_COMPONENT_SWIZZLE_ONE = 2, + VK_COMPONENT_SWIZZLE_R = 3, + VK_COMPONENT_SWIZZLE_G = 4, + VK_COMPONENT_SWIZZLE_B = 5, + VK_COMPONENT_SWIZZLE_A = 6, + VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, + VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, + VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), + VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF +} VkComponentSwizzle; + +typedef enum 
VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, + VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, + VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF +} VkVertexInputRate; + +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, + VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, + VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, + VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT, + VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, + VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, + VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; + +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, + VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, + VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, + VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, + VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + 
VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, + VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, + VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; + +typedef enum VkBlendFactor { + VK_BLEND_FACTOR_ZERO = 0, + VK_BLEND_FACTOR_ONE = 1, + VK_BLEND_FACTOR_SRC_COLOR = 2, + VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, + VK_BLEND_FACTOR_DST_COLOR = 4, + VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, + VK_BLEND_FACTOR_SRC_ALPHA = 6, + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, + VK_BLEND_FACTOR_DST_ALPHA = 8, + VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, + VK_BLEND_FACTOR_CONSTANT_COLOR = 10, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, + VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, + VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, + VK_BLEND_FACTOR_SRC1_COLOR = 15, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, + VK_BLEND_FACTOR_SRC1_ALPHA = 17, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, + VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, + VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, + VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), + VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF +} VkBlendFactor; + +typedef enum VkBlendOp { + VK_BLEND_OP_ADD = 0, + VK_BLEND_OP_SUBTRACT = 1, + VK_BLEND_OP_REVERSE_SUBTRACT = 2, + VK_BLEND_OP_MIN = 3, + VK_BLEND_OP_MAX = 4, + VK_BLEND_OP_ZERO_EXT = 1000148000, + VK_BLEND_OP_SRC_EXT = 1000148001, + VK_BLEND_OP_DST_EXT = 1000148002, + VK_BLEND_OP_SRC_OVER_EXT = 1000148003, + VK_BLEND_OP_DST_OVER_EXT = 1000148004, + VK_BLEND_OP_SRC_IN_EXT = 1000148005, + VK_BLEND_OP_DST_IN_EXT = 1000148006, + VK_BLEND_OP_SRC_OUT_EXT = 1000148007, + VK_BLEND_OP_DST_OUT_EXT = 1000148008, + VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, + VK_BLEND_OP_DST_ATOP_EXT = 1000148010, + VK_BLEND_OP_XOR_EXT = 1000148011, + VK_BLEND_OP_MULTIPLY_EXT = 1000148012, + VK_BLEND_OP_SCREEN_EXT = 1000148013, + VK_BLEND_OP_OVERLAY_EXT = 1000148014, + VK_BLEND_OP_DARKEN_EXT = 1000148015, + VK_BLEND_OP_LIGHTEN_EXT = 1000148016, + VK_BLEND_OP_COLORDODGE_EXT = 1000148017, + VK_BLEND_OP_COLORBURN_EXT = 1000148018, + VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, + VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, + VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, + VK_BLEND_OP_EXCLUSION_EXT = 1000148022, + VK_BLEND_OP_INVERT_EXT = 1000148023, + VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, + VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, + VK_BLEND_OP_LINEARBURN_EXT = 1000148026, + VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, + VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, + VK_BLEND_OP_PINLIGHT_EXT = 1000148029, + VK_BLEND_OP_HARDMIX_EXT = 1000148030, + VK_BLEND_OP_HSL_HUE_EXT = 1000148031, + VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, + VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, + VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034, + VK_BLEND_OP_PLUS_EXT = 1000148035, + VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, + VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, + VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, + VK_BLEND_OP_MINUS_EXT = 1000148039, + VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, + VK_BLEND_OP_CONTRAST_EXT = 1000148041, + VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, + VK_BLEND_OP_RED_EXT = 1000148043, + VK_BLEND_OP_GREEN_EXT = 1000148044, + VK_BLEND_OP_BLUE_EXT = 1000148045, + VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, + VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, + VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), + VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF +} VkBlendOp; + +typedef enum 
VkDynamicState { + VK_DYNAMIC_STATE_VIEWPORT = 0, + VK_DYNAMIC_STATE_SCISSOR = 1, + VK_DYNAMIC_STATE_LINE_WIDTH = 2, + VK_DYNAMIC_STATE_DEPTH_BIAS = 3, + VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, + VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, + VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, + VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, + VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, + VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, + VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), + VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF +} VkDynamicState; + +typedef enum VkFilter { + VK_FILTER_NEAREST = 0, + VK_FILTER_LINEAR = 1, + VK_FILTER_CUBIC_IMG = 1000015000, + VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, + VK_FILTER_END_RANGE = VK_FILTER_LINEAR, + VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), + VK_FILTER_MAX_ENUM = 0x7FFFFFFF +} VkFilter; + +typedef enum VkSamplerMipmapMode { + VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, + VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, + VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST, + VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR, + VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1), + VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerMipmapMode; + +typedef enum VkSamplerAddressMode { + VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, + VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT, + VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1), + VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerAddressMode; + +typedef enum VkBorderColor { + VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, + VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, + VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, + VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, + VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, + VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, + VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1), + VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF +} VkBorderColor; + +typedef enum VkDescriptorType { + VK_DESCRIPTOR_TYPE_SAMPLER = 0, + VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, + VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2, + VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, + VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, + VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, + VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, + VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, + VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1), + VK_DESCRIPTOR_TYPE_MAX_ENUM = 
0x7FFFFFFF +} VkDescriptorType; + +typedef enum VkAttachmentLoadOp { + VK_ATTACHMENT_LOAD_OP_LOAD = 0, + VK_ATTACHMENT_LOAD_OP_CLEAR = 1, + VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, + VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD, + VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, + VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1), + VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentLoadOp; + +typedef enum VkAttachmentStoreOp { + VK_ATTACHMENT_STORE_OP_STORE = 0, + VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, + VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE, + VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE, + VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1), + VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentStoreOp; + +typedef enum VkPipelineBindPoint { + VK_PIPELINE_BIND_POINT_GRAPHICS = 0, + VK_PIPELINE_BIND_POINT_COMPUTE = 1, + VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, + VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, + VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), + VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF +} VkPipelineBindPoint; + +typedef enum VkCommandBufferLevel { + VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, + VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, + VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, + VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), + VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferLevel; + +typedef enum VkIndexType { + VK_INDEX_TYPE_UINT16 = 0, + VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, + VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, + VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), + VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkIndexType; + +typedef enum VkSubpassContents { + VK_SUBPASS_CONTENTS_INLINE = 0, + VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, + VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, + VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, + VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), + VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassContents; + +typedef enum VkObjectType { + VK_OBJECT_TYPE_UNKNOWN = 0, + VK_OBJECT_TYPE_INSTANCE = 1, + VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, + VK_OBJECT_TYPE_DEVICE = 3, + VK_OBJECT_TYPE_QUEUE = 4, + VK_OBJECT_TYPE_SEMAPHORE = 5, + VK_OBJECT_TYPE_COMMAND_BUFFER = 6, + VK_OBJECT_TYPE_FENCE = 7, + VK_OBJECT_TYPE_DEVICE_MEMORY = 8, + VK_OBJECT_TYPE_BUFFER = 9, + VK_OBJECT_TYPE_IMAGE = 10, + VK_OBJECT_TYPE_EVENT = 11, + VK_OBJECT_TYPE_QUERY_POOL = 12, + VK_OBJECT_TYPE_BUFFER_VIEW = 13, + VK_OBJECT_TYPE_IMAGE_VIEW = 14, + VK_OBJECT_TYPE_SHADER_MODULE = 15, + VK_OBJECT_TYPE_PIPELINE_CACHE = 16, + VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, + VK_OBJECT_TYPE_RENDER_PASS = 18, + VK_OBJECT_TYPE_PIPELINE = 19, + VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, + VK_OBJECT_TYPE_SAMPLER = 21, + VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, + VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, + VK_OBJECT_TYPE_FRAMEBUFFER = 24, + VK_OBJECT_TYPE_COMMAND_POOL = 25, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, + 
VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, + VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, + VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, + VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, + VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, + VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, + VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, + VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, + VK_OBJECT_TYPE_END_RANGE = VK_OBJECT_TYPE_COMMAND_POOL, + VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), + VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkObjectType; + +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkFormatFeatureFlagBits { + VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, + VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, + VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, + VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, + VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, + VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, + VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, + VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000, + VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + 
VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFormatFeatureFlagBits; +typedef VkFlags VkFormatFeatureFlags; + +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; + +typedef enum VkImageCreateFlagBits { + VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, + VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, + VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100, + VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800, + VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200, + VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT, + VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT, + VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageCreateFlagBits; +typedef VkFlags VkImageCreateFlags; + +typedef enum VkSampleCountFlagBits { + VK_SAMPLE_COUNT_1_BIT = 0x00000001, + VK_SAMPLE_COUNT_2_BIT = 0x00000002, + VK_SAMPLE_COUNT_4_BIT = 0x00000004, + VK_SAMPLE_COUNT_8_BIT = 0x00000008, + VK_SAMPLE_COUNT_16_BIT = 0x00000010, + VK_SAMPLE_COUNT_32_BIT = 0x00000020, + VK_SAMPLE_COUNT_64_BIT = 0x00000040, + VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSampleCountFlagBits; +typedef VkFlags VkSampleCountFlags; + +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; + +typedef enum VkMemoryPropertyFlagBits { + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, + VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, + VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, + VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020, + VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryPropertyFlagBits; +typedef VkFlags VkMemoryPropertyFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 
0x00000002, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; +typedef VkFlags VkDeviceCreateFlags; + +typedef enum VkDeviceQueueCreateFlagBits { + VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001, + VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDeviceQueueCreateFlagBits; +typedef VkFlags VkDeviceQueueCreateFlags; + +typedef enum VkPipelineStageFlagBits { + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, + VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, + VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, + VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, + VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, + VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, + VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, + VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, + VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, + VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, + VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, + VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, + VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineStageFlagBits; +typedef VkFlags VkPipelineStageFlags; +typedef VkFlags VkMemoryMapFlags; + +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, + VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, + VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, + VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, + VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; + +typedef enum VkSparseImageFormatFlagBits { + VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, + VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, + VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, + VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseImageFormatFlagBits; +typedef VkFlags VkSparseImageFormatFlags; + +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; + +typedef enum VkFenceCreateFlagBits { + VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, + VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceCreateFlagBits; +typedef VkFlags VkFenceCreateFlags; +typedef VkFlags VkSemaphoreCreateFlags; +typedef VkFlags VkEventCreateFlags; +typedef VkFlags VkQueryPoolCreateFlags; + +typedef enum VkQueryPipelineStatisticFlagBits { + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, + VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, + 
VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, + VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, + VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, + VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryPipelineStatisticFlagBits; +typedef VkFlags VkQueryPipelineStatisticFlags; + +typedef enum VkQueryResultFlagBits { + VK_QUERY_RESULT_64_BIT = 0x00000001, + VK_QUERY_RESULT_WAIT_BIT = 0x00000002, + VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, + VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, + VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryResultFlagBits; +typedef VkFlags VkQueryResultFlags; + +typedef enum VkBufferCreateFlagBits { + VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, + VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferCreateFlagBits; +typedef VkFlags VkBufferCreateFlags; + +typedef enum VkBufferUsageFlagBits { + VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, + VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, + VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, + VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, + VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, + VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferUsageFlagBits; +typedef VkFlags VkBufferUsageFlags; +typedef VkFlags VkBufferViewCreateFlags; +typedef VkFlags VkImageViewCreateFlags; +typedef VkFlags VkShaderModuleCreateFlags; +typedef VkFlags VkPipelineCacheCreateFlags; + +typedef enum VkPipelineCreateFlagBits { + VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, + VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, + VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, + VK_PIPELINE_CREATE_DISPATCH_BASE = 0x00000010, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreateFlagBits; +typedef VkFlags VkPipelineCreateFlags; +typedef VkFlags VkPipelineShaderStageCreateFlags; + +typedef enum VkShaderStageFlagBits { + VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, + VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, + VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, + VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, + VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, + VK_SHADER_STAGE_ALL = 0x7FFFFFFF, + VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkShaderStageFlagBits; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags 
VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; + +typedef enum VkCullModeFlagBits { + VK_CULL_MODE_NONE = 0, + VK_CULL_MODE_FRONT_BIT = 0x00000001, + VK_CULL_MODE_BACK_BIT = 0x00000002, + VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, + VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCullModeFlagBits; +typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineMultisampleStateCreateFlags; +typedef VkFlags VkPipelineDepthStencilStateCreateFlags; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; + +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; +typedef VkFlags VkPipelineDynamicStateCreateFlags; +typedef VkFlags VkPipelineLayoutCreateFlags; +typedef VkFlags VkShaderStageFlags; +typedef VkFlags VkSamplerCreateFlags; + +typedef enum VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; + +typedef enum VkDescriptorPoolCreateFlagBits { + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorPoolCreateFlagBits; +typedef VkFlags VkDescriptorPoolCreateFlags; +typedef VkFlags VkDescriptorPoolResetFlags; +typedef VkFlags VkFramebufferCreateFlags; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkAttachmentDescriptionFlagBits { + VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, + VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentDescriptionFlagBits; +typedef VkFlags VkAttachmentDescriptionFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassDescriptionFlagBits; +typedef VkFlags VkSubpassDescriptionFlags; + +typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, + VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, + VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, + VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; 
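Illustrative note (not part of the header above): a minimal sketch of how the Vk*FlagBits enums declared in this hunk compose into their corresponding VkFlags bitmask typedefs. It only uses identifiers visible in the declarations above, plus the umbrella vulkan.h include added by this patch.

#include <vulkan/vulkan.h>

/* Each Vk*FlagBits enum names individual bits; the matching VkFlags typedef
 * (a plain uint32_t) carries an OR-ed combination of those bits. */
static void example_flag_masks(void)
{
    /* Write all four color channels of a color attachment. */
    VkColorComponentFlags writeMask = VK_COLOR_COMPONENT_R_BIT |
                                      VK_COLOR_COMPONENT_G_BIT |
                                      VK_COLOR_COMPONENT_B_BIT |
                                      VK_COLOR_COMPONENT_A_BIT;

    /* Cull back-facing primitives; VkCullModeFlags is built the same way. */
    VkCullModeFlags cullMode = VK_CULL_MODE_BACK_BIT;

    /* Access masks (e.g. the src/dst access masks of a dependency or
     * barrier) are likewise OR-ed VkAccessFlagBits values. */
    VkAccessFlags srcAccess = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    VkAccessFlags dstAccess = VK_ACCESS_SHADER_READ_BIT |
                              VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;

    (void)writeMask; (void)cullMode; (void)srcAccess; (void)dstAccess;
}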
+ +typedef enum VkDependencyFlagBits { + VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, + VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, + VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002, + VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT, + VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDependencyFlagBits; +typedef VkFlags VkDependencyFlags; + +typedef enum VkCommandPoolCreateFlagBits { + VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, + VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, + VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004, + VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolCreateFlagBits; +typedef VkFlags VkCommandPoolCreateFlags; + +typedef enum VkCommandPoolResetFlagBits { + VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolResetFlagBits; +typedef VkFlags VkCommandPoolResetFlags; + +typedef enum VkCommandBufferUsageFlagBits { + VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, + VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, + VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, + VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferUsageFlagBits; +typedef VkFlags VkCommandBufferUsageFlags; + +typedef enum VkQueryControlFlagBits { + VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, + VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryControlFlagBits; +typedef VkFlags VkQueryControlFlags; + +typedef enum VkCommandBufferResetFlagBits { + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferResetFlagBits; +typedef VkFlags VkCommandBufferResetFlags; + +typedef enum VkStencilFaceFlagBits { + VK_STENCIL_FACE_FRONT_BIT = 0x00000001, + VK_STENCIL_FACE_BACK_BIT = 0x00000002, + VK_STENCIL_FRONT_AND_BACK = 0x00000003, + VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkStencilFaceFlagBits; +typedef VkFlags VkStencilFaceFlags; + +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + +typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef struct VkAllocationCallbacks { + void* pUserData; + PFN_vkAllocationFunction 
pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; +} VkAllocationCallbacks; + +typedef struct VkPhysicalDeviceFeatures { + VkBool32 robustBufferAccess; + VkBool32 fullDrawIndexUint32; + VkBool32 imageCubeArray; + VkBool32 independentBlend; + VkBool32 geometryShader; + VkBool32 tessellationShader; + VkBool32 sampleRateShading; + VkBool32 dualSrcBlend; + VkBool32 logicOp; + VkBool32 multiDrawIndirect; + VkBool32 drawIndirectFirstInstance; + VkBool32 depthClamp; + VkBool32 depthBiasClamp; + VkBool32 fillModeNonSolid; + VkBool32 depthBounds; + VkBool32 wideLines; + VkBool32 largePoints; + VkBool32 alphaToOne; + VkBool32 multiViewport; + VkBool32 samplerAnisotropy; + VkBool32 textureCompressionETC2; + VkBool32 textureCompressionASTC_LDR; + VkBool32 textureCompressionBC; + VkBool32 occlusionQueryPrecise; + VkBool32 pipelineStatisticsQuery; + VkBool32 vertexPipelineStoresAndAtomics; + VkBool32 fragmentStoresAndAtomics; + VkBool32 shaderTessellationAndGeometryPointSize; + VkBool32 shaderImageGatherExtended; + VkBool32 shaderStorageImageExtendedFormats; + VkBool32 shaderStorageImageMultisample; + VkBool32 shaderStorageImageReadWithoutFormat; + VkBool32 shaderStorageImageWriteWithoutFormat; + VkBool32 shaderUniformBufferArrayDynamicIndexing; + VkBool32 shaderSampledImageArrayDynamicIndexing; + VkBool32 shaderStorageBufferArrayDynamicIndexing; + VkBool32 shaderStorageImageArrayDynamicIndexing; + VkBool32 shaderClipDistance; + VkBool32 shaderCullDistance; + VkBool32 shaderFloat64; + VkBool32 shaderInt64; + VkBool32 shaderInt16; + VkBool32 shaderResourceResidency; + VkBool32 shaderResourceMinLod; + VkBool32 sparseBinding; + VkBool32 sparseResidencyBuffer; + VkBool32 sparseResidencyImage2D; + VkBool32 sparseResidencyImage3D; + VkBool32 sparseResidency2Samples; + VkBool32 sparseResidency4Samples; + VkBool32 sparseResidency8Samples; + VkBool32 sparseResidency16Samples; + VkBool32 sparseResidencyAliased; + VkBool32 variableMultisampleRate; + VkBool32 inheritedQueries; +} VkPhysicalDeviceFeatures; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkPhysicalDeviceLimits { + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + VkDeviceSize bufferImageGranularity; + VkDeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t 
maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + VkDeviceSize minTexelBufferOffsetAlignment; + VkDeviceSize minUniformBufferOffsetAlignment; + VkDeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + VkSampleCountFlags framebufferColorSampleCounts; + VkSampleCountFlags framebufferDepthSampleCounts; + VkSampleCountFlags framebufferStencilSampleCounts; + VkSampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + VkSampleCountFlags sampledImageColorSampleCounts; + VkSampleCountFlags sampledImageIntegerSampleCounts; + VkSampleCountFlags sampledImageDepthSampleCounts; + VkSampleCountFlags sampledImageStencilSampleCounts; + VkSampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + VkBool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + VkBool32 strictLines; + VkBool32 standardSampleLocations; + VkDeviceSize optimalBufferCopyOffsetAlignment; + VkDeviceSize optimalBufferCopyRowPitchAlignment; + VkDeviceSize nonCoherentAtomSize; +} VkPhysicalDeviceLimits; + +typedef struct VkPhysicalDeviceSparseProperties { + VkBool32 residencyStandard2DBlockShape; + VkBool32 
residencyStandard2DMultisampleBlockShape; + VkBool32 residencyStandard3DBlockShape; + VkBool32 residencyAlignedMipSize; + VkBool32 residencyNonResidentStrict; +} VkPhysicalDeviceSparseProperties; + +typedef struct VkPhysicalDeviceProperties { + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + VkPhysicalDeviceLimits limits; + VkPhysicalDeviceSparseProperties sparseProperties; +} VkPhysicalDeviceProperties; + +typedef struct VkQueueFamilyProperties { + VkQueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + VkExtent3D minImageTransferGranularity; +} VkQueueFamilyProperties; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); +typedef struct VkDeviceQueueCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; +} VkDeviceQueueCreateInfo; + +typedef struct VkDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const VkDeviceQueueCreateInfo* pQueueCreateInfos; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const VkPhysicalDeviceFeatures* pEnabledFeatures; +} VkDeviceCreateInfo; + +typedef struct VkExtensionProperties { + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; +} VkExtensionProperties; + +typedef struct VkLayerProperties { + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkLayerProperties; + +typedef struct VkSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + const VkPipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const VkCommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkSubmitInfo; + +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + +typedef struct VkMappedMemoryRange { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMappedMemoryRange; + +typedef struct VkMemoryRequirements { + VkDeviceSize size; + VkDeviceSize alignment; + uint32_t memoryTypeBits; +} VkMemoryRequirements; + +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize 
imageMipTailStride; +} VkSparseImageMemoryRequirements; + +typedef struct VkSparseMemoryBind { + VkDeviceSize resourceOffset; + VkDeviceSize size; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseMemoryBind; + +typedef struct VkSparseBufferMemoryBindInfo { + VkBuffer buffer; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseBufferMemoryBindInfo; + +typedef struct VkSparseImageOpaqueMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseImageOpaqueMemoryBindInfo; + +typedef struct VkImageSubresource { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; +} VkImageSubresource; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkSparseImageMemoryBind { + VkImageSubresource subresource; + VkOffset3D offset; + VkExtent3D extent; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseImageMemoryBind; + +typedef struct VkSparseImageMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseImageMemoryBind* pBinds; +} VkSparseImageMemoryBindInfo; + +typedef struct VkBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const VkSparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const VkSparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkBindSparseInfo; + +typedef struct VkFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkFenceCreateFlags flags; +} VkFenceCreateInfo; + +typedef struct VkSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreCreateFlags flags; +} VkSemaphoreCreateInfo; + +typedef struct VkEventCreateInfo { + VkStructureType sType; + const void* pNext; + VkEventCreateFlags flags; +} VkEventCreateInfo; + +typedef struct VkQueryPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueryPoolCreateFlags flags; + VkQueryType queryType; + uint32_t queryCount; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkQueryPoolCreateInfo; + +typedef struct VkBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkDeviceSize size; + VkBufferUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkBufferCreateInfo; + +typedef struct VkBufferViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferViewCreateFlags flags; + VkBuffer buffer; + VkFormat format; + VkDeviceSize offset; + VkDeviceSize range; +} VkBufferViewCreateInfo; + +typedef struct VkImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageType imageType; + VkFormat format; + VkExtent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + VkSampleCountFlagBits samples; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkImageLayout initialLayout; +} VkImageCreateInfo; + +typedef struct VkSubresourceLayout { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize rowPitch; + VkDeviceSize arrayPitch; + VkDeviceSize depthPitch; +} VkSubresourceLayout; + +typedef struct 
VkComponentMapping { + VkComponentSwizzle r; + VkComponentSwizzle g; + VkComponentSwizzle b; + VkComponentSwizzle a; +} VkComponentMapping; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageViewCreateFlags flags; + VkImage image; + VkImageViewType viewType; + VkFormat format; + VkComponentMapping components; + VkImageSubresourceRange subresourceRange; +} VkImageViewCreateInfo; + +typedef struct VkShaderModuleCreateInfo { + VkStructureType sType; + const void* pNext; + VkShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; +} VkShaderModuleCreateInfo; + +typedef struct VkPipelineCacheCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; +} VkPipelineCacheCreateInfo; + +typedef struct VkSpecializationMapEntry { + uint32_t constantID; + uint32_t offset; + size_t size; +} VkSpecializationMapEntry; + +typedef struct VkSpecializationInfo { + uint32_t mapEntryCount; + const VkSpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; +} VkSpecializationInfo; + +typedef struct VkPipelineShaderStageCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineShaderStageCreateFlags flags; + VkShaderStageFlagBits stage; + VkShaderModule module; + const char* pName; + const VkSpecializationInfo* pSpecializationInfo; +} VkPipelineShaderStageCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; +} VkVertexInputBindingDescription; + +typedef struct VkVertexInputAttributeDescription { + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription; + +typedef struct VkPipelineVertexInputStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VkVertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; +} VkPipelineVertexInputStateCreateInfo; + +typedef struct VkPipelineInputAssemblyStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineInputAssemblyStateCreateFlags flags; + VkPrimitiveTopology topology; + VkBool32 primitiveRestartEnable; +} VkPipelineInputAssemblyStateCreateInfo; + +typedef struct VkPipelineTessellationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; +} VkPipelineTessellationStateCreateInfo; + +typedef struct VkViewport { + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; +} VkViewport; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkPipelineViewportStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const VkViewport* pViewports; + uint32_t scissorCount; + const VkRect2D* pScissors; +} 
VkPipelineViewportStateCreateInfo; + +typedef struct VkPipelineRasterizationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateCreateFlags flags; + VkBool32 depthClampEnable; + VkBool32 rasterizerDiscardEnable; + VkPolygonMode polygonMode; + VkCullModeFlags cullMode; + VkFrontFace frontFace; + VkBool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; +} VkPipelineRasterizationStateCreateInfo; + +typedef struct VkPipelineMultisampleStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineMultisampleStateCreateFlags flags; + VkSampleCountFlagBits rasterizationSamples; + VkBool32 sampleShadingEnable; + float minSampleShading; + const VkSampleMask* pSampleMask; + VkBool32 alphaToCoverageEnable; + VkBool32 alphaToOneEnable; +} VkPipelineMultisampleStateCreateInfo; + +typedef struct VkStencilOpState { + VkStencilOp failOp; + VkStencilOp passOp; + VkStencilOp depthFailOp; + VkCompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; +} VkStencilOpState; + +typedef struct VkPipelineDepthStencilStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDepthStencilStateCreateFlags flags; + VkBool32 depthTestEnable; + VkBool32 depthWriteEnable; + VkCompareOp depthCompareOp; + VkBool32 depthBoundsTestEnable; + VkBool32 stencilTestEnable; + VkStencilOpState front; + VkStencilOpState back; + float minDepthBounds; + float maxDepthBounds; +} VkPipelineDepthStencilStateCreateInfo; + +typedef struct VkPipelineColorBlendAttachmentState { + VkBool32 blendEnable; + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; + VkColorComponentFlags colorWriteMask; +} VkPipelineColorBlendAttachmentState; + +typedef struct VkPipelineColorBlendStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineColorBlendStateCreateFlags flags; + VkBool32 logicOpEnable; + VkLogicOp logicOp; + uint32_t attachmentCount; + const VkPipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; +} VkPipelineColorBlendStateCreateInfo; + +typedef struct VkPipelineDynamicStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const VkDynamicState* pDynamicStates; +} VkPipelineDynamicStateCreateInfo; + +typedef struct VkGraphicsPipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; + const VkPipelineViewportStateCreateInfo* pViewportState; + const VkPipelineRasterizationStateCreateInfo* pRasterizationState; + const VkPipelineMultisampleStateCreateInfo* pMultisampleState; + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; + const VkPipelineColorBlendStateCreateInfo* pColorBlendState; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkRenderPass renderPass; + uint32_t subpass; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkGraphicsPipelineCreateInfo; + +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + 
const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkPushConstantRange { + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; +} VkPushConstantRange; + +typedef struct VkPipelineLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkPipelineLayoutCreateInfo; + +typedef struct VkSamplerCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerCreateFlags flags; + VkFilter magFilter; + VkFilter minFilter; + VkSamplerMipmapMode mipmapMode; + VkSamplerAddressMode addressModeU; + VkSamplerAddressMode addressModeV; + VkSamplerAddressMode addressModeW; + float mipLodBias; + VkBool32 anisotropyEnable; + float maxAnisotropy; + VkBool32 compareEnable; + VkCompareOp compareOp; + float minLod; + float maxLod; + VkBorderColor borderColor; + VkBool32 unnormalizedCoordinates; +} VkSamplerCreateInfo; + +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; + +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; + +typedef struct VkDescriptorPoolSize { + VkDescriptorType type; + uint32_t descriptorCount; +} VkDescriptorPoolSize; + +typedef struct VkDescriptorPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const VkDescriptorPoolSize* pPoolSizes; +} VkDescriptorPoolCreateInfo; + +typedef struct VkDescriptorSetAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkDescriptorSetAllocateInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; + +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkWriteDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + const VkDescriptorImageInfo* pImageInfo; + const VkDescriptorBufferInfo* pBufferInfo; + const VkBufferView* pTexelBufferView; +} VkWriteDescriptorSet; + +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; + +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + 
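
The descriptor-set-layout and pipeline-layout structures above are plain C aggregates that an application fills in and passes to vkCreateDescriptorSetLayout and vkCreatePipelineLayout, whose prototypes appear later in this header. A minimal usage sketch (not itself part of vulkan.h) follows, assuming a valid VkDevice, a single uniform-buffer binding at binding 0, and omitted error handling:

#include <vulkan/vulkan.h>
#include <stddef.h>

/* Sketch only: builds a pipeline layout with one descriptor set layout that
 * holds a single uniform buffer visible to the vertex stage. */
static VkPipelineLayout create_example_pipeline_layout(VkDevice device)
{
    VkDescriptorSetLayoutBinding binding = {0};       /* zero-init: pImmutableSamplers == NULL */
    binding.binding         = 0;
    binding.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    binding.descriptorCount = 1;
    binding.stageFlags      = VK_SHADER_STAGE_VERTEX_BIT;

    VkDescriptorSetLayoutCreateInfo set_layout_info = {0};
    set_layout_info.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    set_layout_info.bindingCount = 1;
    set_layout_info.pBindings    = &binding;

    VkDescriptorSetLayout set_layout = VK_NULL_HANDLE;
    vkCreateDescriptorSetLayout(device, &set_layout_info, NULL, &set_layout);   /* error handling omitted */

    VkPipelineLayoutCreateInfo pipeline_layout_info = {0};
    pipeline_layout_info.sType          = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pipeline_layout_info.setLayoutCount = 1;
    pipeline_layout_info.pSetLayouts    = &set_layout;

    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
    vkCreatePipelineLayout(device, &pipeline_layout_info, NULL, &pipeline_layout);
    return pipeline_layout;
}

The returned VkPipelineLayout is what VkGraphicsPipelineCreateInfo::layout and VkComputePipelineCreateInfo::layout (defined above) expect.
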
+typedef struct VkAttachmentDescription { + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription; + +typedef struct VkAttachmentReference { + uint32_t attachment; + VkImageLayout layout; +} VkAttachmentReference; + +typedef struct VkSubpassDescription { + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const VkAttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference* pColorAttachments; + const VkAttachmentReference* pResolveAttachments; + const VkAttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription; + +typedef struct VkSubpassDependency { + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; +} VkSubpassDependency; + +typedef struct VkRenderPassCreateInfo { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency* pDependencies; +} VkRenderPassCreateInfo; + +typedef struct VkCommandPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; +} VkCommandPoolCreateInfo; + +typedef struct VkCommandBufferAllocateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPool commandPool; + VkCommandBufferLevel level; + uint32_t commandBufferCount; +} VkCommandBufferAllocateInfo; + +typedef struct VkCommandBufferInheritanceInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + uint32_t subpass; + VkFramebuffer framebuffer; + VkBool32 occlusionQueryEnable; + VkQueryControlFlags queryFlags; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkCommandBufferInheritanceInfo; + +typedef struct VkCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + VkCommandBufferUsageFlags flags; + const VkCommandBufferInheritanceInfo* pInheritanceInfo; +} VkCommandBufferBeginInfo; + +typedef struct VkBufferCopy { + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy; + +typedef struct VkImageSubresourceLayers { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceLayers; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkBufferImageCopy { + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy; + +typedef union VkClearColorValue { + float float32[4]; + int32_t 
int32[4]; + uint32_t uint32[4]; +} VkClearColorValue; + +typedef struct VkClearDepthStencilValue { + float depth; + uint32_t stencil; +} VkClearDepthStencilValue; + +typedef union VkClearValue { + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +} VkClearValue; + +typedef struct VkClearAttachment { + VkImageAspectFlags aspectMask; + uint32_t colorAttachment; + VkClearValue clearValue; +} VkClearAttachment; + +typedef struct VkClearRect { + VkRect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkClearRect; + +typedef struct VkImageResolve { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + VkFramebuffer framebuffer; + VkRect2D renderArea; + uint32_t clearValueCount; + const VkClearValue* pClearValues; +} VkRenderPassBeginInfo; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); +typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* 
pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); +typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); +typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); +typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); +typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); +typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); +typedef void (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); +typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); +typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); +typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); +typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const 
VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); +typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); +typedef VkResult (VKAPI_PTR 
*PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); +typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); +typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); +typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); +typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); +typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void 
(VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( + const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance); + +VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( + VkInstance instance, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( + VkInstance instance, + uint32_t* pPhysicalDeviceCount, + VkPhysicalDevice* pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL 
vkGetPhysicalDeviceFeatures( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( + VkDevice device, + const char* pName); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( + VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( + VkDevice device, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( + VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( + VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo* pSubmits, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( + VkQueue queue); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( + VkDevice device, + const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory); + +VKAPI_ATTR void VKAPI_CALL vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + 
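
Each prototype in this block has a matching PFN_* typedef above so that loaders and applications can resolve entry points at run time through vkGetInstanceProcAddr / vkGetDeviceProcAddr instead of relying on the static prototypes, which are only declared while VK_NO_PROTOTYPES is left undefined. A minimal sketch of the usual two-call enumeration idiom through a dynamically resolved pointer; the VkInstance handle and the fixed-size device array are illustrative assumptions, not part of the header:

#include <vulkan/vulkan.h>
#include <stdio.h>

/* Sketch only: resolves vkEnumeratePhysicalDevices through the loader and
 * uses the two-call idiom (first query the count, then fill the array). */
static void list_physical_devices(VkInstance instance)
{
    PFN_vkEnumeratePhysicalDevices enumerate_devices =
        (PFN_vkEnumeratePhysicalDevices)vkGetInstanceProcAddr(instance, "vkEnumeratePhysicalDevices");
    if (enumerate_devices == NULL)
        return;

    uint32_t count = 0;
    if (enumerate_devices(instance, &count, NULL) != VK_SUCCESS || count == 0)
        return;

    VkPhysicalDevice devices[8];
    if (count > 8)
        count = 8;   /* clamping makes the second call return VK_INCOMPLETE */

    VkResult result = enumerate_devices(instance, &count, devices);
    if (result == VK_SUCCESS || result == VK_INCOMPLETE)
        printf("%u physical device(s) enumerated\n", count);
}

The same pattern applies to any of the PFN_* types above; device-level commands are typically resolved with vkGetDeviceProcAddr once a VkDevice exists, which avoids the loader's per-call dispatch.
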
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( + VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( + VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( + VkDevice device, + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( + VkDevice device, + VkImage image, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice device, + VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( + VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( + VkDevice device, + const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFence( + VkDevice device, + VkFence fence, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( + VkDevice device, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences, + VkBool32 waitAll, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( + VkDevice device, + const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore); + +VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( + VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( + VkDevice device, + const VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent); + +VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( + VkDevice device, + VkEvent event, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( + VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( + VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( + VkDevice 
device, + const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( + VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( + VkDevice device, + const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( + VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( + VkDevice device, + const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImage( + VkDevice device, + VkImage image, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( + VkDevice device, + VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( + VkDevice device, + const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( + VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( + VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( + VkDevice device, + const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineCache* pPipelineCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( + VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( + VkDevice device, + VkPipelineCache pipelineCache, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( + VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( + VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( + VkDevice device, + const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( + VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( + VkDevice device, + const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + 
VkSampler* pSampler); + +VKAPI_ATTR void VKAPI_CALL vkDestroySampler( + VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( + VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( + VkDevice device, + const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( + VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( + VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( + VkDevice device, + const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( + VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( + VkDevice device, + const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( + VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( + VkDevice device, + VkRenderPass renderPass, + VkExtent2D* pGranularity); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( + VkDevice device, + const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( + VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( + VkDevice device, + const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( + VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( + VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( + VkCommandBuffer 
commandBuffer, + VkCommandBufferResetFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( + VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( + VkCommandBuffer commandBuffer, + float lineWidth); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( + VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( + VkCommandBuffer commandBuffer, + const float blendConstants[4]); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( + VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t* pDynamicOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdDraw( + VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( + VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const 
VkImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* pRegions, + VkFilter filter); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t rectCount, + const VkClearRect* pRects); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( + VkCommandBuffer commandBuffer, + VkQueryPool 
queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( + VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void* pValues); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( + VkCommandBuffer commandBuffer, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( + VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); +#endif + +#define VK_VERSION_1_1 1 +// Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_VERSION(1, 1, 0)// Patch version should always be set to 0 + + +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) + +#define VK_MAX_DEVICE_GROUP_SIZE 32 +#define VK_LUID_SIZE 8 +#define VK_QUEUE_FAMILY_EXTERNAL (~0U-1) + + +typedef enum VkPointClippingBehavior { + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_BEGIN_RANGE = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_END_RANGE = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_RANGE_SIZE = (VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY - VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES + 1), + VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPointClippingBehavior; + +typedef enum VkTessellationDomainOrigin { + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_BEGIN_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_END_RANGE = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_RANGE_SIZE = (VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT - VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT + 1), + VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF +} VkTessellationDomainOrigin; + +typedef enum VkSamplerYcbcrModelConversion { + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = 
VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_BEGIN_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_END_RANGE = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RANGE_SIZE = (VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 - VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY + 1), + VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrModelConversion; + +typedef enum VkSamplerYcbcrRange { + VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, + VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_BEGIN_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_END_RANGE = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_RANGE_SIZE = (VK_SAMPLER_YCBCR_RANGE_ITU_NARROW - VK_SAMPLER_YCBCR_RANGE_ITU_FULL + 1), + VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrRange; + +typedef enum VkChromaLocation { + VK_CHROMA_LOCATION_COSITED_EVEN = 0, + VK_CHROMA_LOCATION_MIDPOINT = 1, + VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_BEGIN_RANGE = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_END_RANGE = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_RANGE_SIZE = (VK_CHROMA_LOCATION_MIDPOINT - VK_CHROMA_LOCATION_COSITED_EVEN + 1), + VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF +} VkChromaLocation; + +typedef enum VkDescriptorUpdateTemplateType { + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET + 1), + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorUpdateTemplateType; + + +typedef enum VkSubgroupFeatureFlagBits { + VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001, + VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002, + VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004, + VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008, + VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010, + VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, + VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, + VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubgroupFeatureFlagBits; +typedef VkFlags VkSubgroupFeatureFlags; + +typedef enum VkPeerMemoryFeatureFlagBits { + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008, + 
VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPeerMemoryFeatureFlagBits; +typedef VkFlags VkPeerMemoryFeatureFlags; + +typedef enum VkMemoryAllocateFlagBits { + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001, + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryAllocateFlagBits; +typedef VkFlags VkMemoryAllocateFlags; +typedef VkFlags VkCommandPoolTrimFlags; +typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; + +typedef enum VkExternalMemoryHandleTypeFlagBits { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBits; +typedef VkFlags VkExternalMemoryHandleTypeFlags; + +typedef enum VkExternalMemoryFeatureFlagBits { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBits; +typedef VkFlags VkExternalMemoryFeatureFlags; + +typedef enum VkExternalFenceHandleTypeFlagBits { + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008, + 
VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceHandleTypeFlagBits; +typedef VkFlags VkExternalFenceHandleTypeFlags; + +typedef enum VkExternalFenceFeatureFlagBits { + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceFeatureFlagBits; +typedef VkFlags VkExternalFenceFeatureFlags; + +typedef enum VkFenceImportFlagBits { + VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT, + VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceImportFlagBits; +typedef VkFlags VkFenceImportFlags; + +typedef enum VkSemaphoreImportFlagBits { + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreImportFlagBits; +typedef VkFlags VkSemaphoreImportFlags; + +typedef enum VkExternalSemaphoreHandleTypeFlagBits { + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreHandleTypeFlagBits; +typedef VkFlags VkExternalSemaphoreHandleTypeFlags; + +typedef enum VkExternalSemaphoreFeatureFlagBits { + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreFeatureFlagBits; +typedef VkFlags VkExternalSemaphoreFeatureFlags; + +typedef struct VkPhysicalDeviceSubgroupProperties { + VkStructureType sType; + void* pNext; + uint32_t subgroupSize; + VkShaderStageFlags supportedStages; + VkSubgroupFeatureFlags supportedOperations; + VkBool32 quadOperationsInAllStages; +} VkPhysicalDeviceSubgroupProperties; + +typedef struct VkBindBufferMemoryInfo { + 
VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindBufferMemoryInfo; + +typedef struct VkBindImageMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindImageMemoryInfo; + +typedef struct VkPhysicalDevice16BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; +} VkPhysicalDevice16BitStorageFeatures; + +typedef struct VkMemoryDedicatedRequirements { + VkStructureType sType; + void* pNext; + VkBool32 prefersDedicatedAllocation; + VkBool32 requiresDedicatedAllocation; +} VkMemoryDedicatedRequirements; + +typedef struct VkMemoryDedicatedAllocateInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkMemoryDedicatedAllocateInfo; + +typedef struct VkMemoryAllocateFlagsInfo { + VkStructureType sType; + const void* pNext; + VkMemoryAllocateFlags flags; + uint32_t deviceMask; +} VkMemoryAllocateFlagsInfo; + +typedef struct VkDeviceGroupRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const VkRect2D* pDeviceRenderAreas; +} VkDeviceGroupRenderPassBeginInfo; + +typedef struct VkDeviceGroupCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; +} VkDeviceGroupCommandBufferBeginInfo; + +typedef struct VkDeviceGroupSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t signalSemaphoreCount; + const uint32_t* pSignalSemaphoreDeviceIndices; +} VkDeviceGroupSubmitInfo; + +typedef struct VkDeviceGroupBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; +} VkDeviceGroupBindSparseInfo; + +typedef struct VkBindBufferMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindBufferMemoryDeviceGroupInfo; + +typedef struct VkBindImageMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const VkRect2D* pSplitInstanceBindRegions; +} VkBindImageMemoryDeviceGroupInfo; + +typedef struct VkPhysicalDeviceGroupProperties { + VkStructureType sType; + void* pNext; + uint32_t physicalDeviceCount; + VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + VkBool32 subsetAllocation; +} VkPhysicalDeviceGroupProperties; + +typedef struct VkDeviceGroupDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t physicalDeviceCount; + const VkPhysicalDevice* pPhysicalDevices; +} VkDeviceGroupDeviceCreateInfo; + +typedef struct VkBufferMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferMemoryRequirementsInfo2; + +typedef struct VkImageMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageMemoryRequirementsInfo2; + +typedef struct VkImageSparseMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageSparseMemoryRequirementsInfo2; + +typedef struct VkMemoryRequirements2 { + 
VkStructureType sType; + void* pNext; + VkMemoryRequirements memoryRequirements; +} VkMemoryRequirements2; + +typedef struct VkSparseImageMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkSparseImageMemoryRequirements memoryRequirements; +} VkSparseImageMemoryRequirements2; + +typedef struct VkPhysicalDeviceFeatures2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceFeatures features; +} VkPhysicalDeviceFeatures2; + +typedef struct VkPhysicalDeviceProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties properties; +} VkPhysicalDeviceProperties2; + +typedef struct VkFormatProperties2 { + VkStructureType sType; + void* pNext; + VkFormatProperties formatProperties; +} VkFormatProperties2; + +typedef struct VkImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkImageFormatProperties imageFormatProperties; +} VkImageFormatProperties2; + +typedef struct VkPhysicalDeviceImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkImageCreateFlags flags; +} VkPhysicalDeviceImageFormatInfo2; + +typedef struct VkQueueFamilyProperties2 { + VkStructureType sType; + void* pNext; + VkQueueFamilyProperties queueFamilyProperties; +} VkQueueFamilyProperties2; + +typedef struct VkPhysicalDeviceMemoryProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceMemoryProperties memoryProperties; +} VkPhysicalDeviceMemoryProperties2; + +typedef struct VkSparseImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkSparseImageFormatProperties properties; +} VkSparseImageFormatProperties2; + +typedef struct VkPhysicalDeviceSparseImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkSampleCountFlagBits samples; + VkImageUsageFlags usage; + VkImageTiling tiling; +} VkPhysicalDeviceSparseImageFormatInfo2; + +typedef struct VkPhysicalDevicePointClippingProperties { + VkStructureType sType; + void* pNext; + VkPointClippingBehavior pointClippingBehavior; +} VkPhysicalDevicePointClippingProperties; + +typedef struct VkInputAttachmentAspectReference { + uint32_t subpass; + uint32_t inputAttachmentIndex; + VkImageAspectFlags aspectMask; +} VkInputAttachmentAspectReference; + +typedef struct VkRenderPassInputAttachmentAspectCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t aspectReferenceCount; + const VkInputAttachmentAspectReference* pAspectReferences; +} VkRenderPassInputAttachmentAspectCreateInfo; + +typedef struct VkImageViewUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags usage; +} VkImageViewUsageCreateInfo; + +typedef struct VkPipelineTessellationDomainOriginStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkTessellationDomainOrigin domainOrigin; +} VkPipelineTessellationDomainOriginStateCreateInfo; + +typedef struct VkRenderPassMultiviewCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; +} VkRenderPassMultiviewCreateInfo; + +typedef struct VkPhysicalDeviceMultiviewFeatures { + VkStructureType sType; + void* pNext; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; +} VkPhysicalDeviceMultiviewFeatures; + +typedef struct VkPhysicalDeviceMultiviewProperties { + 
VkStructureType sType; + void* pNext; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; +} VkPhysicalDeviceMultiviewProperties; + +typedef struct VkPhysicalDeviceVariablePointerFeatures { + VkStructureType sType; + void* pNext; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; +} VkPhysicalDeviceVariablePointerFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 protectedMemory; +} VkPhysicalDeviceProtectedMemoryFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryProperties { + VkStructureType sType; + void* pNext; + VkBool32 protectedNoFault; +} VkPhysicalDeviceProtectedMemoryProperties; + +typedef struct VkDeviceQueueInfo2 { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueIndex; +} VkDeviceQueueInfo2; + +typedef struct VkProtectedSubmitInfo { + VkStructureType sType; + const void* pNext; + VkBool32 protectedSubmit; +} VkProtectedSubmitInfo; + +typedef struct VkSamplerYcbcrConversionCreateInfo { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkSamplerYcbcrModelConversion ycbcrModel; + VkSamplerYcbcrRange ycbcrRange; + VkComponentMapping components; + VkChromaLocation xChromaOffset; + VkChromaLocation yChromaOffset; + VkFilter chromaFilter; + VkBool32 forceExplicitReconstruction; +} VkSamplerYcbcrConversionCreateInfo; + +typedef struct VkSamplerYcbcrConversionInfo { + VkStructureType sType; + const void* pNext; + VkSamplerYcbcrConversion conversion; +} VkSamplerYcbcrConversionInfo; + +typedef struct VkBindImagePlaneMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkBindImagePlaneMemoryInfo; + +typedef struct VkImagePlaneMemoryRequirementsInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkImagePlaneMemoryRequirementsInfo; + +typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures { + VkStructureType sType; + void* pNext; + VkBool32 samplerYcbcrConversion; +} VkPhysicalDeviceSamplerYcbcrConversionFeatures; + +typedef struct VkSamplerYcbcrConversionImageFormatProperties { + VkStructureType sType; + void* pNext; + uint32_t combinedImageSamplerDescriptorCount; +} VkSamplerYcbcrConversionImageFormatProperties; + +typedef struct VkDescriptorUpdateTemplateEntry { + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + size_t offset; + size_t stride; +} VkDescriptorUpdateTemplateEntry; + +typedef struct VkDescriptorUpdateTemplateCreateInfo { + VkStructureType sType; + void* pNext; + VkDescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + VkDescriptorUpdateTemplateType templateType; + VkDescriptorSetLayout descriptorSetLayout; + VkPipelineBindPoint pipelineBindPoint; + VkPipelineLayout pipelineLayout; + uint32_t set; +} VkDescriptorUpdateTemplateCreateInfo; + +typedef struct VkExternalMemoryProperties { + VkExternalMemoryFeatureFlags externalMemoryFeatures; + VkExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlags compatibleHandleTypes; +} VkExternalMemoryProperties; + +typedef struct VkPhysicalDeviceExternalImageFormatInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalImageFormatInfo; + +typedef struct 
VkExternalImageFormatProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalImageFormatProperties; + +typedef struct VkPhysicalDeviceExternalBufferInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkBufferUsageFlags usage; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalBufferInfo; + +typedef struct VkExternalBufferProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalBufferProperties; + +typedef struct VkPhysicalDeviceIDProperties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; +} VkPhysicalDeviceIDProperties; + +typedef struct VkExternalMemoryImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryImageCreateInfo; + +typedef struct VkExternalMemoryBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryBufferCreateInfo; + +typedef struct VkExportMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExportMemoryAllocateInfo; + +typedef struct VkPhysicalDeviceExternalFenceInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalFenceInfo; + +typedef struct VkExternalFenceProperties { + VkStructureType sType; + void* pNext; + VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes; + VkExternalFenceHandleTypeFlags compatibleHandleTypes; + VkExternalFenceFeatureFlags externalFenceFeatures; +} VkExternalFenceProperties; + +typedef struct VkExportFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlags handleTypes; +} VkExportFenceCreateInfo; + +typedef struct VkExportSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlags handleTypes; +} VkExportSemaphoreCreateInfo; + +typedef struct VkPhysicalDeviceExternalSemaphoreInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalSemaphoreInfo; + +typedef struct VkExternalSemaphoreProperties { + VkStructureType sType; + void* pNext; + VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures; +} VkExternalSemaphoreProperties; + +typedef struct VkPhysicalDeviceMaintenance3Properties { + VkStructureType sType; + void* pNext; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceMaintenance3Properties; + +typedef struct VkDescriptorSetLayoutSupport { + VkStructureType sType; + void* pNext; + VkBool32 supported; +} VkDescriptorSetLayoutSupport; + +typedef struct VkPhysicalDeviceShaderDrawParameterFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceShaderDrawParameterFeatures; + + +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR 
*PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR 
*PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion( + uint32_t* pApiVersion); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* 
pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2( + VkDevice device, + const VkDeviceQueueInfo2* pQueueInfo, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + +#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) + +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" +#define VK_COLORSPACE_SRGB_NONLINEAR_KHR VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + + +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + 
VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, + VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, + VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, + VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, + VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, + VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, + VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1), + VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkColorSpaceKHR; + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR, + VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPresentModeKHR; + + +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; + +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice 
physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + +#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) + +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + + +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; + +typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; + +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; + +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; + +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; 
+ const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} VkAcquireNextImageInfoKHR; + +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; + +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; +} VkDeviceGroupPresentInfoKHR; + +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + 
VkRect2D* pRects); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( + VkDevice device, + const VkAcquireNextImageInfoKHR* pAcquireInfo, + uint32_t* pImageIndex); +#endif + +#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) + +#define VK_KHR_DISPLAY_SPEC_VERSION 21 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" + + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplayModeCreateFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); 
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 9 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" + +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" + + +#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" + +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; + +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; + +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; + + + +#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" + +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; + +typedef 
VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; + +typedef VkFormatProperties2 VkFormatProperties2KHR; + +typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; + +typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; + +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; + +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; + +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; + +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif + +#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 3 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" + +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; + +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; + +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; + +typedef VkMemoryAllocateFlagBits 
VkMemoryAllocateFlagBitsKHR; + + +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; + +typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; + +typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; + +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; + +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; + +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; + +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif + +#define VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" + + +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1" + +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; + + +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif + +#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE + +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; + +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +#endif + +#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE + +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; + +typedef 
VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; + +typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; + +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; + + +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; + +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; + +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; + +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; + +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; + +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); +#endif + +#define VK_KHR_external_memory 1 +#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL + +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; + +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; + +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; + + + +#define VK_KHR_external_memory_fd 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" + +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; + +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; + +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); +#endif + +#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" + +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; + +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; + +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; + +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; + + +typedef 
VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; + +typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif + +#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" + +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; + +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; + + +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; + + + +#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" + +typedef struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; + +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( + VkDevice device, + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( + VkDevice device, + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + +#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" + +typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorPropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); 
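+/*
+ * Editorial note: a minimal usage sketch for VK_KHR_push_descriptor, not part
+ * of the generated header. The handles (cmd, pipelineLayout, ubo) are
+ * hypothetical and created elsewhere; the sketch assumes the extension is
+ * enabled and that set 0 of pipelineLayout was built from a descriptor set
+ * layout created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR.
+ *
+ *     VkDescriptorBufferInfo bufferInfo = { ubo, 0, VK_WHOLE_SIZE };
+ *     VkWriteDescriptorSet write = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET };
+ *     write.dstBinding      = 0;
+ *     write.descriptorCount = 1;
+ *     write.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+ *     write.pBufferInfo     = &bufferInfo;
+ *     // The descriptor is consumed directly by the command buffer; no
+ *     // VkDescriptorSet allocation or vkUpdateDescriptorSets call is needed.
+ *     vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
+ *                               pipelineLayout, 0, 1, &write);
+ */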
+#endif + +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" + +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; + + + +#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" + +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; + +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; + +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; + + + +#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; + + +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" + +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; + + +typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; + + +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; + +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif + +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" + +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( + VkDevice device, + VkSwapchainKHR swapchain); +#endif + +#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME 
"VK_KHR_external_fence_capabilities" + +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; + +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; + +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; + +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; + + +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; + +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); +#endif + +#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" + +typedef VkFenceImportFlags VkFenceImportFlagsKHR; + +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; + + +typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; + + + +#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" + +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + +#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME "VK_KHR_maintenance2" + +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; + + +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; + +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; + +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; + +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; + +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; + + + +#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" + +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; + +typedef struct VkSurfaceCapabilities2KHR { + 
VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; + +typedef struct VkSurfaceFormat2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); +#endif + +#define VK_KHR_variable_pointers 1 +#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" + +typedef VkPhysicalDeviceVariablePointerFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; + + + +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" + +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; + +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; + + + +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" + + +#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" + + +#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" + +typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; + +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; + +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; + +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; + + +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR 
void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + +#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" + +typedef struct VkImageFormatListCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkImageFormatListCreateInfoKHR; + + + +#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; + + +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 1 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" + +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; + +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; + +typedef VkChromaLocation VkChromaLocationKHR; + + +typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; + +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; + +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; + +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; + +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + +typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif + +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" + +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; + +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif + +#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE3_SPEC_VERSION 1 +#define 
VK_KHR_MAINTENANCE3_EXTENSION_NAME "VK_KHR_maintenance3" + +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; + +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; + + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) + +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 9 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" +#define VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT +#define VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT + + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, + VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = 
(VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; + + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" + + +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" + + +#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" + + +#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" + + +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD, + VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD, + VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1), + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} 
VkRasterizationOrderAMD; + +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; + + + +#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" + + +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" + + +#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" + +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; + +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugMarkerObjectTagInfoEXT; + +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( + VkDevice device, + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( + VkDevice device, + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +#endif + +#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" + +typedef struct VkDedicatedAllocationImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; + +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; + +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} 
VkDedicatedAllocationMemoryAllocateInfoNV; + + + +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + +#define VK_AMD_negative_viewport_height 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +#define VK_AMD_gpu_shader_half_float 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" + +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_BEGIN_RANGE_AMD = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + VK_SHADER_INFO_TYPE_END_RANGE_AMD = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, + VK_SHADER_INFO_TYPE_RANGE_SIZE_AMD = (VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD - VK_SHADER_INFO_TYPE_STATISTICS_AMD + 1), + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; + +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + 
VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +#define VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" + + +#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" + + +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + +typedef enum VkExternalMemoryFeatureFlagBitsNV { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; + +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif + +#define VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" + +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; + +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; + + + +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" + + +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT, + VK_VALIDATION_CHECK_END_RANGE_EXT = 
VK_VALIDATION_CHECK_SHADERS_EXT, + VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1), + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; + +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + VkValidationCheckEXT* pDisabledValidationChecks; +} VkValidationFlagsEXT; + + + +#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" + + +#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" + + +#define VK_NVX_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) + +#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" + + +typedef enum VkIndirectCommandsTokenTypeNVX { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNVX; + +typedef enum VkObjectEntryTypeNVX { + VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, + VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, + VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, + VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, + VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, + VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, + VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, + VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), + VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF +} VkObjectEntryTypeNVX; + + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNVX; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; + +typedef enum VkObjectEntryUsageFlagBitsNVX { + VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, + VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, + VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF +} VkObjectEntryUsageFlagBitsNVX; +typedef VkFlags VkObjectEntryUsageFlagsNVX; + +typedef struct VkDeviceGeneratedCommandsFeaturesNVX { + 
VkStructureType sType; + const void* pNext; + VkBool32 computeBindingPointSupport; +} VkDeviceGeneratedCommandsFeaturesNVX; + +typedef struct VkDeviceGeneratedCommandsLimitsNVX { + VkStructureType sType; + const void* pNext; + uint32_t maxIndirectCommandsLayoutTokenCount; + uint32_t maxObjectEntryCounts; + uint32_t minSequenceCountBufferOffsetAlignment; + uint32_t minSequenceIndexBufferOffsetAlignment; + uint32_t minCommandsTokenBufferOffsetAlignment; +} VkDeviceGeneratedCommandsLimitsNVX; + +typedef struct VkIndirectCommandsTokenNVX { + VkIndirectCommandsTokenTypeNVX tokenType; + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsTokenNVX; + +typedef struct VkIndirectCommandsLayoutTokenNVX { + VkIndirectCommandsTokenTypeNVX tokenType; + uint32_t bindingUnit; + uint32_t dynamicCount; + uint32_t divisor; +} VkIndirectCommandsLayoutTokenNVX; + +typedef struct VkIndirectCommandsLayoutCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkIndirectCommandsLayoutUsageFlagsNVX flags; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNVX* pTokens; +} VkIndirectCommandsLayoutCreateInfoNVX; + +typedef struct VkCmdProcessCommandsInfoNVX { + VkStructureType sType; + const void* pNext; + VkObjectTableNVX objectTable; + VkIndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t indirectCommandsTokenCount; + const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens; + uint32_t maxSequencesCount; + VkCommandBuffer targetCommandBuffer; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkCmdProcessCommandsInfoNVX; + +typedef struct VkCmdReserveSpaceForCommandsInfoNVX { + VkStructureType sType; + const void* pNext; + VkObjectTableNVX objectTable; + VkIndirectCommandsLayoutNVX indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkCmdReserveSpaceForCommandsInfoNVX; + +typedef struct VkObjectTableCreateInfoNVX { + VkStructureType sType; + const void* pNext; + uint32_t objectCount; + const VkObjectEntryTypeNVX* pObjectEntryTypes; + const uint32_t* pObjectEntryCounts; + const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags; + uint32_t maxUniformBuffersPerDescriptor; + uint32_t maxStorageBuffersPerDescriptor; + uint32_t maxStorageImagesPerDescriptor; + uint32_t maxSampledImagesPerDescriptor; + uint32_t maxPipelineLayouts; +} VkObjectTableCreateInfoNVX; + +typedef struct VkObjectTableEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; +} VkObjectTableEntryNVX; + +typedef struct VkObjectTablePipelineEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipeline pipeline; +} VkObjectTablePipelineEntryNVX; + +typedef struct VkObjectTableDescriptorSetEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipelineLayout pipelineLayout; + VkDescriptorSet descriptorSet; +} VkObjectTableDescriptorSetEntryNVX; + +typedef struct VkObjectTableVertexBufferEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkBuffer buffer; +} VkObjectTableVertexBufferEntryNVX; + +typedef struct VkObjectTableIndexBufferEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkBuffer buffer; + VkIndexType indexType; +} VkObjectTableIndexBufferEntryNVX; + +typedef struct VkObjectTablePushConstantEntryNVX { + VkObjectEntryTypeNVX type; + VkObjectEntryUsageFlagsNVX flags; + VkPipelineLayout pipelineLayout; + VkShaderStageFlags stageFlags; +} 
VkObjectTablePushConstantEntryNVX; + + +typedef void (VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable); +typedef void (VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices); +typedef VkResult (VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( + VkCommandBuffer commandBuffer, + const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( + VkCommandBuffer commandBuffer, + const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( + VkDevice device, + VkIndirectCommandsLayoutNVX indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( + VkDevice device, + const VkObjectTableCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkObjectTableNVX* pObjectTable); + +VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( + VkDevice device, + VkObjectTableNVX objectTable, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( + VkDevice device, + VkObjectTableNVX objectTable, + uint32_t objectCount, + const VkObjectTableEntryNVX* const* ppObjectTableEntries, + const uint32_t* pObjectIndices); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX( + VkDevice device, + VkObjectTableNVX objectTable, + uint32_t objectCount, + const VkObjectEntryTypeNVX* pObjectEntryTypes, + const uint32_t* pObjectIndices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( + VkPhysicalDevice physicalDevice, + VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, + VkDeviceGeneratedCommandsLimitsNVX* pLimits); +#endif + +#define VK_NV_clip_space_w_scaling 1 
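+/*
+ * Editorial note: a brief usage sketch for VK_NV_clip_space_w_scaling, not
+ * part of the generated header. It assumes a hypothetical command buffer
+ * (cmd) whose bound graphics pipeline enabled viewportWScalingEnable through
+ * VkPipelineViewportWScalingStateCreateInfoNV and listed
+ * VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV as a dynamic state.
+ *
+ *     VkViewportWScalingNV wScaling = { 0.5f, 0.5f };   // xcoeff, ycoeff
+ *     vkCmdSetViewportWScalingNV(cmd, 0, 1, &wScaling); // viewport 0 only
+ */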
+#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" + +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; + +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif + +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" + +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif + +#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" +#define VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT + + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; + +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#endif + +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" + + +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT, + VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT, + VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1), + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; + +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + 
VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, + VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, + VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1), + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; + +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, + VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, + VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1), + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; + +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT powerState; +} VkDisplayPowerInfoEXT; + +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; + +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; + +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); +#endif + +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" + +typedef struct VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; + +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; + +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} 
VkPresentTimeGOOGLE; + +typedef struct VkPresentTimesInfoGOOGLE { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif + +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" + + +#define VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" + + +#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2" + + +#define VK_NVX_multiview_per_view_attributes 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" + +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { + VkStructureType sType; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + + +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" + + +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, + VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1), + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; + +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; + +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; + +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} 
VkPipelineViewportSwizzleStateCreateInfoNV; + + + +#define VK_EXT_discard_rectangles 1 +#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, + VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1), + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; + +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; + +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( + VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); +#endif + +#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_BEGIN_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + VK_CONSERVATIVE_RASTERIZATION_MODE_END_RANGE_EXT = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT, + VK_CONSERVATIVE_RASTERIZATION_MODE_RANGE_SIZE_EXT = (VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT + 1), + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConservativeRasterizationModeEXT; + +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; + +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT 
conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 3 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" + + +#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 1 +#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" + +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; + +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; + + +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); +#endif + +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" + + +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~0U-2) + + +#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) + +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 1 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" + +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; + +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; + +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; + +typedef struct VkDebugUtilsObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + 
VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageType, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); + +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult 
VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#endif + +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" + + +typedef enum VkSamplerReductionModeEXT { + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0, + VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1, + VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2, + VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, + VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT, + VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1), + VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSamplerReductionModeEXT; + +typedef struct VkSamplerReductionModeCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSamplerReductionModeEXT reductionMode; +} VkSamplerReductionModeCreateInfoEXT; + +typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; +} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + + +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 1 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" + + +#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" + + +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" + + +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" + + +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" + +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; + +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; + +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; + +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; + +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const 
VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; + +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif + +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" + + +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT, + VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1), + VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" + +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; + +typedef struct 
VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV, + VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV, + VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1), + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; + +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; + +typedef struct VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; + + + +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" + + +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) + +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" +#define VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT + + +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_BEGIN_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, + VK_VALIDATION_CACHE_HEADER_VERSION_END_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, + VK_VALIDATION_CACHE_HEADER_VERSION_RANGE_SIZE_EXT = (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + 1), + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; + +typedef VkFlags VkValidationCacheCreateFlagsEXT; + +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} VkShaderModuleValidationCacheCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* 
pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( + VkDevice device, + const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( + VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); +#endif + +#define VK_EXT_descriptor_indexing 1 +#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" + + +typedef enum VkDescriptorBindingFlagBitsEXT { + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = 0x00000001, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = 0x00000002, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = 0x00000008, + VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDescriptorBindingFlagBitsEXT; +typedef VkFlags VkDescriptorBindingFlagsEXT; + +typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t bindingCount; + const VkDescriptorBindingFlagsEXT* pBindingFlags; +} VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef struct VkPhysicalDeviceDescriptorIndexingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; +} VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef struct VkPhysicalDeviceDescriptorIndexingPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 
shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; +} VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; +} VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupportEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVariableDescriptorCount; +} VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" + + +typedef enum VkQueueGlobalPriorityEXT { + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = 1024, + VK_QUEUE_GLOBAL_PRIORITY_BEGIN_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT, + VK_QUEUE_GLOBAL_PRIORITY_END_RANGE_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT, + VK_QUEUE_GLOBAL_PRIORITY_RANGE_SIZE_EXT = (VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT + 1), + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_EXT = 0x7FFFFFFF +} VkQueueGlobalPriorityEXT; + +typedef struct VkDeviceQueueGlobalPriorityCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriorityEXT globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfoEXT; + + + +#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" + +typedef struct VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; + +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minImportedHostPointerAlignment; +} 
VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif + +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" + +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif + +#define VK_AMD_shader_core_properties 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" + +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; + + + +#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" + +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +typedef struct VkVertexInputBindingDivisorDescriptionEXT { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescriptionEXT; + +typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfoEXT; + + + +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_ios.h b/include/vulkan/vulkan_ios.h new file mode 100644 index 0000000..a092481 --- /dev/null +++ b/include/vulkan/vulkan_ios.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_IOS_H_ +#define VULKAN_IOS_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_MVK_ios_surface 1 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" + +typedef VkFlags VkIOSSurfaceCreateFlagsMVK; + +typedef struct VkIOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkIOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkIOSSurfaceCreateInfoMVK; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( + VkInstance instance, + const VkIOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_macos.h b/include/vulkan/vulkan_macos.h new file mode 100644 index 0000000..ff0b701 --- /dev/null +++ b/include/vulkan/vulkan_macos.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_MACOS_H_ +#define VULKAN_MACOS_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_MVK_macos_surface 1 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 +#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" + +typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; + +typedef struct VkMacOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkMacOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkMacOSSurfaceCreateInfoMVK; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( + VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_mir.h b/include/vulkan/vulkan_mir.h new file mode 100644 index 0000000..7d24ed2 --- /dev/null +++ b/include/vulkan/vulkan_mir.h @@ -0,0 +1,65 @@ +#ifndef VULKAN_MIR_H_ +#define VULKAN_MIR_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. 
+** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_KHR_mir_surface 1 +#define VK_KHR_MIR_SURFACE_SPEC_VERSION 4 +#define VK_KHR_MIR_SURFACE_EXTENSION_NAME "VK_KHR_mir_surface" + +typedef VkFlags VkMirSurfaceCreateFlagsKHR; + +typedef struct VkMirSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkMirSurfaceCreateFlagsKHR flags; + MirConnection* connection; + MirSurface* mirSurface; +} VkMirSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMirSurfaceKHR)(VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR( + VkInstance instance, + const VkMirSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceMirPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + MirConnection* connection); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_vi.h b/include/vulkan/vulkan_vi.h new file mode 100644 index 0000000..015166b --- /dev/null +++ b/include/vulkan/vulkan_vi.h @@ -0,0 +1,58 @@ +#ifndef VULKAN_VI_H_ +#define VULKAN_VI_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_NN_vi_surface 1 +#define VK_NN_VI_SURFACE_SPEC_VERSION 1 +#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" + +typedef VkFlags VkViSurfaceCreateFlagsNN; + +typedef struct VkViSurfaceCreateInfoNN { + VkStructureType sType; + const void* pNext; + VkViSurfaceCreateFlagsNN flags; + void* window; +} VkViSurfaceCreateInfoNN; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( + VkInstance instance, + const VkViSurfaceCreateInfoNN* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_wayland.h b/include/vulkan/vulkan_wayland.h new file mode 100644 index 0000000..5ba0827 --- /dev/null +++ b/include/vulkan/vulkan_wayland.h @@ -0,0 +1,65 @@ +#ifndef VULKAN_WAYLAND_H_ +#define VULKAN_WAYLAND_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_KHR_wayland_surface 1 +#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" + +typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; + +typedef struct VkWaylandSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; +} VkWaylandSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( + VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display* display); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_win32.h b/include/vulkan/vulkan_win32.h new file mode 100644 index 0000000..6a85409 --- /dev/null +++ b/include/vulkan/vulkan_win32.h @@ -0,0 +1,276 @@ +#ifndef VULKAN_WIN32_H_ +#define VULKAN_WIN32_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. 
+** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_KHR_win32_surface 1 +#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" + +typedef VkFlags VkWin32SurfaceCreateFlagsKHR; + +typedef struct VkWin32SurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWin32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; +} VkWin32SurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( + VkInstance instance, + const VkWin32SurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex); +#endif + +#define VK_KHR_external_memory_win32 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" + +typedef struct VkImportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportMemoryWin32HandleInfoKHR; + +typedef struct VkExportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportMemoryWin32HandleInfoKHR; + +typedef struct VkMemoryWin32HandlePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryWin32HandlePropertiesKHR; + +typedef struct VkMemoryGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetWin32HandleInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR( + VkDevice device, + const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); +#endif + +#define 
VK_KHR_win32_keyed_mutex 1 +#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 +#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" + +typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeouts; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoKHR; + + + +#define VK_KHR_external_semaphore_win32 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" + +typedef struct VkImportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportSemaphoreWin32HandleInfoKHR; + +typedef struct VkExportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportSemaphoreWin32HandleInfoKHR; + +typedef struct VkD3D12FenceSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValuesCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValuesCount; + const uint64_t* pSignalSemaphoreValues; +} VkD3D12FenceSubmitInfoKHR; + +typedef struct VkSemaphoreGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetWin32HandleInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( + VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + +#define VK_KHR_external_fence_win32 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" + +typedef struct VkImportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportFenceWin32HandleInfoKHR; + +typedef struct VkExportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportFenceWin32HandleInfoKHR; + +typedef struct VkFenceGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetWin32HandleInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + 
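+/*
+   Illustrative usage sketch (not part of the generated interface): exporting a
+   fence as a Win32 HANDLE with this extension. It assumes a VkDevice 'device'
+   and a VkFence 'fence' that was created with a VkExportFenceCreateInfo in its
+   pNext chain requesting an opaque Win32 handle, and that
+   VK_KHR_external_fence_win32 was enabled when the device was created.
+
+       VkFenceGetWin32HandleInfoKHR getInfo = {
+           VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, NULL, fence,
+           VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT };
+       HANDLE handle = NULL;
+       PFN_vkGetFenceWin32HandleKHR pfnGetFenceWin32HandleKHR =
+           (PFN_vkGetFenceWin32HandleKHR)vkGetDeviceProcAddr(
+               device, "vkGetFenceWin32HandleKHR");
+       if (pfnGetFenceWin32HandleKHR != NULL &&
+           pfnGetFenceWin32HandleKHR(device, &getInfo, &handle) == VK_SUCCESS) {
+           ...use or duplicate 'handle'...
+       }
+
+   On success, 'handle' can be imported into another process or API.
+*/
+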
+#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR( + VkDevice device, + const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( + VkDevice device, + const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + +#define VK_NV_external_memory_win32 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" + +typedef struct VkImportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleType; + HANDLE handle; +} VkImportMemoryWin32HandleInfoNV; + +typedef struct VkExportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; +} VkExportMemoryWin32HandleInfoNV; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( + VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE* pHandle); +#endif + +#define VK_NV_win32_keyed_mutex 1 +#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 1 +#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" + +typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeoutMilliseconds; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoNV; + + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xcb.h b/include/vulkan/vulkan_xcb.h new file mode 100644 index 0000000..ba03600 --- /dev/null +++ b/include/vulkan/vulkan_xcb.h @@ -0,0 +1,66 @@ +#ifndef VULKAN_XCB_H_ +#define VULKAN_XCB_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_KHR_xcb_surface 1 +#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" + +typedef VkFlags VkXcbSurfaceCreateFlagsKHR; + +typedef struct VkXcbSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; +} VkXcbSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( + VkInstance instance, + const VkXcbSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t* connection, + xcb_visualid_t visual_id); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xlib.h b/include/vulkan/vulkan_xlib.h new file mode 100644 index 0000000..e1d967e --- /dev/null +++ b/include/vulkan/vulkan_xlib.h @@ -0,0 +1,66 @@ +#ifndef VULKAN_XLIB_H_ +#define VULKAN_XLIB_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_KHR_xlib_surface 1 +#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" + +typedef VkFlags VkXlibSurfaceCreateFlagsKHR; + +typedef struct VkXlibSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXlibSurfaceCreateFlagsKHR flags; + Display* dpy; + Window window; +} VkXlibSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR( + VkInstance instance, + const VkXlibSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display* dpy, + VisualID visualID); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/vulkan/vulkan_xlib_xrandr.h b/include/vulkan/vulkan_xlib_xrandr.h new file mode 100644 index 0000000..117d017 --- /dev/null +++ b/include/vulkan/vulkan_xlib_xrandr.h @@ -0,0 +1,54 @@ +#ifndef VULKAN_XLIB_XRANDR_H_ +#define VULKAN_XLIB_XRANDR_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2018 The Khronos Group Inc. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#define VK_EXT_acquire_xlib_display 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display" + +typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + RROutput rrOutput, + VkDisplayKHR* pDisplay); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/registry/cgenerator.py b/registry/cgenerator.py new file mode 100644 index 0000000..a370970 --- /dev/null +++ b/registry/cgenerator.py @@ -0,0 +1,417 @@ +#!/usr/bin/python3 -i +# +# Copyright (c) 2013-2018 The Khronos Group Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os,re,sys,pdb +from generator import * + +# CGeneratorOptions - subclass of GeneratorOptions. +# +# Adds options used by COutputGenerator objects during C language header +# generation. +# +# Additional members +# prefixText - list of strings to prefix generated header with +# (usually a copyright statement + calling convention macros). +# protectFile - True if multiple inclusion protection should be +# generated (based on the filename) around the entire header. +# protectFeature - True if #ifndef..#endif protection should be +# generated around a feature interface in the header file. +# genFuncPointers - True if function pointer typedefs should be +# generated +# protectProto - If conditional protection should be generated +# around prototype declarations, set to either '#ifdef' +# to require opt-in (#ifdef protectProtoStr) or '#ifndef' +# to require opt-out (#ifndef protectProtoStr). Otherwise +# set to None. +# protectProtoStr - #ifdef/#ifndef symbol to use around prototype +# declarations, if protectProto is set +# apicall - string to use for the function declaration prefix, +# such as APICALL on Windows. +# apientry - string to use for the calling convention macro, +# in typedefs, such as APIENTRY. +# apientryp - string to use for the calling convention macro +# in function pointer typedefs, such as APIENTRYP. +# directory - directory into which to generate include files +# indentFuncProto - True if prototype declarations should put each +# parameter on a separate line +# indentFuncPointer - True if typedefed function pointers should put each +# parameter on a separate line +# alignFuncParam - if nonzero and parameters are being put on a +# separate line, align parameter names at the specified column +class CGeneratorOptions(GeneratorOptions): + """Represents options during C interface generation for headers""" + def __init__(self, + filename = None, + directory = '.', + apiname = None, + profile = None, + versions = '.*', + emitversions = '.*', + defaultExtensions = None, + addExtensions = None, + removeExtensions = None, + emitExtensions = None, + sortProcedure = regSortFeatures, + prefixText = "", + genFuncPointers = True, + protectFile = True, + protectFeature = True, + protectProto = None, + protectProtoStr = None, + apicall = '', + apientry = '', + apientryp = '', + indentFuncProto = True, + indentFuncPointer = False, + alignFuncParam = 0): + GeneratorOptions.__init__(self, filename, directory, apiname, profile, + versions, emitversions, defaultExtensions, + addExtensions, removeExtensions, + emitExtensions, sortProcedure) + self.prefixText = prefixText + self.genFuncPointers = genFuncPointers + self.protectFile = protectFile + self.protectFeature = protectFeature + self.protectProto = protectProto + self.protectProtoStr = protectProtoStr + self.apicall = apicall + self.apientry = apientry + self.apientryp = apientryp + self.indentFuncProto = indentFuncProto + self.indentFuncPointer = indentFuncPointer + self.alignFuncParam = alignFuncParam + +# COutputGenerator - subclass of OutputGenerator. +# Generates C-language API interfaces. 
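+#
+# A COutputGenerator is normally driven together with a CGeneratorOptions by
+# the registry code (reg.py / genvk.py). The sketch below is illustrative
+# only; the option values are assumptions for clarity, not necessarily the
+# exact ones genvk.py uses for vulkan_core.h:
+#
+#   opts = CGeneratorOptions(filename          = 'vulkan_core.h',
+#                            directory         = 'include/vulkan',
+#                            apiname           = 'vulkan',
+#                            defaultExtensions = 'vulkan',
+#                            protectProto      = '#ifndef',
+#                            protectProtoStr   = 'VK_NO_PROTOTYPES',
+#                            apicall           = 'VKAPI_ATTR ',
+#                            apientry          = 'VKAPI_CALL ',
+#                            apientryp         = 'VKAPI_PTR *',
+#                            alignFuncParam    = 48)
+#   gen = COutputGenerator(errFile = sys.stderr, warnFile = sys.stderr)
+#   reg.setGenerator(gen)     # 'reg' is a reg.Registry with vk.xml loaded
+#   reg.apiGen(opts)          # writes directory/filename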
+# +# ---- methods ---- +# COutputGenerator(errFile, warnFile, diagFile) - args as for +# OutputGenerator. Defines additional internal state. +# ---- methods overriding base class ---- +# beginFile(genOpts) +# endFile() +# beginFeature(interface, emit) +# endFeature() +# genType(typeinfo,name) +# genStruct(typeinfo,name) +# genGroup(groupinfo,name) +# genEnum(enuminfo, name) +# genCmd(cmdinfo) +class COutputGenerator(OutputGenerator): + """Generate specified API interfaces in a specific style, such as a C header""" + # This is an ordered list of sections in the header file. + TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum', + 'group', 'bitmask', 'funcpointer', 'struct'] + ALL_SECTIONS = TYPE_SECTIONS + ['commandPointer', 'command'] + def __init__(self, + errFile = sys.stderr, + warnFile = sys.stderr, + diagFile = sys.stdout): + OutputGenerator.__init__(self, errFile, warnFile, diagFile) + # Internal state - accumulators for different inner block text + self.sections = dict([(section, []) for section in self.ALL_SECTIONS]) + # + def beginFile(self, genOpts): + OutputGenerator.beginFile(self, genOpts) + # C-specific + # + # Multiple inclusion protection & C++ wrappers. + if (genOpts.protectFile and self.genOpts.filename): + headerSym = re.sub('\.h', '_h_', + os.path.basename(self.genOpts.filename)).upper() + write('#ifndef', headerSym, file=self.outFile) + write('#define', headerSym, '1', file=self.outFile) + self.newline() + write('#ifdef __cplusplus', file=self.outFile) + write('extern "C" {', file=self.outFile) + write('#endif', file=self.outFile) + self.newline() + # + # User-supplied prefix text, if any (list of strings) + if (genOpts.prefixText): + for s in genOpts.prefixText: + write(s, file=self.outFile) + # + # Some boilerplate describing what was generated - this + # will probably be removed later since the extensions + # pattern may be very long. + # write('/* Generated C header for:', file=self.outFile) + # write(' * API:', genOpts.apiname, file=self.outFile) + # if (genOpts.profile): + # write(' * Profile:', genOpts.profile, file=self.outFile) + # write(' * Versions considered:', genOpts.versions, file=self.outFile) + # write(' * Versions emitted:', genOpts.emitversions, file=self.outFile) + # write(' * Default extensions included:', genOpts.defaultExtensions, file=self.outFile) + # write(' * Additional extensions included:', genOpts.addExtensions, file=self.outFile) + # write(' * Extensions removed:', genOpts.removeExtensions, file=self.outFile) + # write(' * Extensions emitted:', genOpts.emitExtensions, file=self.outFile) + # write(' */', file=self.outFile) + def endFile(self): + # C-specific + # Finish C++ wrapper and multiple inclusion protection + self.newline() + write('#ifdef __cplusplus', file=self.outFile) + write('}', file=self.outFile) + write('#endif', file=self.outFile) + if (self.genOpts.protectFile and self.genOpts.filename): + self.newline() + write('#endif', file=self.outFile) + # Finish processing in superclass + OutputGenerator.endFile(self) + def beginFeature(self, interface, emit): + # Start processing in superclass + OutputGenerator.beginFeature(self, interface, emit) + # C-specific + # Accumulate includes, defines, types, enums, function pointer typedefs, + # end function prototypes separately for this feature. They're only + # printed in endFeature(). + self.sections = dict([(section, []) for section in self.ALL_SECTIONS]) + def endFeature(self): + # C-specific + # Actually write the interface to the output file. 
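        # Emission order, summarizing the code below: any #ifndef/#ifdef
        # protection wrappers and the feature #define come first, then the
        # accumulated TYPE_SECTIONS content, then function pointer typedefs
        # (when genFuncPointers is set), and finally command prototypes,
        # optionally wrapped in the protectProto/protectProtoStr guard
        # (VK_NO_PROTOTYPES in the headers generated by genvk.py).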
+ if (self.emit): + self.newline() + if (self.genOpts.protectFeature): + write('#ifndef', self.featureName, file=self.outFile) + # If type declarations are needed by other features based on + # this one, it may be necessary to suppress the ExtraProtect, + # or move it below the 'for section...' loop. + if (self.featureExtraProtect != None): + write('#ifdef', self.featureExtraProtect, file=self.outFile) + write('#define', self.featureName, '1', file=self.outFile) + for section in self.TYPE_SECTIONS: + contents = self.sections[section] + if contents: + write('\n'.join(contents), file=self.outFile) + self.newline() + if (self.genOpts.genFuncPointers and self.sections['commandPointer']): + write('\n'.join(self.sections['commandPointer']), file=self.outFile) + self.newline() + if (self.sections['command']): + if (self.genOpts.protectProto): + write(self.genOpts.protectProto, + self.genOpts.protectProtoStr, file=self.outFile) + write('\n'.join(self.sections['command']), end='', file=self.outFile) + if (self.genOpts.protectProto): + write('#endif', file=self.outFile) + else: + self.newline() + if (self.featureExtraProtect != None): + write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile) + if (self.genOpts.protectFeature): + write('#endif /*', self.featureName, '*/', file=self.outFile) + # Finish processing in superclass + OutputGenerator.endFeature(self) + # + # Append a definition to the specified section + def appendSection(self, section, text): + # self.sections[section].append('SECTION: ' + section + '\n') + self.sections[section].append(text) + # self.logMsg('diag', 'appendSection(section =', section, 'text =', text) + # + # Type generation + def genType(self, typeinfo, name, alias): + OutputGenerator.genType(self, typeinfo, name, alias) + typeElem = typeinfo.elem + + # Determine the category of the type, and the type section to add + # its definition to. + # 'funcpointer' is added to the 'struct' section as a workaround for + # internal issue #877, since structures and function pointer types + # can have cross-dependencies. + category = typeElem.get('category') + if category == 'funcpointer': + section = 'struct' + else: + section = category + + if category == 'struct' or category == 'union': + # If the type is a struct type, generate it using the + # special-purpose generator. + self.genStruct(typeinfo, name, alias) + else: + if alias: + # If the type is an alias, just emit a typedef declaration + body = 'typedef ' + alias + ' ' + name + ';\n' + else: + # Replace tags with an APIENTRY-style string + # (from self.genOpts). Copy other text through unchanged. + # If the resulting text is an empty string, don't emit it. + body = noneStr(typeElem.text) + for elem in typeElem: + if (elem.tag == 'apientry'): + body += self.genOpts.apientry + noneStr(elem.tail) + else: + body += noneStr(elem.text) + noneStr(elem.tail) + + if body: + # Add extra newline after multi-line entries. + if '\n' in body[0:-1]: + body += '\n' + self.appendSection(section, body) + # + # Struct (e.g. C "struct" type) generation. + # This is a special case of the tag where the contents are + # interpreted as a set of tags instead of freeform C + # C type declarations. The tags are just like + # tags - they are a declaration of a struct or union member. + # Only simple member declarations are supported (no nested + # structs etc.) + # If alias != None, then this struct aliases another; just + # generate a typedef of that alias. 
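    # As a concrete illustration, the body built here for a structure such as
    # VkXlibSurfaceCreateInfoKHR (emitted into vulkan_xlib.h earlier in this
    # patch) has the shape
    #
    #   typedef struct VkXlibSurfaceCreateInfoKHR {
    #       VkStructureType                sType;
    #       const void*                    pNext;
    #       VkXlibSurfaceCreateFlagsKHR    flags;
    #       Display*                       dpy;
    #       Window                         window;
    #   } VkXlibSurfaceCreateInfoKHR;
    #
    # with member names aligned just past the longest member type
    # (targetLen + 4 below).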
+ def genStruct(self, typeinfo, typeName, alias): + OutputGenerator.genStruct(self, typeinfo, typeName, alias) + + typeElem = typeinfo.elem + + if alias: + body = 'typedef ' + alias + ' ' + typeName + ';\n' + else: + body = 'typedef ' + typeElem.get('category') + ' ' + typeName + ' {\n' + + targetLen = 0; + for member in typeElem.findall('.//member'): + targetLen = max(targetLen, self.getCParamTypeLength(member)) + for member in typeElem.findall('.//member'): + body += self.makeCParamDecl(member, targetLen + 4) + body += ';\n' + body += '} ' + typeName + ';\n' + + self.appendSection('struct', body) + # + # Group (e.g. C "enum" type) generation. + # These are concatenated together with other types. + # If alias != None, it is the name of another group type + # which aliases this type; just generate that alias. + def genGroup(self, groupinfo, groupName, alias = None): + OutputGenerator.genGroup(self, groupinfo, groupName, alias) + groupElem = groupinfo.elem + + if alias: + # If the group name is aliased, just emit a typedef declaration + # for the alias. + body = 'typedef ' + alias + ' ' + groupName + ';\n' + else: + self.logMsg('diag', 'CGenerator.genGroup group =', groupName, 'alias =', alias) + + # Otherwise, emit an actual enumerated type declaration + expandName = re.sub(r'([0-9a-z_])([A-Z0-9])',r'\1_\2',groupName).upper() + + expandPrefix = expandName + expandSuffix = '' + expandSuffixMatch = re.search(r'[A-Z][A-Z]+$',groupName) + if expandSuffixMatch: + expandSuffix = '_' + expandSuffixMatch.group() + # Strip off the suffix from the prefix + expandPrefix = expandName.rsplit(expandSuffix, 1)[0] + + # Prefix + body = "\ntypedef enum " + groupName + " {\n" + + # @@ Should use the type="bitmask" attribute instead + isEnum = ('FLAG_BITS' not in expandPrefix) + + # Get a list of nested 'enum' tags. + enums = groupElem.findall('enum') + + # Check for and report duplicates, and return a list with them + # removed. + enums = self.checkDuplicateEnums(enums) + + # Loop over the nested 'enum' tags. Keep track of the minimum and + # maximum numeric values, if they can be determined; but only for + # core API enumerants, not extension enumerants. This is inferred + # by looking for 'extends' attributes. + minName = None + + # Accumulate non-numeric enumerant values separately and append + # them following the numeric values, to allow for aliases. + # NOTE: this doesn't do a topological sort yet, so aliases of + # aliases can still get in the wrong order. + aliasText = "" + + for elem in enums: + # Convert the value to an integer and use that to track min/max. + (numVal,strVal) = self.enumToValue(elem, True) + name = elem.get('name') + + # Extension enumerants are only included if they are required + if self.isEnumRequired(elem): + decl = " " + name + " = " + strVal + ",\n" + if numVal != None: + body += decl + else: + aliasText += decl + + # Don't track min/max for non-numbers (numVal == None) + if isEnum and numVal != None and elem.get('extends') is None: + if minName == None: + minName = maxName = name + minValue = maxValue = numVal + elif numVal < minValue: + minName = name + minValue = numVal + elif numVal > maxValue: + maxName = name + maxValue = numVal + + # Now append the non-numeric enumerant values + body += aliasText + + # Generate min/max value tokens and a range-padding enum. Need some + # additional padding to generate correct names... 
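            # (For an enumerated group such as VkFormat, the tokens generated
            # below are VK_FORMAT_BEGIN_RANGE, VK_FORMAT_END_RANGE,
            # VK_FORMAT_RANGE_SIZE and VK_FORMAT_MAX_ENUM = 0x7FFFFFFF;
            # *_FLAG_BITS groups skip the range tokens, since isEnum is False
            # for them, but still receive the _MAX_ENUM value.)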
+ if isEnum: + body += " " + expandPrefix + "_BEGIN_RANGE" + expandSuffix + " = " + minName + ",\n" + body += " " + expandPrefix + "_END_RANGE" + expandSuffix + " = " + maxName + ",\n" + body += " " + expandPrefix + "_RANGE_SIZE" + expandSuffix + " = (" + maxName + " - " + minName + " + 1),\n" + + body += " " + expandPrefix + "_MAX_ENUM" + expandSuffix + " = 0x7FFFFFFF\n" + + # Postfix + body += "} " + groupName + ";" + + # After either enumerated type or alias paths, add the declaration + # to the appropriate section for the group being defined. + if groupElem.get('type') == 'bitmask': + section = 'bitmask' + else: + section = 'group' + self.appendSection(section, body) + + # Enumerant generation + # tags may specify their values in several ways, but are usually + # just integers. + def genEnum(self, enuminfo, name, alias): + OutputGenerator.genEnum(self, enuminfo, name, alias) + (numVal,strVal) = self.enumToValue(enuminfo.elem, False) + body = '#define ' + name.ljust(33) + ' ' + strVal + self.appendSection('enum', body) + + # + # Command generation + def genCmd(self, cmdinfo, name, alias): + OutputGenerator.genCmd(self, cmdinfo, name, alias) + + # if alias: + # prefix = '// ' + name + ' is an alias of command ' + alias + '\n' + # else: + # prefix = '' + + prefix = '' + decls = self.makeCDecls(cmdinfo.elem) + self.appendSection('command', prefix + decls[0] + '\n') + if (self.genOpts.genFuncPointers): + self.appendSection('commandPointer', decls[1]) diff --git a/registry/generator.py b/registry/generator.py new file mode 100644 index 0000000..a0f79ac --- /dev/null +++ b/registry/generator.py @@ -0,0 +1,595 @@ +#!/usr/bin/python3 -i +# +# Copyright (c) 2013-2018 The Khronos Group Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import unicode_literals +import io,os,re,sys,pdb + +def write( *args, **kwargs ): + file = kwargs.pop('file',sys.stdout) + end = kwargs.pop('end','\n') + file.write(' '.join([str(arg) for arg in args])) + file.write(end) + +# noneStr - returns string argument, or "" if argument is None. +# Used in converting etree Elements into text. +# str - string to convert +def noneStr(str): + if (str): + return str + else: + return "" + +# enquote - returns string argument with surrounding quotes, +# for serialization into Python code. +def enquote(str): + if (str): + return "'" + str + "'" + else: + return None + +# apiName - returns True if name is a Vulkan name (vk/Vk/VK prefix, or a +# function pointer type), False otherwise. +def apiName(str): + return str[0:2].lower() == 'vk' or str[0:3] == 'PFN' + +# Primary sort key for regSortFeatures. +# Sorts by category of the feature name string: +# Core API features (those defined with a tag) +# ARB/KHR/OES (Khronos extensions) +# other (EXT/vendor extensions) +# This will need changing for Vulkan! 
+def regSortCategoryKey(feature): + if (feature.elem.tag == 'feature'): + return 0 + elif (feature.category == 'ARB' or + feature.category == 'KHR' or + feature.category == 'OES'): + return 1 + else: + return 2 + +# Secondary sort key for regSortFeatures. +# Sorts by extension name. +def regSortNameKey(feature): + return feature.name + +# Second sort key for regSortFeatures. +# Sorts by feature version. elements all have version number "0" +def regSortFeatureVersionKey(feature): + return float(feature.versionNumber) + +# Tertiary sort key for regSortFeatures. +# Sorts by extension number. elements all have extension number 0. +def regSortExtensionNumberKey(feature): + return int(feature.number) + +# regSortFeatures - default sort procedure for features. +# Sorts by primary key of feature category ('feature' or 'extension') +# then by version number (for features) +# then by extension number (for extensions) +def regSortFeatures(featureList): + featureList.sort(key = regSortExtensionNumberKey) + featureList.sort(key = regSortFeatureVersionKey) + featureList.sort(key = regSortCategoryKey) + +# GeneratorOptions - base class for options used during header production +# These options are target language independent, and used by +# Registry.apiGen() and by base OutputGenerator objects. +# +# Members +# filename - basename of file to generate, or None to write to stdout. +# directory - directory in which to generate filename +# apiname - string matching 'apiname' attribute, e.g. 'gl'. +# profile - string specifying API profile , e.g. 'core', or None. +# versions - regex matching API versions to process interfaces for. +# Normally '.*' or '[0-9]\.[0-9]' to match all defined versions. +# emitversions - regex matching API versions to actually emit +# interfaces for (though all requested versions are considered +# when deciding which interfaces to generate). For GL 4.3 glext.h, +# this might be '1\.[2-5]|[2-4]\.[0-9]'. +# defaultExtensions - If not None, a string which must in its +# entirety match the pattern in the "supported" attribute of +# the . Defaults to None. Usually the same as apiname. +# addExtensions - regex matching names of additional extensions +# to include. Defaults to None. +# removeExtensions - regex matching names of extensions to +# remove (after defaultExtensions and addExtensions). Defaults +# to None. +# emitExtensions - regex matching names of extensions to actually emit +# interfaces for (though all requested versions are considered when +# deciding which interfaces to generate). +# sortProcedure - takes a list of FeatureInfo objects and sorts +# them in place to a preferred order in the generated output. +# Default is core API versions, ARB/KHR/OES extensions, all +# other extensions, alphabetically within each group. +# The regex patterns can be None or empty, in which case they match +# nothing. 
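# A small illustrative sketch of the regex conventions just described; the
# helper below simply mirrors GeneratorOptions.emptyRegex() so its behaviour
# can be exercised on its own (it only runs when this module is executed
# directly, not when imported by genvk.py).
if __name__ == '__main__':
    def _empty_regex(pat):
        # None or '' becomes a pattern that can never match a name
        return '_nomatch_^' if (pat is None or pat == '') else pat
    assert re.match(_empty_regex(None), 'VK_VERSION_1_0') is None
    assert re.match(_empty_regex('.*'), 'VK_VERSION_1_0') is not None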
+class GeneratorOptions: + """Represents options during header production from an API registry""" + def __init__(self, + filename = None, + directory = '.', + apiname = None, + profile = None, + versions = '.*', + emitversions = '.*', + defaultExtensions = None, + addExtensions = None, + removeExtensions = None, + emitExtensions = None, + sortProcedure = regSortFeatures): + self.filename = filename + self.directory = directory + self.apiname = apiname + self.profile = profile + self.versions = self.emptyRegex(versions) + self.emitversions = self.emptyRegex(emitversions) + self.defaultExtensions = defaultExtensions + self.addExtensions = self.emptyRegex(addExtensions) + self.removeExtensions = self.emptyRegex(removeExtensions) + self.emitExtensions = self.emptyRegex(emitExtensions) + self.sortProcedure = sortProcedure + # + # Substitute a regular expression which matches no version + # or extension names for None or the empty string. + def emptyRegex(self,pat): + if (pat == None or pat == ''): + return '_nomatch_^' + else: + return pat + +# OutputGenerator - base class for generating API interfaces. +# Manages basic logic, logging, and output file control +# Derived classes actually generate formatted output. +# +# ---- methods ---- +# OutputGenerator(errFile, warnFile, diagFile) +# errFile, warnFile, diagFile - file handles to write errors, +# warnings, diagnostics to. May be None to not write. +# logMsg(level, *args) - log messages of different categories +# level - 'error', 'warn', or 'diag'. 'error' will also +# raise a UserWarning exception +# *args - print()-style arguments +# setExtMap(map) - specify a dictionary map from extension names to +# numbers, used in creating values for extension enumerants. +# makeDir(directory) - create a directory, if not already done. +# Generally called from derived generators creating hierarchies. +# beginFile(genOpts) - start a new interface file +# genOpts - GeneratorOptions controlling what's generated and how +# endFile() - finish an interface file, closing it when done +# beginFeature(interface, emit) - write interface for a feature +# and tag generated features as having been done. +# interface - element for the / to generate +# emit - actually write to the header only when True +# endFeature() - finish an interface. +# genType(typeinfo,name,alias) - generate interface for a type +# typeinfo - TypeInfo for a type +# genStruct(typeinfo,name,alias) - generate interface for a C "struct" type. 
+# typeinfo - TypeInfo for a type interpreted as a struct +# genGroup(groupinfo,name,alias) - generate interface for a group of enums (C "enum") +# groupinfo - GroupInfo for a group +# genEnum(enuminfo,name,alias) - generate interface for an enum (constant) +# enuminfo - EnumInfo for an enum +# name - enum name +# genCmd(cmdinfo,name,alias) - generate interface for a command +# cmdinfo - CmdInfo for a command +# isEnumRequired(enumElem) - return True if this element is required +# elem - element to test +# makeCDecls(cmd) - return C prototype and function pointer typedef for a +# Element, as a list of two strings +# cmd - Element for the +# newline() - print a newline to the output file (utility function) +# +class OutputGenerator: + """Generate specified API interfaces in a specific style, such as a C header""" + # + # categoryToPath - map XML 'category' to include file directory name + categoryToPath = { + 'bitmask' : 'flags', + 'enum' : 'enums', + 'funcpointer' : 'funcpointers', + 'handle' : 'handles', + 'define' : 'defines', + 'basetype' : 'basetypes', + } + # + # Constructor + def __init__(self, + errFile = sys.stderr, + warnFile = sys.stderr, + diagFile = sys.stdout): + self.outFile = None + self.errFile = errFile + self.warnFile = warnFile + self.diagFile = diagFile + # Internal state + self.featureName = None + self.genOpts = None + self.registry = None + # Used for extension enum value generation + self.extBase = 1000000000 + self.extBlockSize = 1000 + self.madeDirs = {} + # + # logMsg - write a message of different categories to different + # destinations. + # level - + # 'diag' (diagnostic, voluminous) + # 'warn' (warning) + # 'error' (fatal error - raises exception after logging) + # *args - print()-style arguments to direct to corresponding log + def logMsg(self, level, *args): + """Log a message at the given level. Can be ignored or log to a file""" + if (level == 'error'): + strfile = io.StringIO() + write('ERROR:', *args, file=strfile) + if (self.errFile != None): + write(strfile.getvalue(), file=self.errFile) + raise UserWarning(strfile.getvalue()) + elif (level == 'warn'): + if (self.warnFile != None): + write('WARNING:', *args, file=self.warnFile) + elif (level == 'diag'): + if (self.diagFile != None): + write('DIAG:', *args, file=self.diagFile) + else: + raise UserWarning( + '*** FATAL ERROR in Generator.logMsg: unknown level:' + level) + # + # enumToValue - parses and converts an tag into a value. + # Returns a list + # first element - integer representation of the value, or None + # if needsNum is False. The value must be a legal number + # if needsNum is True. + # second element - string representation of the value + # There are several possible representations of values. + # A 'value' attribute simply contains the value. + # A 'bitpos' attribute defines a value by specifying the bit + # position which is set in that value. + # A 'offset','extbase','extends' triplet specifies a value + # as an offset to a base value defined by the specified + # 'extbase' extension name, which is then cast to the + # typename specified by 'extends'. This requires probing + # the registry database, and imbeds knowledge of the + # Vulkan extension enum scheme in this function. + # A 'alias' attribute contains the name of another enum + # which this is an alias of. The other enum must be + # declared first when emitting this enum. 
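    # A worked illustration of the offset scheme described above (the
    # extension number and offset are invented for the example; extBase and
    # extBlockSize are the values assigned in __init__):
    #
    #     value = 1000000000 + (99 - 1) * 1000 + 3   # extension 99, offset 3
    #           = 1000098003
    #
    # and the presence of a 'dir' attribute makes the resulting value
    # negative.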
+ def enumToValue(self, elem, needsNum): + name = elem.get('name') + numVal = None + if ('value' in elem.keys()): + value = elem.get('value') + # print('About to translate value =', value, 'type =', type(value)) + if (needsNum): + numVal = int(value, 0) + # If there's a non-integer, numeric 'type' attribute (e.g. 'u' or + # 'ull'), append it to the string value. + # t = enuminfo.elem.get('type') + # if (t != None and t != '' and t != 'i' and t != 's'): + # value += enuminfo.type + self.logMsg('diag', 'Enum', name, '-> value [', numVal, ',', value, ']') + return [numVal, value] + if ('bitpos' in elem.keys()): + value = elem.get('bitpos') + numVal = int(value, 0) + numVal = 1 << numVal + value = '0x%08x' % numVal + self.logMsg('diag', 'Enum', name, '-> bitpos [', numVal, ',', value, ']') + return [numVal, value] + if ('offset' in elem.keys()): + # Obtain values in the mapping from the attributes + enumNegative = False + offset = int(elem.get('offset'),0) + extnumber = int(elem.get('extnumber'),0) + extends = elem.get('extends') + if ('dir' in elem.keys()): + enumNegative = True + self.logMsg('diag', 'Enum', name, 'offset =', offset, + 'extnumber =', extnumber, 'extends =', extends, + 'enumNegative =', enumNegative) + # Now determine the actual enumerant value, as defined + # in the "Layers and Extensions" appendix of the spec. + numVal = self.extBase + (extnumber - 1) * self.extBlockSize + offset + if (enumNegative): + numVal = -numVal + value = '%d' % numVal + # More logic needed! + self.logMsg('diag', 'Enum', name, '-> offset [', numVal, ',', value, ']') + return [numVal, value] + if 'alias' in elem.keys(): + return [None, elem.get('alias')] + return [None, None] + # + # checkDuplicateEnums - sanity check for enumerated values + # enums - list of Elements + # returns the list with duplicates stripped + def checkDuplicateEnums(self, enums): + # Dictionaries indexed by name and numeric value. + # Entries are [ Element, numVal, strVal ] matching name or value + + nameMap = {} + valueMap = {} + + stripped = [] + for elem in enums: + name = elem.get('name') + (numVal, strVal) = self.enumToValue(elem, True) + + if name in nameMap: + # Duplicate name found; check values + (name2, numVal2, strVal2) = nameMap[name] + + # Duplicate enum values for the same name are benign. This + # happens when defining the same enum conditionally in + # several extension blocks. + if (strVal2 == strVal or (numVal != None and + numVal == numVal2)): + True + # self.logMsg('info', 'checkDuplicateEnums: Duplicate enum (' + name + + # ') found with the same value:' + strVal) + else: + self.logMsg('warn', 'checkDuplicateEnums: Duplicate enum (' + name + + ') found with different values:' + strVal + + ' and ' + strVal2) + + # Don't add the duplicate to the returned list + continue + elif numVal in valueMap: + # Duplicate value found (such as an alias); report it, but + # still add this enum to the list. 
+ (name2, numVal2, strVal2) = valueMap[numVal] + + try: + self.logMsg('warn', 'Two enums found with the same value: ' + + name + ' = ' + name2.get('name') + ' = ' + strVal) + except: + pdb.set_trace() + + # Track this enum to detect followon duplicates + nameMap[name] = [ elem, numVal, strVal ] + if numVal != None: + valueMap[numVal] = [ elem, numVal, strVal ] + + # Add this enum to the list + stripped.append(elem) + + # Return the list + return stripped + # + def makeDir(self, path): + self.logMsg('diag', 'OutputGenerator::makeDir(' + path + ')') + if not (path in self.madeDirs.keys()): + # This can get race conditions with multiple writers, see + # https://stackoverflow.com/questions/273192/ + if not os.path.exists(path): + os.makedirs(path) + self.madeDirs[path] = None + # + def beginFile(self, genOpts): + self.genOpts = genOpts + # + # Open specified output file. Not done in constructor since a + # Generator can be used without writing to a file. + if (self.genOpts.filename != None): + filename = self.genOpts.directory + '/' + self.genOpts.filename + self.outFile = io.open(filename, 'w', encoding='utf-8') + else: + self.outFile = sys.stdout + def endFile(self): + self.errFile and self.errFile.flush() + self.warnFile and self.warnFile.flush() + self.diagFile and self.diagFile.flush() + self.outFile.flush() + if (self.outFile != sys.stdout and self.outFile != sys.stderr): + self.outFile.close() + self.genOpts = None + # + def beginFeature(self, interface, emit): + self.emit = emit + self.featureName = interface.get('name') + # If there's an additional 'protect' attribute in the feature, save it + self.featureExtraProtect = interface.get('protect') + def endFeature(self): + # Derived classes responsible for emitting feature + self.featureName = None + self.featureExtraProtect = None + # Utility method to validate we're generating something only inside a + # tag + def validateFeature(self, featureType, featureName): + if (self.featureName == None): + raise UserWarning('Attempt to generate', featureType, + featureName, 'when not in feature') + # + # Type generation + def genType(self, typeinfo, name, alias): + self.validateFeature('type', name) + # + # Struct (e.g. C "struct" type) generation + def genStruct(self, typeinfo, name, alias): + self.validateFeature('struct', name) + + # The mixed-mode tags may contain no-op tags. + # It is convenient to remove them here where all output generators + # will benefit. + for member in typeinfo.elem.findall('.//member'): + for comment in member.findall('comment'): + member.remove(comment) + # + # Group (e.g. C "enum" type) generation + def genGroup(self, groupinfo, name, alias): + self.validateFeature('group', name) + # + # Enumerant (really, constant) generation + def genEnum(self, enuminfo, name, alias): + self.validateFeature('enum', name) + # + # Command generation + def genCmd(self, cmd, name, alias): + self.validateFeature('command', name) + # + # Utility functions - turn a into C-language prototype + # and typedef declarations for that name. + # name - contents of tag + # tail - whatever text follows that tag in the Element + def makeProtoName(self, name, tail): + return self.genOpts.apientry + name + tail + def makeTypedefName(self, name, tail): + return '(' + self.genOpts.apientryp + 'PFN_' + name + tail + ')' + # + # makeCParamDecl - return a string which is an indented, formatted + # declaration for a or block (e.g. function parameter + # or structure/union member). 
+ # param - Element ( or ) to format + # aligncol - if non-zero, attempt to align the nested element + # at this column + def makeCParamDecl(self, param, aligncol): + paramdecl = ' ' + noneStr(param.text) + for elem in param: + text = noneStr(elem.text) + tail = noneStr(elem.tail) + if (elem.tag == 'name' and aligncol > 0): + self.logMsg('diag', 'Aligning parameter', elem.text, 'to column', self.genOpts.alignFuncParam) + # Align at specified column, if possible + paramdecl = paramdecl.rstrip() + oldLen = len(paramdecl) + # This works around a problem where very long type names - + # longer than the alignment column - would run into the tail + # text. + paramdecl = paramdecl.ljust(aligncol-1) + ' ' + newLen = len(paramdecl) + self.logMsg('diag', 'Adjust length of parameter decl from', oldLen, 'to', newLen, ':', paramdecl) + paramdecl += text + tail + return paramdecl + # + # getCParamTypeLength - return the length of the type field is an indented, formatted + # declaration for a or block (e.g. function parameter + # or structure/union member). + # param - Element ( or ) to identify + def getCParamTypeLength(self, param): + paramdecl = ' ' + noneStr(param.text) + for elem in param: + text = noneStr(elem.text) + tail = noneStr(elem.tail) + if (elem.tag == 'name'): + # Align at specified column, if possible + newLen = len(paramdecl.rstrip()) + self.logMsg('diag', 'Identifying length of', elem.text, 'as', newLen) + paramdecl += text + tail + return newLen + # + # isEnumRequired(elem) - return True if this element is + # required, False otherwise + # elem - element to test + def isEnumRequired(self, elem): + required = elem.get('required') != None + self.logMsg('diag', 'isEnumRequired:', elem.get('name'), + '->', required) + return required + + #@@@ This code is overridden by equivalent code now run in + #@@@ Registry.generateFeature + + required = False + + extname = elem.get('extname') + if extname is not None: + # 'supported' attribute was injected when the element was + # moved into the group in Registry.parseTree() + if self.genOpts.defaultExtensions == elem.get('supported'): + required = True + elif re.match(self.genOpts.addExtensions, extname) is not None: + required = True + elif elem.get('version') is not None: + required = re.match(self.genOpts.emitversions, elem.get('version')) is not None + else: + required = True + + return required + + # + # makeCDecls - return C prototype and function pointer typedef for a + # command, as a two-element list of strings. + # cmd - Element containing a tag + def makeCDecls(self, cmd): + """Generate C function pointer typedef for Element""" + proto = cmd.find('proto') + params = cmd.findall('param') + # Begin accumulating prototype and typedef strings + pdecl = self.genOpts.apicall + tdecl = 'typedef ' + # + # Insert the function return type/name. + # For prototypes, add APIENTRY macro before the name + # For typedefs, add (APIENTRY *) around the name and + # use the PFN_cmdnameproc naming convention. + # Done by walking the tree for element by element. + # etree has elem.text followed by (elem[i], elem[i].tail) + # for each child element and any following text + # Leading text + pdecl += noneStr(proto.text) + tdecl += noneStr(proto.text) + # For each child element, if it's a wrap in appropriate + # declaration. Otherwise append its contents and tail contents. 
+ for elem in proto: + text = noneStr(elem.text) + tail = noneStr(elem.tail) + if (elem.tag == 'name'): + pdecl += self.makeProtoName(text, tail) + tdecl += self.makeTypedefName(text, tail) + else: + pdecl += text + tail + tdecl += text + tail + # Now add the parameter declaration list, which is identical + # for prototypes and typedefs. Concatenate all the text from + # a node without the tags. No tree walking required + # since all tags are ignored. + # Uses: self.indentFuncProto + # self.indentFuncPointer + # self.alignFuncParam + # Might be able to doubly-nest the joins, e.g. + # ','.join(('_'.join([l[i] for i in range(0,len(l))]) + n = len(params) + # Indented parameters + if n > 0: + indentdecl = '(\n' + for i in range(0,n): + paramdecl = self.makeCParamDecl(params[i], self.genOpts.alignFuncParam) + if (i < n - 1): + paramdecl += ',\n' + else: + paramdecl += ');' + indentdecl += paramdecl + else: + indentdecl = '(void);' + # Non-indented parameters + paramdecl = '(' + if n > 0: + for i in range(0,n): + paramdecl += ''.join([t for t in params[i].itertext()]) + if (i < n - 1): + paramdecl += ', ' + else: + paramdecl += 'void' + paramdecl += ");"; + return [ pdecl + indentdecl, tdecl + paramdecl ] + # + def newline(self): + write('', file=self.outFile) + + def setRegistry(self, registry): + self.registry = registry + # diff --git a/registry/genvk.py b/registry/genvk.py new file mode 100644 index 0000000..184ff0b --- /dev/null +++ b/registry/genvk.py @@ -0,0 +1,529 @@ +#!/usr/bin/python3 +# +# Copyright (c) 2013-2018 The Khronos Group Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse, cProfile, pdb, string, sys, time +from reg import * +from generator import write +from cgenerator import CGeneratorOptions, COutputGenerator +from docgenerator import DocGeneratorOptions, DocOutputGenerator +from extensionmetadocgenerator import ExtensionMetaDocGeneratorOptions, ExtensionMetaDocOutputGenerator +from pygenerator import PyOutputGenerator +from validitygenerator import ValidityOutputGenerator +from hostsyncgenerator import HostSynchronizationOutputGenerator +from extensionStubSource import ExtensionStubSourceOutputGenerator + +# Simple timer functions +startTime = None + +def startTimer(timeit): + global startTime + startTime = time.clock() + +def endTimer(timeit, msg): + global startTime + endTime = time.clock() + if (timeit): + write(msg, endTime - startTime, file=sys.stderr) + startTime = None + +# Turn a list of strings into a regexp string matching exactly those strings +def makeREstring(list, default = None): + if len(list) > 0 or default == None: + return '^(' + '|'.join(list) + ')$' + else: + return default + +# Returns a directory of [ generator function, generator options ] indexed +# by specified short names. The generator options incorporate the following +# parameters: +# +# args is an parsed argument object; see below for the fields that are used. 
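# Small usage sketch for makeREstring() above (the extension names are only
# examples): a list of names becomes one anchored alternation, so only exact,
# whole-name matches succeed.
import re   # only needed for this illustration

_example_pat = makeREstring(['VK_KHR_xcb_surface', 'VK_KHR_xlib_surface'])
assert _example_pat == '^(VK_KHR_xcb_surface|VK_KHR_xlib_surface)$'
assert re.match(_example_pat, 'VK_KHR_xcb_surface') is not None
assert re.match(_example_pat, 'VK_KHR_xcb') is None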
+def makeGenOpts(args): + global genOpts + genOpts = {} + + # Default class of extensions to include, or None + defaultExtensions = args.defaultExtensions + + # Additional extensions to include (list of extensions) + extensions = args.extension + + # Extensions to remove (list of extensions) + removeExtensions = args.removeExtensions + + # Extensions to emit (list of extensions) + emitExtensions = args.emitExtensions + + # Features to include (list of features) + features = args.feature + + # Whether to disable inclusion protect in headers + protect = args.protect + + # Output target directory + directory = args.directory + + # Descriptive names for various regexp patterns used to select + # versions and extensions + allFeatures = allExtensions = '.*' + noFeatures = noExtensions = None + + # Turn lists of names/patterns into matching regular expressions + addExtensionsPat = makeREstring(extensions, None) + removeExtensionsPat = makeREstring(removeExtensions, None) + emitExtensionsPat = makeREstring(emitExtensions, allExtensions) + featuresPat = makeREstring(features, allFeatures) + + # Copyright text prefixing all headers (list of strings). + prefixStrings = [ + '/*', + '** Copyright (c) 2015-2018 The Khronos Group Inc.', + '**', + '** Licensed under the Apache License, Version 2.0 (the "License");', + '** you may not use this file except in compliance with the License.', + '** You may obtain a copy of the License at', + '**', + '** http://www.apache.org/licenses/LICENSE-2.0', + '**', + '** Unless required by applicable law or agreed to in writing, software', + '** distributed under the License is distributed on an "AS IS" BASIS,', + '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.', + '** See the License for the specific language governing permissions and', + '** limitations under the License.', + '*/', + '' + ] + + # Text specific to Vulkan headers + vkPrefixStrings = [ + '/*', + '** This header is generated from the Khronos Vulkan XML API Registry.', + '**', + '*/', + '' + ] + + # Defaults for generating re-inclusion protection wrappers (or not) + protectFile = protect + protectFeature = protect + protectProto = protect + + # API include files for spec and ref pages + # Overwrites include subdirectories in spec source tree + # The generated include files do not include the calling convention + # macros (apientry etc.), unlike the header files. + # Because the 1.0 core branch includes ref pages for extensions, + # all the extension interfaces need to be generated, even though + # none are used by the core spec itself. 
+ genOpts['apiinc'] = [ + DocOutputGenerator, + DocGeneratorOptions( + filename = 'timeMarker', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = featuresPat, + defaultExtensions = None, + addExtensions = addExtensionsPat, + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat, + prefixText = prefixStrings + vkPrefixStrings, + apicall = '', + apientry = '', + apientryp = '*', + alignFuncParam = 48, + expandEnumerants = False) + ] + + # API names to validate man/api spec includes & links + genOpts['vkapi.py'] = [ + PyOutputGenerator, + DocGeneratorOptions( + filename = 'vkapi.py', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = featuresPat, + defaultExtensions = None, + addExtensions = addExtensionsPat, + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat) + ] + + # API validity files for spec + genOpts['validinc'] = [ + ValidityOutputGenerator, + DocGeneratorOptions( + filename = 'timeMarker', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = featuresPat, + defaultExtensions = None, + addExtensions = addExtensionsPat, + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat) + ] + + # API host sync table files for spec + genOpts['hostsyncinc'] = [ + HostSynchronizationOutputGenerator, + DocGeneratorOptions( + filename = 'timeMarker', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = featuresPat, + defaultExtensions = None, + addExtensions = addExtensionsPat, + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat) + ] + + # Extension stub source dispatcher + genOpts['vulkan_ext.c'] = [ + ExtensionStubSourceOutputGenerator, + CGeneratorOptions( + filename = 'vulkan_ext.c', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = None, + defaultExtensions = None, + addExtensions = '.*', + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat, + prefixText = prefixStrings + vkPrefixStrings, + alignFuncParam = 48) + ] + + # Extension metainformation for spec extension appendices + genOpts['extinc'] = [ + ExtensionMetaDocOutputGenerator, + ExtensionMetaDocGeneratorOptions( + filename = 'timeMarker', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = None, + defaultExtensions = defaultExtensions, + addExtensions = None, + removeExtensions = None, + emitExtensions = emitExtensionsPat) + ] + + # Platform extensions, in their own header files + # Each element of the platforms[] array defines information for + # generating a single platform: + # [0] is the generated header file name + # [1] is the set of platform extensions to generate + # [2] is additional extensions whose interfaces should be considered, + # but suppressed in the output, to avoid duplicate definitions of + # dependent types like VkDisplayKHR and VkSurfaceKHR which come from + # non-platform extensions. + + # Track all platform extensions, for exclusion from vulkan_core.h + allPlatformExtensions = [] + + # Extensions suppressed for all platforms. + # Covers common WSI extension types. 
+ commonSuppressExtensions = [ 'VK_KHR_display', 'VK_KHR_swapchain' ] + + platforms = [ + [ 'vulkan_android.h', [ 'VK_KHR_android_surface', + 'VK_ANDROID_external_memory_android_hardware_buffer' + ], commonSuppressExtensions ], + [ 'vulkan_ios.h', [ 'VK_MVK_ios_surface' ], commonSuppressExtensions ], + [ 'vulkan_macos.h', [ 'VK_MVK_macos_surface' ], commonSuppressExtensions ], + [ 'vulkan_mir.h', [ 'VK_KHR_mir_surface' ], commonSuppressExtensions ], + [ 'vulkan_vi.h', [ 'VK_NN_vi_surface' ], commonSuppressExtensions ], + [ 'vulkan_wayland.h', [ 'VK_KHR_wayland_surface' ], commonSuppressExtensions ], + [ 'vulkan_win32.h', [ 'VK_.*_win32(|_.*)' ], commonSuppressExtensions + [ 'VK_KHR_external_semaphore', 'VK_KHR_external_memory_capabilities', 'VK_KHR_external_fence', 'VK_KHR_external_fence_capabilities', 'VK_NV_external_memory_capabilities' ] ], + [ 'vulkan_xcb.h', [ 'VK_KHR_xcb_surface' ], commonSuppressExtensions ], + [ 'vulkan_xlib.h', [ 'VK_KHR_xlib_surface' ], commonSuppressExtensions ], + [ 'vulkan_xlib_xrandr.h', [ 'VK_EXT_acquire_xlib_display' ], commonSuppressExtensions ], + ] + + for platform in platforms: + headername = platform[0] + + allPlatformExtensions += platform[1] + + addPlatformExtensionsRE = makeREstring(platform[1] + platform[2]) + emitPlatformExtensionsRE = makeREstring(platform[1]) + + opts = CGeneratorOptions( + filename = headername, + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = None, + defaultExtensions = None, + addExtensions = addPlatformExtensionsRE, + removeExtensions = None, + emitExtensions = emitPlatformExtensionsRE, + prefixText = prefixStrings + vkPrefixStrings, + genFuncPointers = True, + protectFile = protectFile, + protectFeature = False, + protectProto = '#ifndef', + protectProtoStr = 'VK_NO_PROTOTYPES', + apicall = 'VKAPI_ATTR ', + apientry = 'VKAPI_CALL ', + apientryp = 'VKAPI_PTR *', + alignFuncParam = 48) + + genOpts[headername] = [ COutputGenerator, opts ] + + # Header for core API + extensions. + # To generate just the core API, + # change to 'defaultExtensions = None' below. + # + # By default this adds all enabled, non-platform extensions. + # It removes all platform extensions (from the platform headers options + # constructed above) as well as any explicitly specified removals. + + removeExtensionsPat = makeREstring(allPlatformExtensions + removeExtensions, None) + + genOpts['vulkan_core.h'] = [ + COutputGenerator, + CGeneratorOptions( + filename = 'vulkan_core.h', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = featuresPat, + defaultExtensions = defaultExtensions, + addExtensions = None, + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat, + prefixText = prefixStrings + vkPrefixStrings, + genFuncPointers = True, + protectFile = protectFile, + protectFeature = False, + protectProto = '#ifndef', + protectProtoStr = 'VK_NO_PROTOTYPES', + apicall = 'VKAPI_ATTR ', + apientry = 'VKAPI_CALL ', + apientryp = 'VKAPI_PTR *', + alignFuncParam = 48) + ] + + # Unused - vulkan10.h target. + # It is possible to generate a header with just the Vulkan 1.0 + + # extension interfaces defined, but since the promoted KHR extensions + # are now defined in terms of the 1.1 interfaces, such a header is very + # similar to vulkan_core.h. 
+ genOpts['vulkan10.h'] = [ + COutputGenerator, + CGeneratorOptions( + filename = 'vulkan10.h', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = 'VK_VERSION_1_0', + emitversions = 'VK_VERSION_1_0', + defaultExtensions = defaultExtensions, + addExtensions = None, + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat, + prefixText = prefixStrings + vkPrefixStrings, + genFuncPointers = True, + protectFile = protectFile, + protectFeature = False, + protectProto = '#ifndef', + protectProtoStr = 'VK_NO_PROTOTYPES', + apicall = 'VKAPI_ATTR ', + apientry = 'VKAPI_CALL ', + apientryp = 'VKAPI_PTR *', + alignFuncParam = 48) + ] + + genOpts['alias.h'] = [ + COutputGenerator, + CGeneratorOptions( + filename = 'alias.h', + directory = directory, + apiname = 'vulkan', + profile = None, + versions = featuresPat, + emitversions = featuresPat, + defaultExtensions = defaultExtensions, + addExtensions = None, + removeExtensions = removeExtensionsPat, + emitExtensions = emitExtensionsPat, + prefixText = None, + genFuncPointers = False, + protectFile = False, + protectFeature = False, + protectProto = '', + protectProtoStr = '', + apicall = '', + apientry = '', + apientryp = '', + alignFuncParam = 36) + ] + +# Generate a target based on the options in the matching genOpts{} object. +# This is encapsulated in a function so it can be profiled and/or timed. +# The args parameter is an parsed argument object containing the following +# fields that are used: +# target - target to generate +# directory - directory to generate it in +# protect - True if re-inclusion wrappers should be created +# extensions - list of additional extensions to include in generated +# interfaces +def genTarget(args): + global genOpts + + # Create generator options with specified parameters + makeGenOpts(args) + + if (args.target in genOpts.keys()): + createGenerator = genOpts[args.target][0] + options = genOpts[args.target][1] + + if not args.quiet: + write('* Building', options.filename, file=sys.stderr) + write('* options.versions =', options.versions, file=sys.stderr) + write('* options.emitversions =', options.emitversions, file=sys.stderr) + write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr) + write('* options.addExtensions =', options.addExtensions, file=sys.stderr) + write('* options.removeExtensions =', options.removeExtensions, file=sys.stderr) + write('* options.emitExtensions =', options.emitExtensions, file=sys.stderr) + + startTimer(args.time) + gen = createGenerator(errFile=errWarn, + warnFile=errWarn, + diagFile=diag) + reg.setGenerator(gen) + reg.apiGen(options) + + if not args.quiet: + write('* Generated', options.filename, file=sys.stderr) + endTimer(args.time, '* Time to generate ' + options.filename + ' =') + else: + write('No generator options for unknown target:', + args.target, file=sys.stderr) + +# -feature name +# -extension name +# For both, "name" may be a single name, or a space-separated list +# of names, or a regular expression. 
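# For example (extension names and paths are illustrative), the
# space-separated form
#
#     python3 genvk.py -registry vk.xml -o ../include/vulkan \
#         -extension 'VK_KHR_xcb_surface VK_KHR_xlib_surface' vulkan_core.h
#
# is split into individual names by the list comprehensions below and then
# folded into a single anchored regex by makeREstring() inside makeGenOpts().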
+if __name__ == '__main__': + parser = argparse.ArgumentParser() + + parser.add_argument('-defaultExtensions', action='store', + default='vulkan', + help='Specify a single class of extensions to add to targets') + parser.add_argument('-extension', action='append', + default=[], + help='Specify an extension or extensions to add to targets') + parser.add_argument('-removeExtensions', action='append', + default=[], + help='Specify an extension or extensions to remove from targets') + parser.add_argument('-emitExtensions', action='append', + default=[], + help='Specify an extension or extensions to emit in targets') + parser.add_argument('-feature', action='append', + default=[], + help='Specify a core API feature name or names to add to targets') + parser.add_argument('-debug', action='store_true', + help='Enable debugging') + parser.add_argument('-dump', action='store_true', + help='Enable dump to stderr') + parser.add_argument('-diagfile', action='store', + default=None, + help='Write diagnostics to specified file') + parser.add_argument('-errfile', action='store', + default=None, + help='Write errors and warnings to specified file instead of stderr') + parser.add_argument('-noprotect', dest='protect', action='store_false', + help='Disable inclusion protection in output headers') + parser.add_argument('-profile', action='store_true', + help='Enable profiling') + parser.add_argument('-registry', action='store', + default='vk.xml', + help='Use specified registry file instead of vk.xml') + parser.add_argument('-time', action='store_true', + help='Enable timing') + parser.add_argument('-validate', action='store_true', + help='Enable group validation') + parser.add_argument('-o', action='store', dest='directory', + default='.', + help='Create target and related files in specified directory') + parser.add_argument('target', metavar='target', nargs='?', + help='Specify target') + parser.add_argument('-quiet', action='store_true', default=True, + help='Suppress script output during normal execution.') + parser.add_argument('-verbose', action='store_false', dest='quiet', default=True, + help='Enable script output during normal execution.') + + args = parser.parse_args() + + # This splits arguments which are space-separated lists + args.feature = [name for arg in args.feature for name in arg.split()] + args.extension = [name for arg in args.extension for name in arg.split()] + + # Load & parse registry + reg = Registry() + + startTimer(args.time) + tree = etree.parse(args.registry) + endTimer(args.time, '* Time to make ElementTree =') + + if args.debug: + pdb.run('reg.loadElementTree(tree)') + else: + startTimer(args.time) + reg.loadElementTree(tree) + endTimer(args.time, '* Time to parse ElementTree =') + + if (args.validate): + reg.validateGroups() + + if (args.dump): + write('* Dumping registry to regdump.txt', file=sys.stderr) + reg.dumpReg(filehandle = open('regdump.txt', 'w', encoding='utf-8')) + + # create error/warning & diagnostic files + if (args.errfile): + errWarn = open(args.errfile, 'w', encoding='utf-8') + else: + errWarn = sys.stderr + + if (args.diagfile): + diag = open(args.diagfile, 'w', encoding='utf-8') + else: + diag = None + + if (args.debug): + pdb.run('genTarget(args)') + elif (args.profile): + import cProfile, pstats + cProfile.run('genTarget(args)', 'profile.txt') + p = pstats.Stats('profile.txt') + p.strip_dirs().sort_stats('time').print_stats(50) + else: + genTarget(args) diff --git a/registry/reg.py b/registry/reg.py new file mode 100644 index 0000000..5a59e03 --- 
/dev/null +++ b/registry/reg.py @@ -0,0 +1,1059 @@ +#!/usr/bin/python3 -i +# +# Copyright (c) 2013-2018 The Khronos Group Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io,os,pdb,re,string,sys,copy +import xml.etree.ElementTree as etree +from collections import defaultdict + +# matchAPIProfile - returns whether an API and profile +# being generated matches an element's profile +# api - string naming the API to match +# profile - string naming the profile to match +# elem - Element which (may) have 'api' and 'profile' +# attributes to match to. +# If a tag is not present in the Element, the corresponding API +# or profile always matches. +# Otherwise, the tag must exactly match the API or profile. +# Thus, if 'profile' = core: +# with no attribute will match +# will match +# will not match +# Possible match conditions: +# Requested Element +# Profile Profile +# --------- -------- +# None None Always matches +# 'string' None Always matches +# None 'string' Does not match. Can't generate multiple APIs +# or profiles, so if an API/profile constraint +# is present, it must be asked for explicitly. +# 'string' 'string' Strings must match +# +# ** In the future, we will allow regexes for the attributes, +# not just strings, so that api="^(gl|gles2)" will match. Even +# this isn't really quite enough, we might prefer something +# like "gl(core)|gles1(common-lite)". +def matchAPIProfile(api, profile, elem): + """Match a requested API & profile name to a api & profile attributes of an Element""" + match = True + # Match 'api', if present + if ('api' in elem.attrib): + if (api == None): + raise UserWarning("No API requested, but 'api' attribute is present with value '" + + elem.get('api') + "'") + elif (api != elem.get('api')): + # Requested API doesn't match attribute + return False + if ('profile' in elem.attrib): + if (profile == None): + raise UserWarning("No profile requested, but 'profile' attribute is present with value '" + + elem.get('profile') + "'") + elif (profile != elem.get('profile')): + # Requested profile doesn't match attribute + return False + return True + +# BaseInfo - base class for information about a registry feature +# (type/group/enum/command/API/extension). +# required - should this feature be defined during header generation +# (has it been removed by a profile or version)? +# declared - has this feature been defined already? +# elem - etree Element for this feature +# resetState() - reset required/declared to initial values. Used +# prior to generating a new API interface. +# compareElem(info) - return True if self.elem and info.elem have the +# same definition. +class BaseInfo: + """Represents the state of a registry feature, used during API generation""" + def __init__(self, elem): + self.required = False + self.declared = False + self.elem = elem + def resetState(self): + self.required = False + self.declared = False + def compareElem(self, info): + # Just compares the tag and attributes. + # @@ This should be virtualized. 
In particular, comparing + # tags requires special-casing on the attributes, as 'extnumber' is + # only relevant when 'offset' is present. + selfKeys = sorted(self.elem.keys()) + infoKeys = sorted(info.elem.keys()) + + if selfKeys != infoKeys: + return False + + # Ignore value of 'extname', as this will inherently be different + # when redefining the same interface in different feature and/or + # extension blocks. + for key in selfKeys: + if (key != 'extname' and + (self.elem.get(key) != info.elem.get(key))): + return False + + return True + +# TypeInfo - registry information about a type. No additional state +# beyond BaseInfo is required. +class TypeInfo(BaseInfo): + """Represents the state of a registry type""" + def __init__(self, elem): + BaseInfo.__init__(self, elem) + self.additionalValidity = [] + self.removedValidity = [] + def resetState(self): + BaseInfo.resetState(self) + self.additionalValidity = [] + self.removedValidity = [] + +# GroupInfo - registry information about a group of related enums +# in an block, generally corresponding to a C "enum" type. +class GroupInfo(BaseInfo): + """Represents the state of a registry group""" + def __init__(self, elem): + BaseInfo.__init__(self, elem) + +# EnumInfo - registry information about an enum +# type - numeric type of the value of the tag +# ( '' for GLint, 'u' for GLuint, 'ull' for GLuint64 ) +class EnumInfo(BaseInfo): + """Represents the state of a registry enum""" + def __init__(self, elem): + BaseInfo.__init__(self, elem) + self.type = elem.get('type') + if (self.type == None): + self.type = '' + +# CmdInfo - registry information about a command +class CmdInfo(BaseInfo): + """Represents the state of a registry command""" + def __init__(self, elem): + BaseInfo.__init__(self, elem) + self.additionalValidity = [] + self.removedValidity = [] + def resetState(self): + BaseInfo.resetState(self) + self.additionalValidity = [] + self.removedValidity = [] + +# FeatureInfo - registry information about an API +# or +# name - feature name string (e.g. 'VK_KHR_surface') +# version - feature version number (e.g. 1.2). +# features are unversioned and assigned version number 0. +# ** This is confusingly taken from the 'number' attribute of . +# Needs fixing. +# number - extension number, used for ordering and for +# assigning enumerant offsets. features do +# not have extension numbers and are assigned number 0. +# category - category, e.g. VERSION or khr/vendor tag +# emit - has this feature been defined already? +class FeatureInfo(BaseInfo): + """Represents the state of an API feature (version/extension)""" + def __init__(self, elem): + BaseInfo.__init__(self, elem) + self.name = elem.get('name') + # Determine element category (vendor). Only works + # for elements. 
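        # (For an extension element, the category below is the vendor/author
        # tag taken from the name, e.g. 'VK_KHR_surface'.split('_', 2)[1]
        # yields 'KHR'.)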
+ if (elem.tag == 'feature'): + self.category = 'VERSION' + self.version = elem.get('name') + self.versionNumber = elem.get('number') + self.number = "0" + self.supported = None + else: + self.category = self.name.split('_', 2)[1] + self.version = "0" + self.versionNumber = "0" + self.number = elem.get('number') + self.supported = elem.get('supported') + self.emit = False + +from generator import write, GeneratorOptions, OutputGenerator + +# Registry - object representing an API registry, loaded from an XML file +# Members +# tree - ElementTree containing the root +# typedict - dictionary of TypeInfo objects keyed by type name +# groupdict - dictionary of GroupInfo objects keyed by group name +# enumdict - dictionary of EnumInfo objects keyed by enum name +# cmddict - dictionary of CmdInfo objects keyed by command name +# apidict - dictionary of Elements keyed by API name +# extensions - list of Elements +# extdict - dictionary of Elements keyed by extension name +# gen - OutputGenerator object used to write headers / messages +# genOpts - GeneratorOptions object used to control which +# fetures to write and how to format them +# emitFeatures - True to actually emit features for a version / extension, +# or False to just treat them as emitted +# breakPat - regexp pattern to break on when generatng names +# Public methods +# loadElementTree(etree) - load registry from specified ElementTree +# loadFile(filename) - load registry from XML file +# setGenerator(gen) - OutputGenerator to use +# breakOnName() - specify a feature name regexp to break on when +# generating features. +# parseTree() - parse the registry once loaded & create dictionaries +# dumpReg(maxlen, filehandle) - diagnostic to dump the dictionaries +# to specified file handle (default stdout). Truncates type / +# enum / command elements to maxlen characters (default 80) +# generator(g) - specify the output generator object +# apiGen(apiname, genOpts) - generate API headers for the API type +# and profile specified in genOpts, but only for the versions and +# extensions specified there. +# apiReset() - call between calls to apiGen() to reset internal state +# Private methods +# addElementInfo(elem,info,infoName,dictionary) - add feature info to dict +# lookupElementInfo(fname,dictionary) - lookup feature info in dict +class Registry: + """Represents an API registry loaded from XML""" + def __init__(self): + self.tree = None + self.typedict = {} + self.groupdict = {} + self.enumdict = {} + self.cmddict = {} + self.apidict = {} + self.extensions = [] + self.requiredextensions = [] # Hack - can remove it after validity generator goes away + self.validextensionstructs = defaultdict(list) + self.extdict = {} + # A default output generator, so commands prior to apiGen can report + # errors via the generator object. + self.gen = OutputGenerator() + self.genOpts = None + self.emitFeatures = False + self.breakPat = None + # self.breakPat = re.compile('VkFenceImportFlagBits.*') + def loadElementTree(self, tree): + """Load ElementTree into a Registry object and parse it""" + self.tree = tree + self.parseTree() + def loadFile(self, file): + """Load an API registry XML file into a Registry object and parse it""" + self.tree = etree.parse(file) + self.parseTree() + def setGenerator(self, gen): + """Specify output generator object. 
None restores the default generator""" + self.gen = gen + self.gen.setRegistry(self) + + # addElementInfo - add information about an element to the + # corresponding dictionary + # elem - ///// Element + # info - corresponding {Type|Group|Enum|Cmd|Feature}Info object + # infoName - 'type' / 'group' / 'enum' / 'command' / 'feature' / 'extension' + # dictionary - self.{type|group|enum|cmd|api|ext}dict + # If the Element has an 'api' attribute, the dictionary key is the + # tuple (name,api). If not, the key is the name. 'name' is an + # attribute of the Element + def addElementInfo(self, elem, info, infoName, dictionary): + # self.gen.logMsg('diag', 'Adding ElementInfo.required =', + # info.required, 'name =', elem.get('name')) + + if ('api' in elem.attrib): + key = (elem.get('name'),elem.get('api')) + else: + key = elem.get('name') + if key in dictionary: + if not dictionary[key].compareElem(info): + self.gen.logMsg('warn', 'Attempt to redefine', key, + 'with different value (this may be benign)') + #else: + # self.gen.logMsg('warn', 'Benign redefinition of', key, + # 'with identical value') + else: + dictionary[key] = info + # + # lookupElementInfo - find a {Type|Enum|Cmd}Info object by name. + # If an object qualified by API name exists, use that. + # fname - name of type / enum / command + # dictionary - self.{type|enum|cmd}dict + def lookupElementInfo(self, fname, dictionary): + key = (fname, self.genOpts.apiname) + if (key in dictionary): + # self.gen.logMsg('diag', 'Found API-specific element for feature', fname) + return dictionary[key] + elif (fname in dictionary): + # self.gen.logMsg('diag', 'Found generic element for feature', fname) + return dictionary[fname] + else: + return None + def breakOnName(self, regexp): + self.breakPat = re.compile(regexp) + def parseTree(self): + """Parse the registry Element, once created""" + # This must be the Element for the root + self.reg = self.tree.getroot() + # + # Create dictionary of registry types from toplevel tags + # and add 'name' attribute to each tag (where missing) + # based on its element. + # + # There's usually one block; more are OK + # Required attributes: 'name' or nested tag contents + self.typedict = {} + for type in self.reg.findall('types/type'): + # If the doesn't already have a 'name' attribute, set + # it from contents of its tag. + if (type.get('name') == None): + type.attrib['name'] = type.find('name').text + self.addElementInfo(type, TypeInfo(type), 'type', self.typedict) + # + # Create dictionary of registry enum groups from tags. + # + # Required attributes: 'name'. If no name is given, one is + # generated, but that group can't be identified and turned into an + # enum type definition - it's just a container for tags. + self.groupdict = {} + for group in self.reg.findall('enums'): + self.addElementInfo(group, GroupInfo(group), 'group', self.groupdict) + # + # Create dictionary of registry enums from tags + # + # tags usually define different namespaces for the values + # defined in those tags, but the actual names all share the + # same dictionary. + # Required attributes: 'name', 'value' + # For containing which have type="enum" or type="bitmask", + # tag all contained s are required. This is a stopgap until + # a better scheme for tagging core and extension enums is created. 
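As a rough, standalone illustration of that stopgap (an editorial sketch, not part of the patched reg.py; it assumes the registry/vk.xml added elsewhere in this patch is available locally), the blocks whose child enums are pre-marked as required are exactly those carrying a type attribute:

    import xml.etree.ElementTree as etree

    reg = etree.parse('vk.xml').getroot()
    for enums in reg.findall('enums'):
        # Mirrors required = (enums.get('type') != None) in parseTree():
        # blocks declared type="enum" or type="bitmask" have every child
        # pre-marked required; untyped blocks defer to <require> processing.
        marked = enums.get('type') is not None
        print('required' if marked else 'deferred', enums.get('name'))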
+ self.enumdict = {} + for enums in self.reg.findall('enums'): + required = (enums.get('type') != None) + for enum in enums.findall('enum'): + enumInfo = EnumInfo(enum) + enumInfo.required = required + self.addElementInfo(enum, enumInfo, 'enum', self.enumdict) + # self.gen.logMsg('diag', 'parseTree: marked req =', + # required, 'for', enum.get('name')) + # + # Create dictionary of registry commands from tags + # and add 'name' attribute to each tag (where missing) + # based on its element. + # + # There's usually only one block; more are OK. + # Required attributes: 'name' or tag contents + self.cmddict = {} + # List of commands which alias others. Contains + # [ aliasName, element ] + # for each alias + cmdAlias = [] + for cmd in self.reg.findall('commands/command'): + # If the doesn't already have a 'name' attribute, set + # it from contents of its tag. + name = cmd.get('name') + if name == None: + name = cmd.attrib['name'] = cmd.find('proto/name').text + ci = CmdInfo(cmd) + self.addElementInfo(cmd, ci, 'command', self.cmddict) + alias = cmd.get('alias') + if alias: + cmdAlias.append([name, alias, cmd]) + # Now loop over aliases, injecting a copy of the aliased command's + # Element with the aliased prototype name replaced with the command + # name - if it exists. + for (name, alias, cmd) in cmdAlias: + if alias in self.cmddict: + #@ pdb.set_trace() + aliasInfo = self.cmddict[alias] + cmdElem = copy.deepcopy(aliasInfo.elem) + cmdElem.find('proto/name').text = name + cmdElem.attrib['name'] = name + cmdElem.attrib['alias'] = alias + ci = CmdInfo(cmdElem) + # Replace the dictionary entry for the CmdInfo element + self.cmddict[name] = ci + + #@ newString = etree.tostring(base, encoding="unicode").replace(aliasValue, aliasName) + #@elem.append(etree.fromstring(replacement)) + else: + self.gen.logMsg('warn', 'No matching found for command', + cmd.get('name'), 'alias', alias) + + # + # Create dictionaries of API and extension interfaces + # from toplevel and tags. + # + self.apidict = {} + for feature in self.reg.findall('feature'): + featureInfo = FeatureInfo(feature) + self.addElementInfo(feature, featureInfo, 'feature', self.apidict) + + # Add additional enums defined only in tags + # to the corresponding core type. + # When seen here, the element, processed to contain the + # numeric enum value, is added to the corresponding + # element, as well as adding to the enum dictionary. It is + # *removed* from the element it is introduced in. + # Not doing this will cause spurious genEnum() + # calls to be made in output generation, and it's easier + # to handle here than in genEnum(). + # + # In lxml.etree, an Element can have only one parent, so the + # append() operation also removes the element. But in Python's + # ElementTree package, an Element can have multiple parents. So + # it must be explicitly removed from the tag, leading + # to the nested loop traversal of / elements + # below. + # + # This code also adds a 'version' attribute containing the + # api version. + # + # For tags which are actually just constants, if there's + # no 'extends' tag but there is a 'value' or 'bitpos' tag, just + # add an EnumInfo record to the dictionary. That works because + # output generation of constants is purely dependency-based, and + # doesn't need to iterate through the XML tags. 
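Before the loop that implements this, the append()/remove() mechanics may be easier to see in isolation; a minimal sketch with made-up element content (the names below are placeholders, not registry entries):

    import xml.etree.ElementTree as etree

    group = etree.fromstring('<enums name="VkExampleFlagBits" type="bitmask"/>')
    require = etree.fromstring('<require>'
                               '<enum name="VK_EXAMPLE_PLACEHOLDER_BIT" '
                               'extends="VkExampleFlagBits" bitpos="4"/>'
                               '</require>')
    enum = require.find('enum')
    enum.attrib['version'] = 'VK_VERSION_1_1'   # version attribute added on promotion
    group.append(enum)     # the enums group now holds the element ...
    require.remove(enum)   # ... and it must be removed from <require> explicitly
    print(etree.tostring(group, encoding='unicode'))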
+ # + for elem in feature.findall('require'): + for enum in elem.findall('enum'): + addEnumInfo = False + groupName = enum.get('extends') + if (groupName != None): + # self.gen.logMsg('diag', 'Found extension enum', + # enum.get('name')) + # Add version number attribute to the element + enum.attrib['version'] = featureInfo.version + # Look up the GroupInfo with matching groupName + if (groupName in self.groupdict.keys()): + # self.gen.logMsg('diag', 'Matching group', + # groupName, 'found, adding element...') + gi = self.groupdict[groupName] + gi.elem.append(enum) + # Remove element from parent tag + # This should be a no-op in lxml.etree + elem.remove(enum) + else: + self.gen.logMsg('warn', 'NO matching group', + groupName, 'for enum', enum.get('name'), 'found.') + addEnumInfo = True + elif (enum.get('value') or enum.get('bitpos') or enum.get('alias')): + # self.gen.logMsg('diag', 'Adding extension constant "enum"', + # enum.get('name')) + addEnumInfo = True + if (addEnumInfo): + enumInfo = EnumInfo(enum) + self.addElementInfo(enum, enumInfo, 'enum', self.enumdict) + + self.extensions = self.reg.findall('extensions/extension') + self.extdict = {} + for feature in self.extensions: + featureInfo = FeatureInfo(feature) + self.addElementInfo(feature, featureInfo, 'extension', self.extdict) + + # Add additional enums defined only in tags + # to the corresponding core type. + # Algorithm matches that of enums in a "feature" tag as above. + # + # This code also adds a 'extnumber' attribute containing the + # extension number, used for enumerant value calculation. + # + for elem in feature.findall('require'): + for enum in elem.findall('enum'): + addEnumInfo = False + groupName = enum.get('extends') + if (groupName != None): + # self.gen.logMsg('diag', 'Found extension enum', + # enum.get('name')) + + # Add block's extension number attribute to + # the element unless specified explicitly, such + # as when redefining an enum in another extension. 
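For orientation, the extension number recorded here is what the generator later combines with an enum's offset to produce its numeric value; a hedged recomputation of that scheme (base and block size as used by the Vulkan registry; the numbers fed in below are purely illustrative):

    def extension_enum_value(extnumber, offset, direction='+'):
        # Extension enumerants occupy per-extension blocks of 1000 values
        # starting at 1000000000: base + (extnumber - 1) * 1000 + offset.
        value = 1000000000 + (extnumber - 1) * 1000 + offset
        return -value if direction == '-' else value

    # e.g. extension number 2, offset 0 -> 1000001000
    print(extension_enum_value(2, 0))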
+ extnumber = enum.get('extnumber') + if not extnumber: + enum.attrib['extnumber'] = featureInfo.number + + enum.attrib['extname'] = featureInfo.name + enum.attrib['supported'] = featureInfo.supported + # Look up the GroupInfo with matching groupName + if (groupName in self.groupdict.keys()): + # self.gen.logMsg('diag', 'Matching group', + # groupName, 'found, adding element...') + gi = self.groupdict[groupName] + gi.elem.append(enum) + # Remove element from parent tag + # This should be a no-op in lxml.etree + elem.remove(enum) + else: + self.gen.logMsg('warn', 'NO matching group', + groupName, 'for enum', enum.get('name'), 'found.') + addEnumInfo = True + elif (enum.get('value') or enum.get('bitpos') or enum.get('alias')): + # self.gen.logMsg('diag', 'Adding extension constant "enum"', + # enum.get('name')) + addEnumInfo = True + if (addEnumInfo): + enumInfo = EnumInfo(enum) + self.addElementInfo(enum, enumInfo, 'enum', self.enumdict) + + # Construct a "validextensionstructs" list for parent structures + # based on "structextends" tags in child structures + for type in self.reg.findall('types/type'): + parentStructs = type.get('structextends') + if (parentStructs != None): + for parent in parentStructs.split(','): + # self.gen.logMsg('diag', type.get('name'), 'extends', parent) + self.validextensionstructs[parent].append(type.get('name')) + # Sort the lists so they don't depend on the XML order + for parent in self.validextensionstructs: + self.validextensionstructs[parent].sort() + + def dumpReg(self, maxlen = 120, filehandle = sys.stdout): + """Dump all the dictionaries constructed from the Registry object""" + write('***************************************', file=filehandle) + write(' ** Dumping Registry contents **', file=filehandle) + write('***************************************', file=filehandle) + write('// Types', file=filehandle) + for name in self.typedict: + tobj = self.typedict[name] + write(' Type', name, '->', etree.tostring(tobj.elem)[0:maxlen], file=filehandle) + write('// Groups', file=filehandle) + for name in self.groupdict: + gobj = self.groupdict[name] + write(' Group', name, '->', etree.tostring(gobj.elem)[0:maxlen], file=filehandle) + write('// Enums', file=filehandle) + for name in self.enumdict: + eobj = self.enumdict[name] + write(' Enum', name, '->', etree.tostring(eobj.elem)[0:maxlen], file=filehandle) + write('// Commands', file=filehandle) + for name in self.cmddict: + cobj = self.cmddict[name] + write(' Command', name, '->', etree.tostring(cobj.elem)[0:maxlen], file=filehandle) + write('// APIs', file=filehandle) + for key in self.apidict: + write(' API Version ', key, '->', + etree.tostring(self.apidict[key].elem)[0:maxlen], file=filehandle) + write('// Extensions', file=filehandle) + for key in self.extdict: + write(' Extension', key, '->', + etree.tostring(self.extdict[key].elem)[0:maxlen], file=filehandle) + # write('***************************************', file=filehandle) + # write(' ** Dumping XML ElementTree **', file=filehandle) + # write('***************************************', file=filehandle) + # write(etree.tostring(self.tree.getroot(),pretty_print=True), file=filehandle) + # + # typename - name of type + # required - boolean (to tag features as required or not) + def markTypeRequired(self, typename, required): + """Require (along with its dependencies) or remove (but not its dependencies) a type""" + self.gen.logMsg('diag', 'tagging type:', typename, '-> required =', required) + # Get TypeInfo object for tag corresponding to typename + 
type = self.lookupElementInfo(typename, self.typedict) + if (type != None): + if (required): + # Tag type dependencies in 'alias' and 'required' attributes as + # required. This DOES NOT un-tag dependencies in a + # tag. See comments in markRequired() below for the reason. + for attrib in [ 'requires', 'alias' ]: + depname = type.elem.get(attrib) + if depname: + self.gen.logMsg('diag', 'Generating dependent type', + depname, 'for', attrib, 'type', typename) + self.markTypeRequired(depname, required) + # Tag types used in defining this type (e.g. in nested + # tags) + # Look for in entire tree, + # not just immediate children + for subtype in type.elem.findall('.//type'): + self.gen.logMsg('diag', 'markRequired: type requires dependent ', subtype.text) + self.markTypeRequired(subtype.text, required) + # Tag enums used in defining this type, for example in + # member[MEMBER_SIZE] + for subenum in type.elem.findall('.//enum'): + self.gen.logMsg('diag', 'markRequired: type requires dependent ', subenum.text) + self.markEnumRequired(subenum.text, required) + type.required = required + else: + self.gen.logMsg('warn', 'type:', typename , 'IS NOT DEFINED') + # + # enumname - name of enum + # required - boolean (to tag features as required or not) + def markEnumRequired(self, enumname, required): + self.gen.logMsg('diag', 'tagging enum:', enumname, '-> required =', required) + enum = self.lookupElementInfo(enumname, self.enumdict) + if (enum != None): + enum.required = required + # Tag enum dependencies in 'alias' attribute as required + depname = enum.elem.get('alias') + if depname: + self.gen.logMsg('diag', 'Generating dependent enum', + depname, 'for alias', enumname, 'required =', enum.required) + self.markEnumRequired(depname, required) + else: + self.gen.logMsg('warn', 'enum:', enumname , 'IS NOT DEFINED') + # + # cmdname - name of command + # required - boolean (to tag features as required or not) + def markCmdRequired(self, cmdname, required): + self.gen.logMsg('diag', 'tagging command:', cmdname, '-> required =', required) + cmd = self.lookupElementInfo(cmdname, self.cmddict) + if (cmd != None): + cmd.required = required + # Tag command dependencies in 'alias' attribute as required + depname = cmd.elem.get('alias') + if depname: + self.gen.logMsg('diag', 'Generating dependent command', + depname, 'for alias', cmdname) + self.markCmdRequired(depname, required) + # Tag all parameter types of this command as required. + # This DOES NOT remove types of commands in a + # tag, because many other commands may use the same type. + # We could be more clever and reference count types, + # instead of using a boolean. + if (required): + # Look for in entire tree, + # not just immediate children + for type in cmd.elem.findall('.//type'): + self.gen.logMsg('diag', 'markRequired: command implicitly requires dependent type', type.text) + self.markTypeRequired(type.text, required) + else: + self.gen.logMsg('warn', 'command:', name, 'IS NOT DEFINED') + # + # features - Element for or tag + # required - boolean (to tag features as required or not) + def markRequired(self, features, required): + """Require or remove features specified in the Element""" + self.gen.logMsg('diag', 'markRequired (features = , required =', required, ')') + # Loop over types, enums, and commands in the tag + # @@ It would be possible to respect 'api' and 'profile' attributes + # in individual features, but that's not done yet. 
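The per-feature api/profile filtering mentioned above would reuse matchAPIProfile, defined near the top of this file; its behaviour can be shown standalone (a sketch that assumes reg.py and generator.py are importable, e.g. when run from this patch's registry/ directory):

    import xml.etree.ElementTree as etree
    from reg import matchAPIProfile

    plain  = etree.fromstring('<remove/>')
    tagged = etree.fromstring('<remove profile="core"/>')
    print(matchAPIProfile('vulkan', 'core', plain))     # True  - no attributes, always matches
    print(matchAPIProfile('vulkan', 'core', tagged))    # True  - requested profile matches
    print(matchAPIProfile('vulkan', 'compat', tagged))  # False - requested profile differs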
+ for typeElem in features.findall('type'): + self.markTypeRequired(typeElem.get('name'), required) + for enumElem in features.findall('enum'): + self.markEnumRequired(enumElem.get('name'), required) + for cmdElem in features.findall('command'): + self.markCmdRequired(cmdElem.get('name'), required) + # + # interface - Element for or , containing + # and tags + # api - string specifying API name being generated + # profile - string specifying API profile being generated + def requireAndRemoveFeatures(self, interface, api, profile): + """Process and tags for a or """ + # marks things that are required by this version/profile + for feature in interface.findall('require'): + if (matchAPIProfile(api, profile, feature)): + self.markRequired(feature,True) + # marks things that are removed by this version/profile + for feature in interface.findall('remove'): + if (matchAPIProfile(api, profile, feature)): + self.markRequired(feature,False) + + def assignAdditionalValidity(self, interface, api, profile): + # + # Loop over all usage inside all tags. + for feature in interface.findall('require'): + if (matchAPIProfile(api, profile, feature)): + for v in feature.findall('usage'): + if v.get('command'): + self.cmddict[v.get('command')].additionalValidity.append(copy.deepcopy(v)) + if v.get('struct'): + self.typedict[v.get('struct')].additionalValidity.append(copy.deepcopy(v)) + + # + # Loop over all usage inside all tags. + for feature in interface.findall('remove'): + if (matchAPIProfile(api, profile, feature)): + for v in feature.findall('usage'): + if v.get('command'): + self.cmddict[v.get('command')].removedValidity.append(copy.deepcopy(v)) + if v.get('struct'): + self.typedict[v.get('struct')].removedValidity.append(copy.deepcopy(v)) + + # + # generateFeature - generate a single type / enum group / enum / command, + # and all its dependencies as needed. + # fname - name of feature (//) + # ftype - type of feature, 'type' | 'enum' | 'command' + # dictionary - of *Info objects - self.{type|enum|cmd}dict + def generateFeature(self, fname, ftype, dictionary): + #@ # Break to debugger on matching name pattern + #@ if self.breakPat and re.match(self.breakPat, fname): + #@ pdb.set_trace() + + self.gen.logMsg('diag', 'generateFeature: generating', ftype, fname) + f = self.lookupElementInfo(fname, dictionary) + if (f == None): + # No such feature. This is an error, but reported earlier + self.gen.logMsg('diag', 'No entry found for feature', fname, + 'returning!') + return + # + # If feature isn't required, or has already been declared, return + if (not f.required): + self.gen.logMsg('diag', 'Skipping', ftype, fname, '(not required)') + return + if (f.declared): + self.gen.logMsg('diag', 'Skipping', ftype, fname, '(already declared)') + return + # Always mark feature declared, as though actually emitted + f.declared = True + + # Determine if this is an alias, and of what, if so + alias = f.elem.get('alias') + if alias: + self.gen.logMsg('diag', fname, 'is an alias of', alias) + + # + # Pull in dependent declaration(s) of the feature. + # For types, there may be one type in the 'required' attribute of + # the element, one in the 'alias' attribute, and many in + # imbedded and tags within the element. + # For commands, there may be many in tags within the element. + # For enums, no dependencies are allowed (though perhaps if you + # have a uint64 enum, it should require that type). 
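To make those dependency sources concrete, a small editorial sketch (assuming the registry/vk.xml from this patch, and using VkInstanceCreateInfo purely as an example name) that gathers the names one struct type would pull in:

    import xml.etree.ElementTree as etree

    reg = etree.parse('vk.xml').getroot()
    elem = reg.find("types/type[@name='VkInstanceCreateInfo']")
    deps = set()
    for attrib in ('requires', 'alias'):        # dependencies named in attributes
        if elem.get(attrib):
            deps.add(elem.get(attrib))
    for sub in elem.findall('.//type'):         # types named inside <member> tags
        deps.add(sub.text)
    for sub in elem.findall('.//enum'):         # array-size constants, if any
        deps.add(sub.text)
    print(sorted(deps))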
+ genProc = None + if (ftype == 'type'): + genProc = self.gen.genType + + # Generate type dependencies in 'alias' and 'required' attributes + if alias: + self.generateFeature(alias, 'type', self.typedict) + requires = f.elem.get('requires') + if requires: + self.generateFeature(requires, 'type', self.typedict) + + # Generate types used in defining this type (e.g. in nested + # tags) + # Look for in entire tree, + # not just immediate children + for subtype in f.elem.findall('.//type'): + self.gen.logMsg('diag', 'Generating required dependent ', + subtype.text) + self.generateFeature(subtype.text, 'type', self.typedict) + + # Generate enums used in defining this type, for example in + # member[MEMBER_SIZE] + for subtype in f.elem.findall('.//enum'): + self.gen.logMsg('diag', 'Generating required dependent ', + subtype.text) + self.generateFeature(subtype.text, 'enum', self.enumdict) + + # If the type is an enum group, look up the corresponding + # group in the group dictionary and generate that instead. + if (f.elem.get('category') == 'enum'): + self.gen.logMsg('diag', 'Type', fname, 'is an enum group, so generate that instead') + group = self.lookupElementInfo(fname, self.groupdict) + if alias != None: + # An alias of another group name. + # Pass to genGroup with 'alias' parameter = aliased name + self.gen.logMsg('diag', 'Generating alias', fname, + 'for enumerated type', alias) + # Now, pass the *aliased* GroupInfo to the genGroup, but + # with an additional parameter which is the alias name. + genProc = self.gen.genGroup + f = self.lookupElementInfo(alias, self.groupdict) + elif group == None: + self.gen.logMsg('warn', 'Skipping enum type', fname, + ': No matching enumerant group') + return + else: + genProc = self.gen.genGroup + f = group + + #@ The enum group is not ready for generation. At this + #@ point, it contains all tags injected by + #@ tags without any verification of whether + #@ they're required or not. It may also contain + #@ duplicates injected by multiple consistent + #@ definitions of an . + + #@ Pass over each enum, marking its enumdict[] entry as + #@ required or not. Mark aliases of enums as required, + #@ too. + + enums = group.elem.findall('enum') + + self.gen.logMsg('diag', 'generateFeature: checking enums for group', fname) + + # Check for required enums, including aliases + # LATER - Check for, report, and remove duplicates? 
+ enumAliases = [] + for elem in enums: + name = elem.get('name') + + required = False + + extname = elem.get('extname') + version = elem.get('version') + if extname is not None: + # 'supported' attribute was injected when the element was + # moved into the group in Registry.parseTree() + if self.genOpts.defaultExtensions == elem.get('supported'): + required = True + elif re.match(self.genOpts.addExtensions, extname) is not None: + required = True + elif version is not None: + required = re.match(self.genOpts.emitversions, version) is not None + else: + required = True + + self.gen.logMsg('diag', '* required =', required, 'for', name) + if required: + # Mark this element as required (in the element, not the EnumInfo) + elem.attrib['required'] = 'true' + # If it's an alias, track that for later use + enumAlias = elem.get('alias') + if enumAlias: + enumAliases.append(enumAlias) + for elem in enums: + name = elem.get('name') + if name in enumAliases: + elem.attrib['required'] = 'true' + self.gen.logMsg('diag', '* also need to require alias', name) + elif (ftype == 'command'): + # Generate command dependencies in 'alias' attribute + if alias: + self.generateFeature(alias, 'command', self.cmddict) + + genProc = self.gen.genCmd + for type in f.elem.findall('.//type'): + depname = type.text + self.gen.logMsg('diag', 'Generating required parameter type', + depname) + self.generateFeature(depname, 'type', self.typedict) + elif (ftype == 'enum'): + # Generate enum dependencies in 'alias' attribute + if alias: + self.generateFeature(alias, 'enum', self.enumdict) + genProc = self.gen.genEnum + + # Actually generate the type only if emitting declarations + if self.emitFeatures: + self.gen.logMsg('diag', 'Emitting', ftype, fname, 'declaration') + genProc(f, fname, alias) + else: + self.gen.logMsg('diag', 'Skipping', ftype, fname, + '(should not be emitted)') + # + # generateRequiredInterface - generate all interfaces required + # by an API version or extension + # interface - Element for or + def generateRequiredInterface(self, interface): + """Generate required C interface for specified API version/extension""" + + # + # Loop over all features inside all tags. + for features in interface.findall('require'): + for t in features.findall('type'): + self.generateFeature(t.get('name'), 'type', self.typedict) + for e in features.findall('enum'): + self.generateFeature(e.get('name'), 'enum', self.enumdict) + for c in features.findall('command'): + self.generateFeature(c.get('name'), 'command', self.cmddict) + + # + # apiGen(genOpts) - generate interface for specified versions + # genOpts - GeneratorOptions object with parameters used + # by the Generator object. 
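Before the definition itself, a rough sketch of how a caller in the spirit of genvk.py (also added by this patch) might drive the public methods listed earlier; the option values are illustrative assumptions, and the base OutputGenerator used here only validates the calls rather than emitting a real header:

    from reg import Registry
    from generator import GeneratorOptions, OutputGenerator

    registry = Registry()
    registry.loadFile('vk.xml')               # parse registry/vk.xml into the dictionaries
    registry.setGenerator(OutputGenerator())  # placeholder; genvk.py plugs in real generators
    opts = GeneratorOptions(apiname           = 'vulkan',
                            versions          = '.*',         # every VK_VERSION_* feature
                            emitversions      = '.*',
                            defaultExtensions = 'vulkan',      # extensions with supported="vulkan"
                            addExtensions     = '_nomatch_^',  # force-include nothing extra
                            removeExtensions  = '_nomatch_^',  # force-exclude nothing
                            emitExtensions    = '.*')
    registry.apiGen(opts)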
+ def apiGen(self, genOpts): + """Generate interfaces for the specified API type and range of versions""" + # + self.gen.logMsg('diag', '*******************************************') + self.gen.logMsg('diag', ' Registry.apiGen file:', genOpts.filename, + 'api:', genOpts.apiname, + 'profile:', genOpts.profile) + self.gen.logMsg('diag', '*******************************************') + # + self.genOpts = genOpts + # Reset required/declared flags for all features + self.apiReset() + # + # Compile regexps used to select versions & extensions + regVersions = re.compile(self.genOpts.versions) + regEmitVersions = re.compile(self.genOpts.emitversions) + regAddExtensions = re.compile(self.genOpts.addExtensions) + regRemoveExtensions = re.compile(self.genOpts.removeExtensions) + regEmitExtensions = re.compile(self.genOpts.emitExtensions) + # + # Get all matching API feature names & add to list of FeatureInfo + # Note we used to select on feature version attributes, not names. + features = [] + apiMatch = False + for key in self.apidict: + fi = self.apidict[key] + api = fi.elem.get('api') + if (api == self.genOpts.apiname): + apiMatch = True + if (regVersions.match(fi.name)): + # Matches API & version #s being generated. Mark for + # emission and add to the features[] list . + # @@ Could use 'declared' instead of 'emit'? + fi.emit = (regEmitVersions.match(fi.name) != None) + features.append(fi) + if (not fi.emit): + self.gen.logMsg('diag', 'NOT tagging feature api =', api, + 'name =', fi.name, 'version =', fi.version, + 'for emission (does not match emitversions pattern)') + else: + self.gen.logMsg('diag', 'Including feature api =', api, + 'name =', fi.name, 'version =', fi.version, + 'for emission (matches emitversions pattern)') + else: + self.gen.logMsg('diag', 'NOT including feature api =', api, + 'name =', fi.name, 'version =', fi.version, + '(does not match requested versions)') + else: + self.gen.logMsg('diag', 'NOT including feature api =', api, + 'name =', fi.name, + '(does not match requested API)') + if (not apiMatch): + self.gen.logMsg('warn', 'No matching API versions found!') + # + # Get all matching extensions, in order by their extension number, + # and add to the list of features. + # Start with extensions tagged with 'api' pattern matching the API + # being generated. Add extensions matching the pattern specified in + # regExtensions, then remove extensions matching the pattern + # specified in regRemoveExtensions + for (extName,ei) in sorted(self.extdict.items(),key = lambda x : x[1].number): + extName = ei.name + include = False + # + # Include extension if defaultExtensions is not None and if the + # 'supported' attribute matches defaultExtensions. The regexp in + # 'supported' must exactly match defaultExtensions, so bracket + # it with ^(pat)$. + pat = '^(' + ei.elem.get('supported') + ')$' + if (self.genOpts.defaultExtensions and + re.match(pat, self.genOpts.defaultExtensions)): + self.gen.logMsg('diag', 'Including extension', + extName, "(defaultExtensions matches the 'supported' attribute)") + include = True + # + # Include additional extensions if the extension name matches + # the regexp specified in the generator options. This allows + # forcing extensions into an interface even if they're not + # tagged appropriately in the registry. 
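Both inclusion paths described above come down to regular-expression matches; a tiny standalone illustration of the anchored 'supported' test (the addExtensions / removeExtensions tests that follow are plain matches against the extension name):

    import re

    for supported in ('vulkan', 'disabled'):   # example 'supported' attribute values
        pat = '^(' + supported + ')$'          # anchor so the attribute must match exactly
        include = re.match(pat, 'vulkan') is not None   # 'vulkan' stands in for defaultExtensions
        print(supported, '->', include)        # vulkan -> True, disabled -> False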
+ if (regAddExtensions.match(extName) != None): + self.gen.logMsg('diag', 'Including extension', + extName, '(matches explicitly requested extensions to add)') + include = True + # Remove extensions if the name matches the regexp specified + # in generator options. This allows forcing removal of + # extensions from an interface even if they're tagged that + # way in the registry. + if (regRemoveExtensions.match(extName) != None): + self.gen.logMsg('diag', 'Removing extension', + extName, '(matches explicitly requested extensions to remove)') + include = False + # + # If the extension is to be included, add it to the + # extension features list. + if (include): + ei.emit = (regEmitExtensions.match(extName) != None) + features.append(ei) + if (not ei.emit): + self.gen.logMsg('diag', 'NOT tagging extension', + extName, + 'for emission (does not match emitextensions pattern)') + # Hack - can be removed when validity generator goes away + # (Jon) I'm not sure what this does, or if it should respect + # the ei.emit flag above. + self.requiredextensions.append(extName) + else: + self.gen.logMsg('diag', 'NOT including extension', + extName, '(does not match api attribute or explicitly requested extensions)') + # + # Sort the extension features list, if a sort procedure is defined + if (self.genOpts.sortProcedure): + self.genOpts.sortProcedure(features) + # + # Pass 1: loop over requested API versions and extensions tagging + # types/commands/features as required (in an block) or no + # longer required (in an block). It is possible to remove + # a feature in one version and restore it later by requiring it in + # a later version. + # If a profile other than 'None' is being generated, it must + # match the profile attribute (if any) of the and + # tags. + self.gen.logMsg('diag', '*******PASS 1: TAG FEATURES **********') + for f in features: + self.gen.logMsg('diag', 'PASS 1: Tagging required and removed features for', + f.name) + self.requireAndRemoveFeatures(f.elem, self.genOpts.apiname, self.genOpts.profile) + self.assignAdditionalValidity(f.elem, self.genOpts.apiname, self.genOpts.profile) + # + # Pass 2: loop over specified API versions and extensions printing + # declarations for required things which haven't already been + # generated. + self.gen.logMsg('diag', '*******PASS 2: GENERATE INTERFACES FOR FEATURES **********') + self.gen.beginFile(self.genOpts) + for f in features: + self.gen.logMsg('diag', 'PASS 2: Generating interface for', + f.name) + emit = self.emitFeatures = f.emit + if (not emit): + self.gen.logMsg('diag', 'PASS 2: NOT declaring feature', + f.elem.get('name'), 'because it is not tagged for emission') + # Generate the interface (or just tag its elements as having been + # emitted, if they haven't been). 
+ self.gen.beginFeature(f.elem, emit) + self.generateRequiredInterface(f.elem) + self.gen.endFeature() + self.gen.endFile() + # + # apiReset - use between apiGen() calls to reset internal state + # + def apiReset(self): + """Reset type/enum/command dictionaries before generating another API""" + for type in self.typedict: + self.typedict[type].resetState() + for enum in self.enumdict: + self.enumdict[enum].resetState() + for cmd in self.cmddict: + self.cmddict[cmd].resetState() + for cmd in self.apidict: + self.apidict[cmd].resetState() + # + # validateGroups - check that group= attributes match actual groups + # + def validateGroups(self): + """Validate group= attributes on and tags""" + # Keep track of group names not in tags + badGroup = {} + self.gen.logMsg('diag', 'VALIDATING GROUP ATTRIBUTES ***') + for cmd in self.reg.findall('commands/command'): + proto = cmd.find('proto') + funcname = cmd.find('proto/name').text + if ('group' in proto.attrib.keys()): + group = proto.get('group') + # self.gen.logMsg('diag', 'Command ', funcname, ' has return group ', group) + if (group not in self.groupdict.keys()): + # self.gen.logMsg('diag', 'Command ', funcname, ' has UNKNOWN return group ', group) + if (group not in badGroup.keys()): + badGroup[group] = 1 + else: + badGroup[group] = badGroup[group] + 1 + for param in cmd.findall('param'): + pname = param.find('name') + if (pname != None): + pname = pname.text + else: + pname = type.get('name') + if ('group' in param.attrib.keys()): + group = param.get('group') + if (group not in self.groupdict.keys()): + # self.gen.logMsg('diag', 'Command ', funcname, ' param ', pname, ' has UNKNOWN group ', group) + if (group not in badGroup.keys()): + badGroup[group] = 1 + else: + badGroup[group] = badGroup[group] + 1 + if (len(badGroup.keys()) > 0): + self.gen.logMsg('diag', 'SUMMARY OF UNRECOGNIZED GROUPS ***') + for key in sorted(badGroup.keys()): + self.gen.logMsg('diag', ' ', key, ' occurred ', badGroup[key], ' times') diff --git a/registry/validusage.json b/registry/validusage.json new file mode 100644 index 0000000..d5bb46d --- /dev/null +++ b/registry/validusage.json @@ -0,0 +1,18392 @@ +{ + "version info": { + "schema version": 2, + "api version": "1.1.74", + "comment": "from git branch: master commit: c51545d33f4fd791dc4109d0d338e79b572f6286", + "date": "2018-04-23 18:29:18Z" + }, + "validation": { + "vkGetInstanceProcAddr": { + "core": [ + { + "vuid": "VUID-vkGetInstanceProcAddr-instance-parameter", + "text": " If instance is not NULL, instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkGetInstanceProcAddr-pName-parameter", + "text": " pName must be a null-terminated UTF-8 string" + } + ] + }, + "vkGetDeviceProcAddr": { + "core": [ + { + "vuid": "VUID-vkGetDeviceProcAddr-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceProcAddr-pName-parameter", + "text": " pName must be a null-terminated UTF-8 string" + } + ] + }, + "vkEnumerateInstanceVersion": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkEnumerateInstanceVersion-pApiVersion-parameter", + "text": " pApiVersion must be a valid pointer to a uint32_t value" + } + ] + }, + "vkCreateInstance": { + "core": [ + { + "vuid": "VUID-vkCreateInstance-ppEnabledExtensionNames-01388", + "text": " All &amp;lt;&amp;lt;extended-functionality-extensions-dependencies, required extensions&amp;gt;&amp;gt; for each extension in the VkInstanceCreateInfo::ppEnabledExtensionNames list must also be present in that list." 
+ }, + { + "vuid": "VUID-vkCreateInstance-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkInstanceCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateInstance-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateInstance-pInstance-parameter", + "text": " pInstance must be a valid pointer to a VkInstance handle" + } + ] + }, + "VkInstanceCreateInfo": { + "core": [ + { + "vuid": "VUID-VkInstanceCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO" + }, + { + "vuid": "VUID-VkInstanceCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDebugReportCallbackCreateInfoEXT, VkDebugUtilsMessengerCreateInfoEXT, or VkValidationFlagsEXT" + }, + { + "vuid": "VUID-VkInstanceCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkInstanceCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkInstanceCreateInfo-pApplicationInfo-parameter", + "text": " If pApplicationInfo is not NULL, pApplicationInfo must be a valid pointer to a valid VkApplicationInfo structure" + }, + { + "vuid": "VUID-VkInstanceCreateInfo-ppEnabledLayerNames-parameter", + "text": " If enabledLayerCount is not 0, ppEnabledLayerNames must be a valid pointer to an array of enabledLayerCount null-terminated UTF-8 strings" + }, + { + "vuid": "VUID-VkInstanceCreateInfo-ppEnabledExtensionNames-parameter", + "text": " If enabledExtensionCount is not 0, ppEnabledExtensionNames must be a valid pointer to an array of enabledExtensionCount null-terminated UTF-8 strings" + } + ] + }, + "VkValidationFlagsEXT": { + "(VK_EXT_validation_flags)": [ + { + "vuid": "VUID-VkValidationFlagsEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT" + }, + { + "vuid": "VUID-VkValidationFlagsEXT-pDisabledValidationChecks-parameter", + "text": " pDisabledValidationChecks must be a valid pointer to an array of disabledValidationCheckCount VkValidationCheckEXT values" + }, + { + "vuid": "VUID-VkValidationFlagsEXT-disabledValidationCheckCount-arraylength", + "text": " disabledValidationCheckCount must be greater than 0" + } + ] + }, + "VkApplicationInfo": { + "core": [ + { + "vuid": "VUID-VkApplicationInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_APPLICATION_INFO" + }, + { + "vuid": "VUID-VkApplicationInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkApplicationInfo-pApplicationName-parameter", + "text": " If pApplicationName is not NULL, pApplicationName must be a null-terminated UTF-8 string" + }, + { + "vuid": "VUID-VkApplicationInfo-pEngineName-parameter", + "text": " If pEngineName is not NULL, pEngineName must be a null-terminated UTF-8 string" + } + ] + }, + "vkDestroyInstance": { + "core": [ + { + "vuid": "VUID-vkDestroyInstance-instance-00629", + "text": " All child objects created using instance must have been destroyed prior to destroying instance" + }, + { + "vuid": "VUID-vkDestroyInstance-instance-00630", + "text": " If VkAllocationCallbacks were provided when instance was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyInstance-instance-00631", + "text": " If no VkAllocationCallbacks were provided when instance was created, pAllocator 
must be NULL" + }, + { + "vuid": "VUID-vkDestroyInstance-instance-parameter", + "text": " If instance is not NULL, instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkDestroyInstance-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + } + ] + }, + "vkEnumeratePhysicalDevices": { + "core": [ + { + "vuid": "VUID-vkEnumeratePhysicalDevices-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkEnumeratePhysicalDevices-pPhysicalDeviceCount-parameter", + "text": " pPhysicalDeviceCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkEnumeratePhysicalDevices-pPhysicalDevices-parameter", + "text": " If the value referenced by pPhysicalDeviceCount is not 0, and pPhysicalDevices is not NULL, pPhysicalDevices must be a valid pointer to an array of pPhysicalDeviceCount VkPhysicalDevice handles" + } + ] + }, + "vkGetPhysicalDeviceProperties": { + "core": [ + { + "vuid": "VUID-vkGetPhysicalDeviceProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceProperties-pProperties-parameter", + "text": " pProperties must be a valid pointer to a VkPhysicalDeviceProperties structure" + } + ] + }, + "vkGetPhysicalDeviceProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceProperties2-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceProperties2-pProperties-parameter", + "text": " pProperties must be a valid pointer to a VkPhysicalDeviceProperties2 structure" + } + ] + }, + "VkPhysicalDeviceProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkPhysicalDeviceProperties2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2" + }, + { + "vuid": "VUID-VkPhysicalDeviceProperties2-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceDescriptorIndexingPropertiesEXT, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDevicePointClippingProperties, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceSubgroupProperties, or VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT" + }, + { + "vuid": "VUID-VkPhysicalDeviceProperties2-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + } + ] + }, + "VkPhysicalDeviceIDProperties": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_VERSION_1_1,VK_KHR_external_memory_capabilities,VK_KHR_external_semaphore_capabilities,VK_KHR_external_fence_capabilities)": [ + { + "vuid": "VUID-VkPhysicalDeviceIDProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES" + } + ] + }, + 
"vkGetPhysicalDeviceQueueFamilyProperties": { + "core": [ + { + "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyPropertyCount-parameter", + "text": " pQueueFamilyPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties-pQueueFamilyProperties-parameter", + "text": " If the value referenced by pQueueFamilyPropertyCount is not 0, and pQueueFamilyProperties is not NULL, pQueueFamilyProperties must be a valid pointer to an array of pQueueFamilyPropertyCount VkQueueFamilyProperties structures" + } + ] + }, + "vkGetPhysicalDeviceQueueFamilyProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyPropertyCount-parameter", + "text": " pQueueFamilyPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceQueueFamilyProperties2-pQueueFamilyProperties-parameter", + "text": " If the value referenced by pQueueFamilyPropertyCount is not 0, and pQueueFamilyProperties is not NULL, pQueueFamilyProperties must be a valid pointer to an array of pQueueFamilyPropertyCount VkQueueFamilyProperties2 structures" + } + ] + }, + "VkQueueFamilyProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkQueueFamilyProperties2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2" + }, + { + "vuid": "VUID-VkQueueFamilyProperties2-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkEnumeratePhysicalDeviceGroups": { + "(VK_VERSION_1_1,VK_KHR_device_group_creation)": [ + { + "vuid": "VUID-vkEnumeratePhysicalDeviceGroups-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupCount-parameter", + "text": " pPhysicalDeviceGroupCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkEnumeratePhysicalDeviceGroups-pPhysicalDeviceGroupProperties-parameter", + "text": " If the value referenced by pPhysicalDeviceGroupCount is not 0, and pPhysicalDeviceGroupProperties is not NULL, pPhysicalDeviceGroupProperties must be a valid pointer to an array of pPhysicalDeviceGroupCount VkPhysicalDeviceGroupProperties structures" + } + ] + }, + "vkCreateDevice": { + "core": [ + { + "vuid": "VUID-vkCreateDevice-ppEnabledExtensionNames-01387", + "text": " All &amp;lt;&amp;lt;extended-functionality-extensions-dependencies, required extensions&amp;gt;&amp;gt; for each extension in the VkDeviceCreateInfo::ppEnabledExtensionNames list must also be present in that list." 
+ }, + { + "vuid": "VUID-vkCreateDevice-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkCreateDevice-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDeviceCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateDevice-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDevice-pDevice-parameter", + "text": " pDevice must be a valid pointer to a VkDevice handle" + } + ] + }, + "VkDeviceCreateInfo": { + "core": [ + { + "vuid": "VUID-VkDeviceCreateInfo-queueFamilyIndex-00372", + "text": "" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupDeviceCreateInfo, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDeviceDescriptorIndexingFeaturesEXT, VkPhysicalDeviceFeatures2, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceSamplerYcbcrConversionFeatures, or VkPhysicalDeviceVariablePointerFeatures" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-pQueueCreateInfos-parameter", + "text": " pQueueCreateInfos must be a valid pointer to an array of queueCreateInfoCount valid VkDeviceQueueCreateInfo structures" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-ppEnabledLayerNames-parameter", + "text": " If enabledLayerCount is not 0, ppEnabledLayerNames must be a valid pointer to an array of enabledLayerCount null-terminated UTF-8 strings" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-parameter", + "text": " If enabledExtensionCount is not 0, ppEnabledExtensionNames must be a valid pointer to an array of enabledExtensionCount null-terminated UTF-8 strings" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-pEnabledFeatures-parameter", + "text": " If pEnabledFeatures is not NULL, pEnabledFeatures must be a valid pointer to a valid VkPhysicalDeviceFeatures structure" + }, + { + "vuid": "VUID-VkDeviceCreateInfo-queueCreateInfoCount-arraylength", + "text": " queueCreateInfoCount must be greater than 0" + } + ], + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-pNext-00373", + "text": " If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL" + } + ], + "(VK_AMD_negative_viewport_height)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-01840", + "text": " ppEnabledExtensionNames must not contain VK_AMD_negative_viewport_height" + } + ], + "(VK_AMD_negative_viewport_height)+!(VK_VERSION_1_1)+(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkDeviceCreateInfo-ppEnabledExtensionNames-00374", + "text": " ppEnabledExtensionNames must not contain both VK_KHR_maintenance1 and VK_AMD_negative_viewport_height" + } + ] + }, + "VkDeviceGroupDeviceCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_device_group_creation)": [ + { + "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-00375", + "text": " Each element of 
pPhysicalDevices must be unique" + }, + { + "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-00376", + "text": " All elements of pPhysicalDevices must be in the same device group as enumerated by vkEnumeratePhysicalDeviceGroups" + }, + { + "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-physicalDeviceCount-00377", + "text": " If physicalDeviceCount is not 0, the physicalDevice parameter of vkCreateDevice must be an element of pPhysicalDevices." + }, + { + "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO" + }, + { + "vuid": "VUID-VkDeviceGroupDeviceCreateInfo-pPhysicalDevices-parameter", + "text": " If physicalDeviceCount is not 0, pPhysicalDevices must be a valid pointer to an array of physicalDeviceCount valid VkPhysicalDevice handles" + } + ] + }, + "vkDestroyDevice": { + "core": [ + { + "vuid": "VUID-vkDestroyDevice-device-00378", + "text": " All child objects created on device must have been destroyed prior to destroying device" + }, + { + "vuid": "VUID-vkDestroyDevice-device-00379", + "text": " If VkAllocationCallbacks were provided when device was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyDevice-device-00380", + "text": " If no VkAllocationCallbacks were provided when device was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyDevice-device-parameter", + "text": " If device is not NULL, device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyDevice-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + } + ] + }, + "VkDeviceQueueCreateInfo": { + "core": [ + { + "vuid": "VUID-VkDeviceQueueCreateInfo-queueFamilyIndex-00381", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties" + }, + { + "vuid": "VUID-VkDeviceQueueCreateInfo-queueCount-00382", + "text": " queueCount must be less than or equal to the queueCount member of the VkQueueFamilyProperties structure, as returned by vkGetPhysicalDeviceQueueFamilyProperties in the pQueueFamilyProperties[queueFamilyIndex]" + }, + { + "vuid": "VUID-VkDeviceQueueCreateInfo-pQueuePriorities-00383", + "text": " Each element of pQueuePriorities must be between 0.0 and 1.0 inclusive" + }, + { + "vuid": "VUID-VkDeviceQueueCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO" + }, + { + "vuid": "VUID-VkDeviceQueueCreateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDeviceQueueGlobalPriorityCreateInfoEXT" + }, + { + "vuid": "VUID-VkDeviceQueueCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkDeviceQueueCreateFlagBits values" + }, + { + "vuid": "VUID-VkDeviceQueueCreateInfo-pQueuePriorities-parameter", + "text": " pQueuePriorities must be a valid pointer to an array of queueCount float values" + }, + { + "vuid": "VUID-VkDeviceQueueCreateInfo-queueCount-arraylength", + "text": " queueCount must be greater than 0" + } + ] + }, + "VkDeviceQueueGlobalPriorityCreateInfoEXT": { + "(VK_EXT_global_priority)": [ + { + "vuid": "VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkDeviceQueueGlobalPriorityCreateInfoEXT-globalPriority-parameter", + "text": " globalPriority must be a valid 
VkQueueGlobalPriorityEXT value" + } + ] + }, + "vkGetDeviceQueue": { + "core": [ + { + "vuid": "VUID-vkGetDeviceQueue-queueFamilyIndex-00384", + "text": " queueFamilyIndex must be one of the queue family indices specified when device was created, via the VkDeviceQueueCreateInfo structure" + }, + { + "vuid": "VUID-vkGetDeviceQueue-queueIndex-00385", + "text": " queueIndex must be less than the number of queues created for the specified queue family index when device was created, via the queueCount member of the VkDeviceQueueCreateInfo structure" + }, + { + "vuid": "VUID-vkGetDeviceQueue-flags-01841", + "text": " VkDeviceQueueCreateInfo::flags must have been set to zero when device was created" + }, + { + "vuid": "VUID-vkGetDeviceQueue-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceQueue-pQueue-parameter", + "text": " pQueue must be a valid pointer to a VkQueue handle" + } + ] + }, + "vkGetDeviceQueue2": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkGetDeviceQueue2-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceQueue2-pQueueInfo-parameter", + "text": " pQueueInfo must be a valid pointer to a valid VkDeviceQueueInfo2 structure" + }, + { + "vuid": "VUID-vkGetDeviceQueue2-pQueue-parameter", + "text": " pQueue must be a valid pointer to a VkQueue handle" + } + ] + }, + "VkDeviceQueueInfo2": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkDeviceQueueInfo2-queueFamilyIndex-01842", + "text": " queueFamilyIndex must be one of the queue family indices specified when device was created, via the VkDeviceQueueCreateInfo structure" + }, + { + "vuid": "VUID-VkDeviceQueueInfo2-queueIndex-01843", + "text": " queueIndex must be less than the number of queues created for the specified queue family index and VkDeviceQueueCreateFlags member flags equal to this flags value when device was created, via the queueCount member of the VkDeviceQueueCreateInfo structure" + }, + { + "vuid": "VUID-VkDeviceQueueInfo2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2" + }, + { + "vuid": "VUID-VkDeviceQueueInfo2-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDeviceQueueInfo2-flags-parameter", + "text": " flags must be a valid combination of VkDeviceQueueCreateFlagBits values" + }, + { + "vuid": "VUID-VkDeviceQueueInfo2-flags-requiredbitmask", + "text": " flags must not be 0" + } + ] + }, + "vkCreateCommandPool": { + "core": [ + { + "vuid": "VUID-vkCreateCommandPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateCommandPool-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkCommandPoolCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateCommandPool-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateCommandPool-pCommandPool-parameter", + "text": " pCommandPool must be a valid pointer to a VkCommandPool handle" + } + ] + }, + "VkCommandPoolCreateInfo": { + "core": [ + { + "vuid": "VUID-VkCommandPoolCreateInfo-queueFamilyIndex-00039", + "text": " queueFamilyIndex must be the index of a queue family available in the calling command’s device parameter" + }, + { + "vuid": "VUID-VkCommandPoolCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO" + }, + { + "vuid": "VUID-VkCommandPoolCreateInfo-pNext-pNext", 
+ "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCommandPoolCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkCommandPoolCreateFlagBits values" + } + ] + }, + "vkTrimCommandPool": { + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-vkTrimCommandPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkTrimCommandPool-commandPool-parameter", + "text": " commandPool must be a valid VkCommandPool handle" + }, + { + "vuid": "VUID-vkTrimCommandPool-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-vkTrimCommandPool-commandPool-parent", + "text": " commandPool must have been created, allocated, or retrieved from device" + } + ] + }, + "vkResetCommandPool": { + "core": [ + { + "vuid": "VUID-vkResetCommandPool-commandPool-00040", + "text": " All VkCommandBuffer objects allocated from commandPool must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkResetCommandPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkResetCommandPool-commandPool-parameter", + "text": " commandPool must be a valid VkCommandPool handle" + }, + { + "vuid": "VUID-vkResetCommandPool-flags-parameter", + "text": " flags must be a valid combination of VkCommandPoolResetFlagBits values" + }, + { + "vuid": "VUID-vkResetCommandPool-commandPool-parent", + "text": " commandPool must have been created, allocated, or retrieved from device" + } + ] + }, + "vkDestroyCommandPool": { + "core": [ + { + "vuid": "VUID-vkDestroyCommandPool-commandPool-00041", + "text": " All VkCommandBuffer objects allocated from commandPool must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;." 
+ }, + { + "vuid": "VUID-vkDestroyCommandPool-commandPool-00042", + "text": " If VkAllocationCallbacks were provided when commandPool was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyCommandPool-commandPool-00043", + "text": " If no VkAllocationCallbacks were provided when commandPool was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyCommandPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyCommandPool-commandPool-parameter", + "text": " If commandPool is not VK_NULL_HANDLE, commandPool must be a valid VkCommandPool handle" + }, + { + "vuid": "VUID-vkDestroyCommandPool-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyCommandPool-commandPool-parent", + "text": " If commandPool is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkAllocateCommandBuffers": { + "core": [ + { + "vuid": "VUID-vkAllocateCommandBuffers-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkAllocateCommandBuffers-pAllocateInfo-parameter", + "text": " pAllocateInfo must be a valid pointer to a valid VkCommandBufferAllocateInfo structure" + }, + { + "vuid": "VUID-vkAllocateCommandBuffers-pCommandBuffers-parameter", + "text": " pCommandBuffers must be a valid pointer to an array of pAllocateInfo::commandBufferCount VkCommandBuffer handles" + } + ] + }, + "VkCommandBufferAllocateInfo": { + "core": [ + { + "vuid": "VUID-VkCommandBufferAllocateInfo-commandBufferCount-00044", + "text": " commandBufferCount must be greater than 0" + }, + { + "vuid": "VUID-VkCommandBufferAllocateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO" + }, + { + "vuid": "VUID-VkCommandBufferAllocateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCommandBufferAllocateInfo-commandPool-parameter", + "text": " commandPool must be a valid VkCommandPool handle" + }, + { + "vuid": "VUID-VkCommandBufferAllocateInfo-level-parameter", + "text": " level must be a valid VkCommandBufferLevel value" + } + ] + }, + "vkResetCommandBuffer": { + "core": [ + { + "vuid": "VUID-vkResetCommandBuffer-commandBuffer-00045", + "text": " commandBuffer must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkResetCommandBuffer-commandBuffer-00046", + "text": " commandBuffer must have been allocated from a pool that was created with the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT" + }, + { + "vuid": "VUID-vkResetCommandBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkResetCommandBuffer-flags-parameter", + "text": " flags must be a valid combination of VkCommandBufferResetFlagBits values" + } + ] + }, + "vkFreeCommandBuffers": { + "core": [ + { + "vuid": "VUID-vkFreeCommandBuffers-pCommandBuffers-00047", + "text": " All elements of pCommandBuffers must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkFreeCommandBuffers-pCommandBuffers-00048", + "text": " pCommandBuffers must be a valid pointer to an array of commandBufferCount VkCommandBuffer handles, each element of which must either be a valid handle or NULL" + }, + { + "vuid": 
"VUID-vkFreeCommandBuffers-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkFreeCommandBuffers-commandPool-parameter", + "text": " commandPool must be a valid VkCommandPool handle" + }, + { + "vuid": "VUID-vkFreeCommandBuffers-commandBufferCount-arraylength", + "text": " commandBufferCount must be greater than 0" + }, + { + "vuid": "VUID-vkFreeCommandBuffers-commandPool-parent", + "text": " commandPool must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkFreeCommandBuffers-pCommandBuffers-parent", + "text": " Each element of pCommandBuffers that is a valid handle must have been created, allocated, or retrieved from commandPool" + } + ] + }, + "vkBeginCommandBuffer": { + "core": [ + { + "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00049", + "text": " commandBuffer must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording or pending state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00050", + "text": " If commandBuffer was allocated from a VkCommandPool which did not have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT flag set, commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, initial state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00051", + "text": " If commandBuffer is a secondary command buffer, the pInheritanceInfo member of pBeginInfo must be a valid VkCommandBufferInheritanceInfo structure" + }, + { + "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-00052", + "text": " If commandBuffer is a secondary command buffer and either the occlusionQueryEnable member of the pInheritanceInfo member of pBeginInfo is VK_FALSE, or the precise occlusion queries feature is not enabled, the queryFlags member of the pInheritanceInfo member pBeginInfo must not contain VK_QUERY_CONTROL_PRECISE_BIT" + }, + { + "vuid": "VUID-vkBeginCommandBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkBeginCommandBuffer-pBeginInfo-parameter", + "text": " pBeginInfo must be a valid pointer to a valid VkCommandBufferBeginInfo structure" + } + ] + }, + "VkCommandBufferBeginInfo": { + "core": [ + { + "vuid": "VUID-VkCommandBufferBeginInfo-flags-00053", + "text": " If flags contains VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, the renderPass member of pInheritanceInfo must be a valid VkRenderPass" + }, + { + "vuid": "VUID-VkCommandBufferBeginInfo-flags-00054", + "text": " If flags contains VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, the subpass member of pInheritanceInfo must be a valid subpass index within the renderPass member of pInheritanceInfo" + }, + { + "vuid": "VUID-VkCommandBufferBeginInfo-flags-00055", + "text": " If flags contains VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, the framebuffer member of pInheritanceInfo must be either VK_NULL_HANDLE, or a valid VkFramebuffer that is compatible with the renderPass member of pInheritanceInfo" + }, + { + "vuid": "VUID-VkCommandBufferBeginInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO" + }, + { + "vuid": "VUID-VkCommandBufferBeginInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDeviceGroupCommandBufferBeginInfo" + }, + { + "vuid": "VUID-VkCommandBufferBeginInfo-flags-parameter", + "text": " flags must be a valid combination of VkCommandBufferUsageFlagBits values" + } + ] + }, + 
"VkCommandBufferInheritanceInfo": { + "core": [ + { + "vuid": "VUID-VkCommandBufferInheritanceInfo-occlusionQueryEnable-00056", + "text": " If the &amp;lt;&amp;lt;features-features-inheritedQueries,inherited queries&amp;gt;&amp;gt; feature is not enabled, occlusionQueryEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkCommandBufferInheritanceInfo-queryFlags-00057", + "text": " If the &amp;lt;&amp;lt;features-features-inheritedQueries,inherited queries&amp;gt;&amp;gt; feature is enabled, queryFlags must be a valid combination of VkQueryControlFlagBits values" + }, + { + "vuid": "VUID-VkCommandBufferInheritanceInfo-pipelineStatistics-00058", + "text": " If the &amp;lt;&amp;lt;features-features-pipelineStatisticsQuery,pipeline statistics queries&amp;gt;&amp;gt; feature is not enabled, pipelineStatistics must be 0" + }, + { + "vuid": "VUID-VkCommandBufferInheritanceInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO" + }, + { + "vuid": "VUID-VkCommandBufferInheritanceInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCommandBufferInheritanceInfo-commonparent", + "text": " Both of framebuffer, and renderPass that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkEndCommandBuffer": { + "core": [ + { + "vuid": "VUID-vkEndCommandBuffer-commandBuffer-00059", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkEndCommandBuffer-commandBuffer-00060", + "text": " If commandBuffer is a primary command buffer, there must not be an active render pass instance" + }, + { + "vuid": "VUID-vkEndCommandBuffer-commandBuffer-00061", + "text": " All queries made &amp;lt;&amp;lt;queries-operation-active,active&amp;gt;&amp;gt; during the recording of commandBuffer must have been made inactive" + }, + { + "vuid": "VUID-vkEndCommandBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + } + ], + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkEndCommandBuffer-commandBuffer-01815", + "text": " If commandBuffer is a secondary command buffer, there must not be an outstanding vkCmdBeginDebugUtilsLabelEXT command recorded to commandBuffer that has not previously been ended by a call to vkCmdEndDebugUtilsLabelEXT." + } + ], + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-vkEndCommandBuffer-commandBuffer-00062", + "text": " If commandBuffer is a secondary command buffer, there must not be an outstanding vkCmdDebugMarkerBeginEXT command recorded to commandBuffer that has not previously been ended by a call to vkCmdDebugMarkerEndEXT." + } + ] + }, + "vkQueueSubmit": { + "core": [ + { + "vuid": "VUID-vkQueueSubmit-fence-00063", + "text": " If fence is not VK_NULL_HANDLE, fence must be unsignaled" + }, + { + "vuid": "VUID-vkQueueSubmit-fence-00064", + "text": " If fence is not VK_NULL_HANDLE, fence must not be associated with any other queue command that has not yet completed execution on that queue" + }, + { + "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00065", + "text": " Any calls to vkCmdSetEvent, vkCmdResetEvent or vkCmdWaitEvents that have been recorded into any of the command buffer elements of the pCommandBuffers member of any element of pSubmits, must not reference any VkEvent that is referenced by any of those commands in a command buffer that has been submitted to another queue and is still in the pending state." 
+ }, + { + "vuid": "VUID-vkQueueSubmit-pWaitDstStageMask-00066", + "text": " Any stage flag included in any element of the pWaitDstStageMask member of any element of pSubmits must be a pipeline stage supported by one of the capabilities of queue, as specified in the &amp;lt;&amp;lt;synchronization-pipeline-stages-supported, table of supported pipeline stages&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkQueueSubmit-pSignalSemaphores-00067", + "text": " Each element of the pSignalSemaphores member of any element of pSubmits must be unsignaled when the semaphore signal operation it defines is executed on the device" + }, + { + "vuid": "VUID-vkQueueSubmit-pWaitSemaphores-00068", + "text": " When a semaphore unsignal operation defined by any element of the pWaitSemaphores member of any element of pSubmits executes on queue, no other queue must be waiting on the same semaphore." + }, + { + "vuid": "VUID-vkQueueSubmit-pWaitSemaphores-00069", + "text": " All elements of the pWaitSemaphores member of all elements of pSubmits must be semaphores that are signaled, or have &amp;lt;&amp;lt;synchronization-semaphores-signaling, semaphore signal operations&amp;gt;&amp;gt; previously submitted for execution." + }, + { + "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00070", + "text": " Each element of the pCommandBuffers member of each element of pSubmits must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending or executable state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00071", + "text": " If any element of the pCommandBuffers member of any element of pSubmits was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, it must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00072", + "text": " Any &amp;lt;&amp;lt;commandbuffers-secondary, secondary command buffers recorded&amp;gt;&amp;gt; into any element of the pCommandBuffers member of any element of pSubmits must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending or executable state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00073", + "text": " If any &amp;lt;&amp;lt;commandbuffers-secondary, secondary command buffers recorded&amp;gt;&amp;gt; into any element of the pCommandBuffers member of any element of pSubmits was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT, it must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkQueueSubmit-pCommandBuffers-00074", + "text": " Each element of the pCommandBuffers member of each element of pSubmits must have been allocated from a VkCommandPool that was created for the same queue family queue belongs to." 
+ }, + { + "vuid": "VUID-vkQueueSubmit-queue-parameter", + "text": " queue must be a valid VkQueue handle" + }, + { + "vuid": "VUID-vkQueueSubmit-pSubmits-parameter", + "text": " If submitCount is not 0, pSubmits must be a valid pointer to an array of submitCount valid VkSubmitInfo structures" + }, + { + "vuid": "VUID-vkQueueSubmit-fence-parameter", + "text": " If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-vkQueueSubmit-commonparent", + "text": " Both of fence, and queue that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkSubmitInfo": { + "core": [ + { + "vuid": "VUID-VkSubmitInfo-pCommandBuffers-00075", + "text": " Each element of pCommandBuffers must not have been allocated with VK_COMMAND_BUFFER_LEVEL_SECONDARY" + }, + { + "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-00076", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, each element of pWaitDstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-00077", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, each element of pWaitDstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-00078", + "text": " Each element of pWaitDstStageMask must not include VK_PIPELINE_STAGE_HOST_BIT." + }, + { + "vuid": "VUID-VkSubmitInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SUBMIT_INFO" + }, + { + "vuid": "VUID-VkSubmitInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkD3D12FenceSubmitInfoKHR, VkDeviceGroupSubmitInfo, VkProtectedSubmitInfo, VkWin32KeyedMutexAcquireReleaseInfoKHR, or VkWin32KeyedMutexAcquireReleaseInfoNV" + }, + { + "vuid": "VUID-VkSubmitInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkSubmitInfo-pWaitSemaphores-parameter", + "text": " If waitSemaphoreCount is not 0, pWaitSemaphores must be a valid pointer to an array of waitSemaphoreCount valid VkSemaphore handles" + }, + { + "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-parameter", + "text": " If waitSemaphoreCount is not 0, pWaitDstStageMask must be a valid pointer to an array of waitSemaphoreCount valid combinations of VkPipelineStageFlagBits values" + }, + { + "vuid": "VUID-VkSubmitInfo-pWaitDstStageMask-requiredbitmask", + "text": " Each element of pWaitDstStageMask must not be 0" + }, + { + "vuid": "VUID-VkSubmitInfo-pCommandBuffers-parameter", + "text": " If commandBufferCount is not 0, pCommandBuffers must be a valid pointer to an array of commandBufferCount valid VkCommandBuffer handles" + }, + { + "vuid": "VUID-VkSubmitInfo-pSignalSemaphores-parameter", + "text": " If signalSemaphoreCount is not 0, pSignalSemaphores must be a valid pointer to an array of signalSemaphoreCount valid VkSemaphore handles" + }, + { + "vuid": "VUID-VkSubmitInfo-commonparent", + "text": " Each of the elements of pCommandBuffers, the elements of pSignalSemaphores, and the elements of pWaitSemaphores that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkD3D12FenceSubmitInfoKHR": { + 
"(VK_KHR_external_semaphore_win32)": [ + { + "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-waitSemaphoreValuesCount-00079", + "text": " waitSemaphoreValuesCount must be the same value as VkSubmitInfo::waitSemaphoreCount, where VkSubmitInfo is in the pNext chain of this VkD3D12FenceSubmitInfoKHR structure." + }, + { + "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-signalSemaphoreValuesCount-00080", + "text": " signalSemaphoreValuesCount must be the same value as VkSubmitInfo::signalSemaphoreCount, where VkSubmitInfo is in the pNext chain of this VkD3D12FenceSubmitInfoKHR structure." + }, + { + "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR" + }, + { + "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-pWaitSemaphoreValues-parameter", + "text": " If waitSemaphoreValuesCount is not 0, and pWaitSemaphoreValues is not NULL, pWaitSemaphoreValues must be a valid pointer to an array of waitSemaphoreValuesCount uint64_t values" + }, + { + "vuid": "VUID-VkD3D12FenceSubmitInfoKHR-pSignalSemaphoreValues-parameter", + "text": " If signalSemaphoreValuesCount is not 0, and pSignalSemaphoreValues is not NULL, pSignalSemaphoreValues must be a valid pointer to an array of signalSemaphoreValuesCount uint64_t values" + } + ] + }, + "VkWin32KeyedMutexAcquireReleaseInfoKHR": { + "(VK_KHR_win32_keyed_mutex)": [ + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-00081", + "text": " Each member of pAcquireSyncs and pReleaseSyncs must be a device memory object imported by setting VkImportMemoryWin32HandleInfoKHR::handleType to VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT." + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireSyncs-parameter", + "text": " If acquireCount is not 0, pAcquireSyncs must be a valid pointer to an array of acquireCount valid VkDeviceMemory handles" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireKeys-parameter", + "text": " If acquireCount is not 0, pAcquireKeys must be a valid pointer to an array of acquireCount uint64_t values" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pAcquireTimeouts-parameter", + "text": " If acquireCount is not 0, pAcquireTimeouts must be a valid pointer to an array of acquireCount uint32_t values" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseSyncs-parameter", + "text": " If releaseCount is not 0, pReleaseSyncs must be a valid pointer to an array of releaseCount valid VkDeviceMemory handles" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-pReleaseKeys-parameter", + "text": " If releaseCount is not 0, pReleaseKeys must be a valid pointer to an array of releaseCount uint64_t values" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoKHR-commonparent", + "text": " Both of the elements of pAcquireSyncs, and the elements of pReleaseSyncs that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkWin32KeyedMutexAcquireReleaseInfoNV": { + "(VK_NV_win32_keyed_mutex)": [ + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV" + }, + { + "vuid": 
"VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireSyncs-parameter", + "text": " If acquireCount is not 0, pAcquireSyncs must be a valid pointer to an array of acquireCount valid VkDeviceMemory handles" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireKeys-parameter", + "text": " If acquireCount is not 0, pAcquireKeys must be a valid pointer to an array of acquireCount uint64_t values" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pAcquireTimeoutMilliseconds-parameter", + "text": " If acquireCount is not 0, pAcquireTimeoutMilliseconds must be a valid pointer to an array of acquireCount uint32_t values" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseSyncs-parameter", + "text": " If releaseCount is not 0, pReleaseSyncs must be a valid pointer to an array of releaseCount valid VkDeviceMemory handles" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-pReleaseKeys-parameter", + "text": " If releaseCount is not 0, pReleaseKeys must be a valid pointer to an array of releaseCount uint64_t values" + }, + { + "vuid": "VUID-VkWin32KeyedMutexAcquireReleaseInfoNV-commonparent", + "text": " Both of the elements of pAcquireSyncs, and the elements of pReleaseSyncs that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkProtectedSubmitInfo": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01816", + "text": " If the protected memory feature is not enabled, protectedSubmit must not be VK_TRUE." + }, + { + "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01817", + "text": " If protectedSubmit is VK_TRUE, then each element of the pCommandBuffers array must be a protected command buffer." + }, + { + "vuid": "VUID-VkProtectedSubmitInfo-protectedSubmit-01818", + "text": " If protectedSubmit is VK_FALSE, then each element of the pCommandBuffers array must be an unprotected command buffer." + }, + { + "vuid": "VUID-VkProtectedSubmitInfo-pNext-01819", + "text": " If the VkSubmitInfo::pNext chain does not include a VkProtectedSubmitInfo structure, then each element of the command buffer of the pCommandBuffers array must be an unprotected command buffer." 
+ }, + { + "vuid": "VUID-VkProtectedSubmitInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO" + } + ] + }, + "VkDeviceGroupSubmitInfo": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-waitSemaphoreCount-00082", + "text": " waitSemaphoreCount must equal VkSubmitInfo::waitSemaphoreCount" + }, + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-commandBufferCount-00083", + "text": " commandBufferCount must equal VkSubmitInfo::commandBufferCount" + }, + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-signalSemaphoreCount-00084", + "text": " signalSemaphoreCount must equal VkSubmitInfo::signalSemaphoreCount" + }, + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-00085", + "text": " All elements of pWaitSemaphoreDeviceIndices and pSignalSemaphoreDeviceIndices must be valid device indices" + }, + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-00086", + "text": " All elements of pCommandBufferDeviceMasks must be valid device masks" + }, + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO" + }, + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-pWaitSemaphoreDeviceIndices-parameter", + "text": " If waitSemaphoreCount is not 0, pWaitSemaphoreDeviceIndices must be a valid pointer to an array of waitSemaphoreCount uint32_t values" + }, + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-pCommandBufferDeviceMasks-parameter", + "text": " If commandBufferCount is not 0, pCommandBufferDeviceMasks must be a valid pointer to an array of commandBufferCount uint32_t values" + }, + { + "vuid": "VUID-VkDeviceGroupSubmitInfo-pSignalSemaphoreDeviceIndices-parameter", + "text": " If signalSemaphoreCount is not 0, pSignalSemaphoreDeviceIndices must be a valid pointer to an array of signalSemaphoreCount uint32_t values" + } + ] + }, + "vkCmdExecuteCommands": { + "core": [ + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00087", + "text": " commandBuffer must have been allocated with a level of VK_COMMAND_BUFFER_LEVEL_PRIMARY" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00088", + "text": " Each element of pCommandBuffers must have been allocated with a level of VK_COMMAND_BUFFER_LEVEL_SECONDARY" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00089", + "text": " Each element of pCommandBuffers must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending or executable state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00090", + "text": " If any element of pCommandBuffers was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag, and it was recorded into any other primary command buffer, that primary command buffer must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00091", + "text": " If any element of pCommandBuffers was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag, it must not be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00092", + "text": " If any element of pCommandBuffers was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag, it must not have already been recorded to commandBuffer." 
+ }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00093", + "text": " If any element of pCommandBuffers was not recorded with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag, it must not appear more than once in pCommandBuffers." + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00094", + "text": " Each element of pCommandBuffers must have been allocated from a VkCommandPool that was created for the same queue family as the VkCommandPool from which commandBuffer was allocated" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-contents-00095", + "text": " If vkCmdExecuteCommands is being called within a render pass instance, that render pass instance must have been begun with the contents parameter of vkCmdBeginRenderPass set to VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00096", + "text": " If vkCmdExecuteCommands is being called within a render pass instance, each element of pCommandBuffers must have been recorded with the VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00097", + "text": " If vkCmdExecuteCommands is being called within a render pass instance, each element of pCommandBuffers must have been recorded with VkCommandBufferInheritanceInfo::subpass set to the index of the subpass which the given command buffer will be executed in" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pInheritanceInfo-00098", + "text": " If vkCmdExecuteCommands is being called within a render pass instance, the render passes specified in the pname::pBeginInfo::pInheritanceInfo::renderPass members of the vkBeginCommandBuffer commands used to begin recording each element of pCommandBuffers must be &amp;lt;&amp;lt;renderpass-compatibility,compatible&amp;gt;&amp;gt; with the current render pass." 
+ }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00099", + "text": " If vkCmdExecuteCommands is being called within a render pass instance, and any element of pCommandBuffers was recorded with VkCommandBufferInheritanceInfo::framebuffer not equal to VK_NULL_HANDLE, that VkFramebuffer must match the VkFramebuffer used in the current render pass instance" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00100", + "text": " If vkCmdExecuteCommands is not being called within a render pass instance, each element of pCommandBuffers must not have been recorded with the VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00101", + "text": " If the &amp;lt;&amp;lt;features-features-inheritedQueries,inherited queries&amp;gt;&amp;gt; feature is not enabled, commandBuffer must not have any queries &amp;lt;&amp;lt;queries-operation-active,active&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00102", + "text": " If commandBuffer has a VK_QUERY_TYPE_OCCLUSION query &amp;lt;&amp;lt;queries-operation-active,active&amp;gt;&amp;gt;, then each element of pCommandBuffers must have been recorded with VkCommandBufferInheritanceInfo::occlusionQueryEnable set to VK_TRUE" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00103", + "text": " If commandBuffer has a VK_QUERY_TYPE_OCCLUSION query &amp;lt;&amp;lt;queries-operation-active,active&amp;gt;&amp;gt;, then each element of pCommandBuffers must have been recorded with VkCommandBufferInheritanceInfo::queryFlags having all bits set that are set for the query" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-00104", + "text": " If commandBuffer has a VK_QUERY_TYPE_PIPELINE_STATISTICS query &amp;lt;&amp;lt;queries-operation-active,active&amp;gt;&amp;gt;, then each element of pCommandBuffers must have been recorded with VkCommandBufferInheritanceInfo::pipelineStatistics having all bits set that are set in the VkQueryPool the query uses" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-00105", + "text": " Each element of pCommandBuffers must not begin any query types that are &amp;lt;&amp;lt;queries-operation-active,active&amp;gt;&amp;gt; in commandBuffer" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-pCommandBuffers-parameter", + "text": " pCommandBuffers must be a valid pointer to an array of commandBufferCount valid VkCommandBuffer handles" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBufferCount-arraylength", + "text": " commandBufferCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commonparent", + "text": " Both of commandBuffer, and the elements of pCommandBuffers must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-01820", + "text": " If 
commandBuffer is a protected command buffer, then each element of pCommandBuffers must be a protected command buffer." + }, + { + "vuid": "VUID-vkCmdExecuteCommands-commandBuffer-01821", + "text": " If commandBuffer is an unprotected command buffer, then each element of pCommandBuffers must be an unprotected command buffer." + } + ] + }, + "VkDeviceGroupCommandBufferBeginInfo": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00106", + "text": " deviceMask must be a valid device mask value" + }, + { + "vuid": "VUID-VkDeviceGroupCommandBufferBeginInfo-deviceMask-00107", + "text": " deviceMask must not be zero" + }, + { + "vuid": "VUID-VkDeviceGroupCommandBufferBeginInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO" + } + ] + }, + "vkCmdSetDeviceMask": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCmdSetDeviceMask-deviceMask-00108", + "text": " deviceMask must be a valid device mask value" + }, + { + "vuid": "VUID-vkCmdSetDeviceMask-deviceMask-00109", + "text": " deviceMask must not be zero" + }, + { + "vuid": "VUID-vkCmdSetDeviceMask-deviceMask-00110", + "text": " deviceMask must not include any set bits that were not in the VkDeviceGroupCommandBufferBeginInfo::deviceMask value when the command buffer began recording." + }, + { + "vuid": "VUID-vkCmdSetDeviceMask-deviceMask-00111", + "text": " If vkCmdSetDeviceMask is called inside a render pass instance, deviceMask must not include any set bits that were not in the VkDeviceGroupRenderPassBeginInfo::deviceMask value when the render pass instance began recording." + }, + { + "vuid": "VUID-vkCmdSetDeviceMask-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetDeviceMask-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetDeviceMask-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, compute, or transfer operations" + } + ] + }, + "vkCreateFence": { + "core": [ + { + "vuid": "VUID-vkCreateFence-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateFence-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkFenceCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateFence-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateFence-pFence-parameter", + "text": " pFence must be a valid pointer to a VkFence handle" + } + ] + }, + "VkFenceCreateInfo": { + "core": [ + { + "vuid": "VUID-VkFenceCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_FENCE_CREATE_INFO" + }, + { + "vuid": "VUID-VkFenceCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkExportFenceCreateInfo or VkExportFenceWin32HandleInfoKHR" + }, + { + "vuid": "VUID-VkFenceCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkFenceCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkFenceCreateFlagBits values" + } + ] + }, + "VkExportFenceCreateInfo": { + 
"(VK_VERSION_1_1,VK_KHR_external_fence)": [ + { + "vuid": "VUID-VkExportFenceCreateInfo-handleTypes-01446", + "text": " The bits in handleTypes must be supported and compatible, as reported by VkExternalFenceProperties." + }, + { + "vuid": "VUID-VkExportFenceCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO" + }, + { + "vuid": "VUID-VkExportFenceCreateInfo-handleTypes-parameter", + "text": " handleTypes must be a valid combination of VkExternalFenceHandleTypeFlagBits values" + } + ] + }, + "VkExportFenceWin32HandleInfoKHR": { + "(VK_KHR_external_fence_win32)": [ + { + "vuid": "VUID-VkExportFenceWin32HandleInfoKHR-handleTypes-01447", + "text": " If VkExportFenceCreateInfo::handleTypes does not include VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, VkExportFenceWin32HandleInfoKHR must not be in the pNext chain of VkFenceCreateInfo." + }, + { + "vuid": "VUID-VkExportFenceWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkExportFenceWin32HandleInfoKHR-pAttributes-parameter", + "text": " If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value" + } + ] + }, + "vkGetFenceWin32HandleKHR": { + "(VK_KHR_external_fence_win32)": [ + { + "vuid": "VUID-vkGetFenceWin32HandleKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetFenceWin32HandleKHR-pGetWin32HandleInfo-parameter", + "text": " pGetWin32HandleInfo must be a valid pointer to a valid VkFenceGetWin32HandleInfoKHR structure" + }, + { + "vuid": "VUID-vkGetFenceWin32HandleKHR-pHandle-parameter", + "text": " pHandle must be a valid pointer to a HANDLE value" + } + ] + }, + "VkFenceGetWin32HandleInfoKHR": { + "(VK_KHR_external_fence_win32)": [ + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-01448", + "text": " handleType must have been included in VkExportFenceCreateInfo::handleTypes when the fence’s current payload was created." + }, + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-01449", + "text": " If handleType is defined as an NT handle, vkGetFenceWin32HandleKHR must be called no more than once for each valid unique combination of fence and handleType." + }, + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-fence-01450", + "text": " fence must not currently have its payload replaced by an imported payload as described below in &amp;lt;&amp;lt;synchronization-fences-importing,Importing Fence Payloads&amp;gt;&amp;gt; unless that imported payload’s handle type was included in VkExternalFenceProperties::exportFromImportedHandleTypes for handleType." + }, + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-01451", + "text": " If handleType refers to a handle type with copy payload transference semantics, fence must be signaled, or have an associated &amp;lt;&amp;lt;synchronization-fences-signaling,fence signal operation&amp;gt;&amp;gt; pending execution." + }, + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-01452", + "text": " handleType must be defined as an NT handle or a global share handle." 
+ }, + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-fence-parameter", + "text": " fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-VkFenceGetWin32HandleInfoKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalFenceHandleTypeFlagBits value" + } + ] + }, + "vkGetFenceFdKHR": { + "(VK_KHR_external_fence_fd)": [ + { + "vuid": "VUID-vkGetFenceFdKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetFenceFdKHR-pGetFdInfo-parameter", + "text": " pGetFdInfo must be a valid pointer to a valid VkFenceGetFdInfoKHR structure" + }, + { + "vuid": "VUID-vkGetFenceFdKHR-pFd-parameter", + "text": " pFd must be a valid pointer to a int value" + } + ] + }, + "VkFenceGetFdInfoKHR": { + "(VK_KHR_external_fence_fd)": [ + { + "vuid": "VUID-VkFenceGetFdInfoKHR-handleType-01453", + "text": " handleType must have been included in VkExportFenceCreateInfo::handleTypes when fence’s current payload was created." + }, + { + "vuid": "VUID-VkFenceGetFdInfoKHR-handleType-01454", + "text": " If handleType refers to a handle type with copy payload transference semantics, fence must be signaled, or have an associated &amp;lt;&amp;lt;synchronization-fences-signaling,fence signal operation&amp;gt;&amp;gt; pending execution." + }, + { + "vuid": "VUID-VkFenceGetFdInfoKHR-fence-01455", + "text": " fence must not currently have its payload replaced by an imported payload as described below in &amp;lt;&amp;lt;synchronization-fences-importing,Importing Fence Payloads&amp;gt;&amp;gt; unless that imported payload’s handle type was included in VkExternalFenceProperties::exportFromImportedHandleTypes for handleType." + }, + { + "vuid": "VUID-VkFenceGetFdInfoKHR-handleType-01456", + "text": " handleType must be defined as a POSIX file descriptor handle." 
+ }, + { + "vuid": "VUID-VkFenceGetFdInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR" + }, + { + "vuid": "VUID-VkFenceGetFdInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkFenceGetFdInfoKHR-fence-parameter", + "text": " fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-VkFenceGetFdInfoKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalFenceHandleTypeFlagBits value" + } + ] + }, + "vkDestroyFence": { + "core": [ + { + "vuid": "VUID-vkDestroyFence-fence-01120", + "text": " All &amp;lt;&amp;lt;devsandqueues-submission, queue submission&amp;gt;&amp;gt; commands that refer to fence must have completed execution" + }, + { + "vuid": "VUID-vkDestroyFence-fence-01121", + "text": " If VkAllocationCallbacks were provided when fence was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyFence-fence-01122", + "text": " If no VkAllocationCallbacks were provided when fence was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyFence-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyFence-fence-parameter", + "text": " If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-vkDestroyFence-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyFence-fence-parent", + "text": " If fence is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetFenceStatus": { + "core": [ + { + "vuid": "VUID-vkGetFenceStatus-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetFenceStatus-fence-parameter", + "text": " fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-vkGetFenceStatus-fence-parent", + "text": " fence must have been created, allocated, or retrieved from device" + } + ] + }, + "vkResetFences": { + "core": [ + { + "vuid": "VUID-vkResetFences-pFences-01123", + "text": " Each element of pFences must not be currently associated with any queue command that has not yet completed execution on that queue" + }, + { + "vuid": "VUID-vkResetFences-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkResetFences-pFences-parameter", + "text": " pFences must be a valid pointer to an array of fenceCount valid VkFence handles" + }, + { + "vuid": "VUID-vkResetFences-fenceCount-arraylength", + "text": " fenceCount must be greater than 0" + }, + { + "vuid": "VUID-vkResetFences-pFences-parent", + "text": " Each element of pFences must have been created, allocated, or retrieved from device" + } + ] + }, + "vkWaitForFences": { + "core": [ + { + "vuid": "VUID-vkWaitForFences-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkWaitForFences-pFences-parameter", + "text": " pFences must be a valid pointer to an array of fenceCount valid VkFence handles" + }, + { + "vuid": "VUID-vkWaitForFences-fenceCount-arraylength", + "text": " fenceCount must be greater than 0" + }, + { + "vuid": "VUID-vkWaitForFences-pFences-parent", + "text": " Each element of pFences must have been created, allocated, or retrieved from device" + } + ] + }, + "vkRegisterDeviceEventEXT": { + "(VK_EXT_display_control)": [ + { + "vuid": "VUID-vkRegisterDeviceEventEXT-device-parameter", + "text": " 
device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkRegisterDeviceEventEXT-pDeviceEventInfo-parameter", + "text": " pDeviceEventInfo must be a valid pointer to a valid VkDeviceEventInfoEXT structure" + }, + { + "vuid": "VUID-vkRegisterDeviceEventEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkRegisterDeviceEventEXT-pFence-parameter", + "text": " pFence must be a valid pointer to a VkFence handle" + } + ] + }, + "VkDeviceEventInfoEXT": { + "(VK_EXT_display_control)": [ + { + "vuid": "VUID-VkDeviceEventInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT" + }, + { + "vuid": "VUID-VkDeviceEventInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDeviceEventInfoEXT-deviceEvent-parameter", + "text": " deviceEvent must be a valid VkDeviceEventTypeEXT value" + } + ] + }, + "vkRegisterDisplayEventEXT": { + "(VK_EXT_display_control)": [ + { + "vuid": "VUID-vkRegisterDisplayEventEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkRegisterDisplayEventEXT-display-parameter", + "text": " display must be a valid VkDisplayKHR handle" + }, + { + "vuid": "VUID-vkRegisterDisplayEventEXT-pDisplayEventInfo-parameter", + "text": " pDisplayEventInfo must be a valid pointer to a valid VkDisplayEventInfoEXT structure" + }, + { + "vuid": "VUID-vkRegisterDisplayEventEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkRegisterDisplayEventEXT-pFence-parameter", + "text": " pFence must be a valid pointer to a VkFence handle" + } + ] + }, + "VkDisplayEventInfoEXT": { + "(VK_EXT_display_control)": [ + { + "vuid": "VUID-VkDisplayEventInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT" + }, + { + "vuid": "VUID-VkDisplayEventInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDisplayEventInfoEXT-displayEvent-parameter", + "text": " displayEvent must be a valid VkDisplayEventTypeEXT value" + } + ] + }, + "vkImportFenceWin32HandleKHR": { + "(VK_KHR_external_fence_win32)": [ + { + "vuid": "VUID-vkImportFenceWin32HandleKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkImportFenceWin32HandleKHR-pImportFenceWin32HandleInfo-parameter", + "text": " pImportFenceWin32HandleInfo must be a valid pointer to a valid VkImportFenceWin32HandleInfoKHR structure" + } + ] + }, + "VkImportFenceWin32HandleInfoKHR": { + "(VK_KHR_external_fence_win32)": [ + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01457", + "text": " handleType must be a value included in the &amp;lt;&amp;lt;synchronization-fence-handletypes-win32, Handle Types Supported by VkImportFenceWin32HandleInfoKHR&amp;gt;&amp;gt; table." + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01459", + "text": " If handleType is not VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, name must be NULL." + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01460", + "text": " If handleType is not 0 and handle is NULL, name must name a valid synchronization primitive of the type specified by handleType." 
+ }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-01461", + "text": " If handleType is not 0 and name is NULL, handle must be a valid handle of the type specified by handleType." + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handle-01462", + "text": " If handle is not NULL, name must be NULL." + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handle-01539", + "text": " If handle is not NULL, it must obey any requirements listed for handleType in external fence handle types compatibility." + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-name-01540", + "text": " If name is not NULL, it must obey any requirements listed for handleType in external fence handle types compatibility." + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-fence-parameter", + "text": " fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkFenceImportFlagBits values" + }, + { + "vuid": "VUID-VkImportFenceWin32HandleInfoKHR-handleType-parameter", + "text": " If handleType is not 0, handleType must be a valid VkExternalFenceHandleTypeFlagBits value" + } + ] + }, + "vkImportFenceFdKHR": { + "(VK_KHR_external_fence_fd)": [ + { + "vuid": "VUID-vkImportFenceFdKHR-fence-01463", + "text": " fence must not be associated with any queue command that has not yet completed execution on that queue" + }, + { + "vuid": "VUID-vkImportFenceFdKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkImportFenceFdKHR-pImportFenceFdInfo-parameter", + "text": " pImportFenceFdInfo must be a valid pointer to a valid VkImportFenceFdInfoKHR structure" + } + ] + }, + "VkImportFenceFdInfoKHR": { + "(VK_KHR_external_fence_fd)": [ + { + "vuid": "VUID-VkImportFenceFdInfoKHR-handleType-01464", + "text": " handleType must be a value included in the &amp;lt;&amp;lt;synchronization-fence-handletypes-fd, Handle Types Supported by VkImportFenceFdInfoKHR&amp;gt;&amp;gt; table." + }, + { + "vuid": "VUID-VkImportFenceFdInfoKHR-fd-01541", + "text": " fd must obey any requirements listed for handleType in &amp;lt;&amp;lt;external-fence-handle-types-compatibility,external fence handle types compatibility&amp;gt;&amp;gt;." 
+ }, + { + "vuid": "VUID-VkImportFenceFdInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR" + }, + { + "vuid": "VUID-VkImportFenceFdInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkImportFenceFdInfoKHR-fence-parameter", + "text": " fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-VkImportFenceFdInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkFenceImportFlagBits values" + }, + { + "vuid": "VUID-VkImportFenceFdInfoKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalFenceHandleTypeFlagBits value" + } + ] + }, + "vkCreateSemaphore": { + "core": [ + { + "vuid": "VUID-vkCreateSemaphore-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateSemaphore-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkSemaphoreCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateSemaphore-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateSemaphore-pSemaphore-parameter", + "text": " pSemaphore must be a valid pointer to a VkSemaphore handle" + } + ] + }, + "VkSemaphoreCreateInfo": { + "core": [ + { + "vuid": "VUID-VkSemaphoreCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO" + }, + { + "vuid": "VUID-VkSemaphoreCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkExportSemaphoreCreateInfo or VkExportSemaphoreWin32HandleInfoKHR" + }, + { + "vuid": "VUID-VkSemaphoreCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkSemaphoreCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "VkExportSemaphoreCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_external_semaphore)": [ + { + "vuid": "VUID-VkExportSemaphoreCreateInfo-handleTypes-01124", + "text": " The bits in handleTypes must be supported and compatible, as reported by VkExternalSemaphoreProperties." + }, + { + "vuid": "VUID-VkExportSemaphoreCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO" + }, + { + "vuid": "VUID-VkExportSemaphoreCreateInfo-handleTypes-parameter", + "text": " handleTypes must be a valid combination of VkExternalSemaphoreHandleTypeFlagBits values" + } + ] + }, + "VkExportSemaphoreWin32HandleInfoKHR": { + "(VK_KHR_external_semaphore_win32)": [ + { + "vuid": "VUID-VkExportSemaphoreWin32HandleInfoKHR-handleTypes-01125", + "text": " If VkExportSemaphoreCreateInfo::handleTypes does not include VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT or VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, VkExportSemaphoreWin32HandleInfoKHR must not be in the pNext chain of VkSemaphoreCreateInfo." 
+ }, + { + "vuid": "VUID-VkExportSemaphoreWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkExportSemaphoreWin32HandleInfoKHR-pAttributes-parameter", + "text": " If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value" + } + ] + }, + "vkGetSemaphoreWin32HandleKHR": { + "(VK_KHR_external_semaphore_win32)": [ + { + "vuid": "VUID-vkGetSemaphoreWin32HandleKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetSemaphoreWin32HandleKHR-pGetWin32HandleInfo-parameter", + "text": " pGetWin32HandleInfo must be a valid pointer to a valid VkSemaphoreGetWin32HandleInfoKHR structure" + }, + { + "vuid": "VUID-vkGetSemaphoreWin32HandleKHR-pHandle-parameter", + "text": " pHandle must be a valid pointer to a HANDLE value" + } + ] + }, + "VkSemaphoreGetWin32HandleInfoKHR": { + "(VK_KHR_external_semaphore_win32)": [ + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01126", + "text": " handleType must have been included in VkExportSemaphoreCreateInfo::handleTypes when the semaphore’s current payload was created." + }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01127", + "text": " If handleType is defined as an NT handle, vkGetSemaphoreWin32HandleKHR must be called no more than once for each valid unique combination of semaphore and handleType." + }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-01128", + "text": " semaphore must not currently have its payload replaced by an imported payload as described below in &amp;lt;&amp;lt;synchronization-semaphores-importing,Importing Semaphore Payloads&amp;gt;&amp;gt; unless that imported payload’s handle type was included in VkExternalSemaphoreProperties::exportFromImportedHandleTypes for handleType." + }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01129", + "text": " If handleType refers to a handle type with copy payload transference semantics, as defined below in &amp;lt;&amp;lt;synchronization-semaphores-importing,Importing Semaphore Payloads&amp;gt;&amp;gt;, there must be no queue waiting on semaphore." + }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01130", + "text": " If handleType refers to a handle type with copy payload transference semantics, semaphore must be signaled, or have an associated &amp;lt;&amp;lt;synchronization-semaphores-signaling,semaphore signal operation&amp;gt;&amp;gt; pending execution." + }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-01131", + "text": " handleType must be defined as an NT handle or a global share handle." 
+ }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-semaphore-parameter", + "text": " semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-VkSemaphoreGetWin32HandleInfoKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value" + } + ] + }, + "vkGetSemaphoreFdKHR": { + "(VK_KHR_external_semaphore_fd)": [ + { + "vuid": "VUID-vkGetSemaphoreFdKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetSemaphoreFdKHR-pGetFdInfo-parameter", + "text": " pGetFdInfo must be a valid pointer to a valid VkSemaphoreGetFdInfoKHR structure" + }, + { + "vuid": "VUID-vkGetSemaphoreFdKHR-pFd-parameter", + "text": " pFd must be a valid pointer to a int value" + } + ] + }, + "VkSemaphoreGetFdInfoKHR": { + "(VK_KHR_external_semaphore_fd)": [ + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-01132", + "text": " handleType must have been included in VkExportSemaphoreCreateInfo::handleTypes when semaphore’s current payload was created." + }, + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-semaphore-01133", + "text": " semaphore must not currently have its payload replaced by an imported payload as described below in &amp;lt;&amp;lt;synchronization-semaphores-importing,Importing Semaphore Payloads&amp;gt;&amp;gt; unless that imported payload’s handle type was included in VkExternalSemaphoreProperties::exportFromImportedHandleTypes for handleType." + }, + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-01134", + "text": " If handleType refers to a handle type with copy payload transference semantics, as defined below in &amp;lt;&amp;lt;synchronization-semaphores-importing,Importing Semaphore Payloads&amp;gt;&amp;gt;, there must be no queue waiting on semaphore." + }, + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-01135", + "text": " If handleType refers to a handle type with copy payload transference semantics, semaphore must be signaled, or have an associated &amp;lt;&amp;lt;synchronization-semaphores-signaling,semaphore signal operation&amp;gt;&amp;gt; pending execution." + }, + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-01136", + "text": " handleType must be defined as a POSIX file descriptor handle." 
+ }, + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR" + }, + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-semaphore-parameter", + "text": " semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-VkSemaphoreGetFdInfoKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value" + } + ] + }, + "vkDestroySemaphore": { + "core": [ + { + "vuid": "VUID-vkDestroySemaphore-semaphore-01137", + "text": " All submitted batches that refer to semaphore must have completed execution" + }, + { + "vuid": "VUID-vkDestroySemaphore-semaphore-01138", + "text": " If VkAllocationCallbacks were provided when semaphore was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroySemaphore-semaphore-01139", + "text": " If no VkAllocationCallbacks were provided when semaphore was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroySemaphore-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroySemaphore-semaphore-parameter", + "text": " If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-vkDestroySemaphore-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroySemaphore-semaphore-parent", + "text": " If semaphore is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkImportSemaphoreWin32HandleKHR": { + "(VK_KHR_external_semaphore_win32)": [ + { + "vuid": "VUID-vkImportSemaphoreWin32HandleKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkImportSemaphoreWin32HandleKHR-pImportSemaphoreWin32HandleInfo-parameter", + "text": " pImportSemaphoreWin32HandleInfo must be a valid pointer to a valid VkImportSemaphoreWin32HandleInfoKHR structure" + } + ] + }, + "VkImportSemaphoreWin32HandleInfoKHR": { + "(VK_KHR_external_semaphore_win32)": [ + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01140", + "text": " handleType must be a value included in the &amp;lt;&amp;lt;synchronization-semaphore-handletypes-win32,Handle Types Supported by VkImportSemaphoreWin32HandleInfoKHR&amp;gt;&amp;gt; table." + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01466", + "text": " If handleType is not VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT or VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, name must be NULL." + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01467", + "text": " If handleType is not 0 and handle is NULL, name must name a valid synchronization primitive of the type specified by handleType." + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-01468", + "text": " If handleType is not 0 and name is NULL, handle must be a valid handle of the type specified by handleType." + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01469", + "text": " If handle is not NULL, name must be NULL." + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handle-01542", + "text": " If handle is not NULL, it must obey any requirements listed for handleType in external semaphore handle types compatibility." 
+ }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-name-01543", + "text": " If name is not NULL, it must obey any requirements listed for handleType in external semaphore handle types compatibility." + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-semaphore-parameter", + "text": " semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkSemaphoreImportFlagBits values" + }, + { + "vuid": "VUID-VkImportSemaphoreWin32HandleInfoKHR-handleType-parameter", + "text": " If handleType is not 0, handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value" + } + ] + }, + "vkImportSemaphoreFdKHR": { + "(VK_KHR_external_semaphore_fd)": [ + { + "vuid": "VUID-vkImportSemaphoreFdKHR-semaphore-01142", + "text": " semaphore must not be associated with any queue command that has not yet completed execution on that queue" + }, + { + "vuid": "VUID-vkImportSemaphoreFdKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkImportSemaphoreFdKHR-pImportSemaphoreFdInfo-parameter", + "text": " pImportSemaphoreFdInfo must be a valid pointer to a valid VkImportSemaphoreFdInfoKHR structure" + } + ] + }, + "VkImportSemaphoreFdInfoKHR": { + "(VK_KHR_external_semaphore_fd)": [ + { + "vuid": "VUID-VkImportSemaphoreFdInfoKHR-handleType-01143", + "text": " handleType must be a value included in the &amp;lt;&amp;lt;synchronization-semaphore-handletypes-fd,Handle Types Supported by VkImportSemaphoreFdInfoKHR&amp;gt;&amp;gt; table." + }, + { + "vuid": "VUID-VkImportSemaphoreFdInfoKHR-fd-01544", + "text": " fd must obey any requirements listed for handleType in &amp;lt;&amp;lt;external-semaphore-handle-types-compatibility,external semaphore handle types compatibility&amp;gt;&amp;gt;." 
+ }, + { + "vuid": "VUID-VkImportSemaphoreFdInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR" + }, + { + "vuid": "VUID-VkImportSemaphoreFdInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkImportSemaphoreFdInfoKHR-semaphore-parameter", + "text": " semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-VkImportSemaphoreFdInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkSemaphoreImportFlagBits values" + }, + { + "vuid": "VUID-VkImportSemaphoreFdInfoKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value" + } + ] + }, + "vkCreateEvent": { + "core": [ + { + "vuid": "VUID-vkCreateEvent-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateEvent-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkEventCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateEvent-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateEvent-pEvent-parameter", + "text": " pEvent must be a valid pointer to a VkEvent handle" + } + ] + }, + "VkEventCreateInfo": { + "core": [ + { + "vuid": "VUID-VkEventCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EVENT_CREATE_INFO" + }, + { + "vuid": "VUID-VkEventCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkEventCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkDestroyEvent": { + "core": [ + { + "vuid": "VUID-vkDestroyEvent-event-01145", + "text": " All submitted commands that refer to event must have completed execution" + }, + { + "vuid": "VUID-vkDestroyEvent-event-01146", + "text": " If VkAllocationCallbacks were provided when event was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyEvent-event-01147", + "text": " If no VkAllocationCallbacks were provided when event was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyEvent-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyEvent-event-parameter", + "text": " If event is not VK_NULL_HANDLE, event must be a valid VkEvent handle" + }, + { + "vuid": "VUID-vkDestroyEvent-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyEvent-event-parent", + "text": " If event is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetEventStatus": { + "core": [ + { + "vuid": "VUID-vkGetEventStatus-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetEventStatus-event-parameter", + "text": " event must be a valid VkEvent handle" + }, + { + "vuid": "VUID-vkGetEventStatus-event-parent", + "text": " event must have been created, allocated, or retrieved from device" + } + ] + }, + "vkSetEvent": { + "core": [ + { + "vuid": "VUID-vkSetEvent-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkSetEvent-event-parameter", + "text": " event must be a valid VkEvent handle" + }, + { + "vuid": "VUID-vkSetEvent-event-parent", + "text": " event must have been created, allocated, or retrieved from device" + } + ] + }, + 
"vkResetEvent": { + "core": [ + { + "vuid": "VUID-vkResetEvent-event-01148", + "text": " event must not be waited on by a vkCmdWaitEvents command that is currently executing" + }, + { + "vuid": "VUID-vkResetEvent-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkResetEvent-event-parameter", + "text": " event must be a valid VkEvent handle" + }, + { + "vuid": "VUID-vkResetEvent-event-parent", + "text": " event must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCmdSetEvent": { + "core": [ + { + "vuid": "VUID-vkCmdSetEvent-stageMask-01149", + "text": " stageMask must not include VK_PIPELINE_STAGE_HOST_BIT" + }, + { + "vuid": "VUID-vkCmdSetEvent-stageMask-01150", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdSetEvent-stageMask-01151", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdSetEvent-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetEvent-event-parameter", + "text": " event must be a valid VkEvent handle" + }, + { + "vuid": "VUID-vkCmdSetEvent-stageMask-parameter", + "text": " stageMask must be a valid combination of VkPipelineStageFlagBits values" + }, + { + "vuid": "VUID-vkCmdSetEvent-stageMask-requiredbitmask", + "text": " stageMask must not be 0" + }, + { + "vuid": "VUID-vkCmdSetEvent-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetEvent-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdSetEvent-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdSetEvent-commonparent", + "text": " Both of commandBuffer, and event must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCmdSetEvent-commandBuffer-01152", + "text": " commandBuffer’s current device mask must include exactly one physical device." 
+ } + ] + }, + "vkCmdResetEvent": { + "core": [ + { + "vuid": "VUID-vkCmdResetEvent-stageMask-01153", + "text": " stageMask must not include VK_PIPELINE_STAGE_HOST_BIT" + }, + { + "vuid": "VUID-vkCmdResetEvent-stageMask-01154", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdResetEvent-stageMask-01155", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, stageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdResetEvent-event-01156", + "text": " When this command executes, event must not be waited on by a vkCmdWaitEvents command that is currently executing" + }, + { + "vuid": "VUID-vkCmdResetEvent-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdResetEvent-event-parameter", + "text": " event must be a valid VkEvent handle" + }, + { + "vuid": "VUID-vkCmdResetEvent-stageMask-parameter", + "text": " stageMask must be a valid combination of VkPipelineStageFlagBits values" + }, + { + "vuid": "VUID-vkCmdResetEvent-stageMask-requiredbitmask", + "text": " stageMask must not be 0" + }, + { + "vuid": "VUID-vkCmdResetEvent-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdResetEvent-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdResetEvent-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdResetEvent-commonparent", + "text": " Both of commandBuffer, and event must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCmdResetEvent-commandBuffer-01157", + "text": " commandBuffer’s current device mask must include exactly one physical device." 
+ } + ] + }, + "vkCmdWaitEvents": { + "core": [ + { + "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01158", + "text": " srcStageMask must be the bitwise OR of the stageMask parameter used in previous calls to vkCmdSetEvent with any of the members of pEvents and VK_PIPELINE_STAGE_HOST_BIT if any of the members of pEvents was set using vkSetEvent" + }, + { + "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01159", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdWaitEvents-dstStageMask-01160", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01161", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdWaitEvents-dstStageMask-01162", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdWaitEvents-pEvents-01163", + "text": " If pEvents includes one or more events that will be signaled by vkSetEvent after commandBuffer has been submitted to a queue, then vkCmdWaitEvents must not be called inside a render pass instance" + }, + { + "vuid": "VUID-vkCmdWaitEvents-srcStageMask-01164", + "text": " Any pipeline stage included in srcStageMask or dstStageMask must be supported by the capabilities of the queue family specified by the queueFamilyIndex member of the VkCommandPoolCreateInfo structure that was used to create the VkCommandPool that commandBuffer was allocated from, as specified in the &amp;lt;&amp;lt;synchronization-pipeline-stages-supported, table of supported pipeline stages&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkCmdWaitEvents-pMemoryBarriers-01165", + "text": " Each element of pMemoryBarriers, pBufferMemoryBarriers or pImageMemoryBarriers must not have any access flag included in its srcAccessMask member if that bit is not supported by any of the pipeline stages in srcStageMask, as specified in the &amp;lt;&amp;lt;synchronization-access-types-supported, table of supported access types&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkCmdWaitEvents-pMemoryBarriers-01166", + "text": " Each element of pMemoryBarriers, pBufferMemoryBarriers or pImageMemoryBarriers must not have any access flag included in its dstAccessMask member if that bit is not supported by any of the pipeline stages in dstStageMask, as specified in the &amp;lt;&amp;lt;synchronization-access-types-supported, table of supported access types&amp;gt;&amp;gt;." 
+ }, + { + "vuid": "VUID-vkCmdWaitEvents-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWaitEvents-pEvents-parameter", + "text": " pEvents must be a valid pointer to an array of eventCount valid VkEvent handles" + }, + { + "vuid": "VUID-vkCmdWaitEvents-srcStageMask-parameter", + "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits values" + }, + { + "vuid": "VUID-vkCmdWaitEvents-srcStageMask-requiredbitmask", + "text": " srcStageMask must not be 0" + }, + { + "vuid": "VUID-vkCmdWaitEvents-dstStageMask-parameter", + "text": " dstStageMask must be a valid combination of VkPipelineStageFlagBits values" + }, + { + "vuid": "VUID-vkCmdWaitEvents-dstStageMask-requiredbitmask", + "text": " dstStageMask must not be 0" + }, + { + "vuid": "VUID-vkCmdWaitEvents-pMemoryBarriers-parameter", + "text": " If memoryBarrierCount is not 0, pMemoryBarriers must be a valid pointer to an array of memoryBarrierCount valid VkMemoryBarrier structures" + }, + { + "vuid": "VUID-vkCmdWaitEvents-pBufferMemoryBarriers-parameter", + "text": " If bufferMemoryBarrierCount is not 0, pBufferMemoryBarriers must be a valid pointer to an array of bufferMemoryBarrierCount valid VkBufferMemoryBarrier structures" + }, + { + "vuid": "VUID-vkCmdWaitEvents-pImageMemoryBarriers-parameter", + "text": " If imageMemoryBarrierCount is not 0, pImageMemoryBarriers must be a valid pointer to an array of imageMemoryBarrierCount valid VkImageMemoryBarrier structures" + }, + { + "vuid": "VUID-vkCmdWaitEvents-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdWaitEvents-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdWaitEvents-eventCount-arraylength", + "text": " eventCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdWaitEvents-commonparent", + "text": " Both of commandBuffer, and the elements of pEvents must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCmdWaitEvents-commandBuffer-01167", + "text": " commandBuffer’s current device mask must include exactly one physical device." 
+ } + ] + }, + "vkCmdPipelineBarrier": { + "core": [ + { + "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-01168", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-01169", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-01170", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-01171", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-pDependencies-01172", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, the render pass must have been created with a VkSubpassDependency instance in pDependencies that expresses a dependency from the current subpass to itself." + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-01173", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, srcStageMask must contain a subset of the bit values in the srcStageMask member of that instance of VkSubpassDependency" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-01174", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, dstStageMask must contain a subset of the bit values in the dstStageMask member of that instance of VkSubpassDependency" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-srcAccessMask-01175", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, the srcAccessMask of any element of pMemoryBarriers or pImageMemoryBarriers must contain a subset of the bit values the srcAccessMask member of that instance of VkSubpassDependency" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-dstAccessMask-01176", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, the dstAccessMask of any element of pMemoryBarriers or pImageMemoryBarriers must contain a subset of the bit values the dstAccessMask member of that instance of VkSubpassDependency" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-dependencyFlags-01177", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, dependencyFlags must be equal to the dependencyFlags member of that instance of VkSubpassDependency" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, bufferMemoryBarrierCount must be 0" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-image-01179", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, the image member of any element of pImageMemoryBarriers must be equal to one of the elements of pAttachments that the current framebuffer was created with, that is also referred to by one of the elements of the pColorAttachments, 
pResolveAttachments or pDepthStencilAttachment members of the VkSubpassDescription instance that the current subpass was created with" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-oldLayout-01180", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, the oldLayout and newLayout members of any element of pImageMemoryBarriers must be equal to the layout member of an element of the pColorAttachments, pResolveAttachments or pDepthStencilAttachment members of the VkSubpassDescription instance that the current subpass was created with, that refers to the same image" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-oldLayout-01181", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, the oldLayout and newLayout members of an element of pImageMemoryBarriers must be equal" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182", + "text": " If vkCmdPipelineBarrier is called within a render pass instance, the srcQueueFamilyIndex and dstQueueFamilyIndex members of any element of pImageMemoryBarriers must be VK_QUEUE_FAMILY_IGNORED" + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-01183", + "text": " Any pipeline stage included in srcStageMask or dstStageMask must be supported by the capabilities of the queue family specified by the queueFamilyIndex member of the VkCommandPoolCreateInfo structure that was used to create the VkCommandPool that commandBuffer was allocated from, as specified in the &amp;lt;&amp;lt;synchronization-pipeline-stages-supported, table of supported pipeline stages&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01184", + "text": " Each element of pMemoryBarriers, pBufferMemoryBarriers and pImageMemoryBarriers must not have any access flag included in its srcAccessMask member if that bit is not supported by any of the pipeline stages in srcStageMask, as specified in the &amp;lt;&amp;lt;synchronization-access-types-supported, table of supported access types&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkCmdPipelineBarrier-pMemoryBarriers-01185", + "text": " Each element of pMemoryBarriers, pBufferMemoryBarriers and pImageMemoryBarriers must not have any access flag included in its dstAccessMask member if that bit is not supported by any of the pipeline stages in dstStageMask, as specified in the &amp;lt;&amp;lt;synchronization-access-types-supported, table of supported access types&amp;gt;&amp;gt;." 
+ },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-commandBuffer-parameter",
+          "text": " commandBuffer must be a valid VkCommandBuffer handle"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-parameter",
+          "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits values"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-srcStageMask-requiredbitmask",
+          "text": " srcStageMask must not be 0"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-parameter",
+          "text": " dstStageMask must be a valid combination of VkPipelineStageFlagBits values"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dstStageMask-requiredbitmask",
+          "text": " dstStageMask must not be 0"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dependencyFlags-parameter",
+          "text": " dependencyFlags must be a valid combination of VkDependencyFlagBits values"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pMemoryBarriers-parameter",
+          "text": " If memoryBarrierCount is not 0, pMemoryBarriers must be a valid pointer to an array of memoryBarrierCount valid VkMemoryBarrier structures"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pBufferMemoryBarriers-parameter",
+          "text": " If bufferMemoryBarrierCount is not 0, pBufferMemoryBarriers must be a valid pointer to an array of bufferMemoryBarrierCount valid VkBufferMemoryBarrier structures"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-pImageMemoryBarriers-parameter",
+          "text": " If imageMemoryBarrierCount is not 0, pImageMemoryBarriers must be a valid pointer to an array of imageMemoryBarrierCount valid VkImageMemoryBarrier structures"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-commandBuffer-recording",
+          "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;"
+        },
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-commandBuffer-cmdpool",
+          "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations"
+        }
+      ],
+      "(VK_VERSION_1_1,VK_KHR_multiview)": [
+        {
+          "vuid": "VUID-vkCmdPipelineBarrier-dependencyFlags-01186",
+          "text": " If vkCmdPipelineBarrier is called outside of a render pass instance, dependencyFlags must not include VK_DEPENDENCY_VIEW_LOCAL_BIT"
+        }
+      ]
+    },
+    "VkMemoryBarrier": {
+      "core": [
+        {
+          "vuid": "VUID-VkMemoryBarrier-sType-sType",
+          "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_BARRIER"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier-pNext-pNext",
+          "text": " pNext must be NULL"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier-srcAccessMask-parameter",
+          "text": " srcAccessMask must be a valid combination of VkAccessFlagBits values"
+        },
+        {
+          "vuid": "VUID-VkMemoryBarrier-dstAccessMask-parameter",
+          "text": " dstAccessMask must be a valid combination of VkAccessFlagBits values"
+        }
+      ]
+    },
+    "VkBufferMemoryBarrier": {
+      "core": [
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-offset-01187",
+          "text": " offset must be less than the size of buffer"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-size-01188",
+          "text": " If size is not equal to VK_WHOLE_SIZE, size must be greater than 0"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-size-01189",
+          "text": " If size is not equal to VK_WHOLE_SIZE, size must be less than or equal to the size of buffer minus offset"
+        },
+        {
+          "vuid": "VUID-VkBufferMemoryBarrier-buffer-01196",
+          "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not VK_QUEUE_FAMILY_IGNORED, at least one of them must be the same as the family of the queue 
that will execute this barrier" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-01931", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-srcAccessMask-parameter", + "text": " srcAccessMask must be a valid combination of VkAccessFlagBits values" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-dstAccessMask-parameter", + "text": " dstAccessMask must be a valid combination of VkAccessFlagBits values" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + } + ], + "!(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-01190", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex must both be VK_QUEUE_FAMILY_IGNORED" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-01192", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, srcQueueFamilyIndex and dstQueueFamilyIndex must either both be VK_QUEUE_FAMILY_IGNORED, or both be a valid queue family (see &amp;lt;&amp;lt;devsandqueues-queueprops&amp;gt;&amp;gt;)" + } + ], + "(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-01191", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-01763", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, the other must be VK_QUEUE_FAMILY_IGNORED or a special queue family reserved for external memory ownership transfers, as described in &amp;lt;&amp;lt;synchronization-queue-transfers&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-01193", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and srcQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, dstQueueFamilyIndex must also be VK_QUEUE_FAMILY_IGNORED" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-01764", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and srcQueueFamilyIndex is not VK_QUEUE_FAMILY_IGNORED, it must be a valid queue family or a special queue family reserved for external memory transfers, as described in &amp;lt;&amp;lt;synchronization-queue-transfers&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-01765", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and dstQueueFamilyIndex is not VK_QUEUE_FAMILY_IGNORED, it must be a valid queue family or a special queue family reserved for external memory transfers, as described in &amp;lt;&amp;lt;synchronization-queue-transfers&amp;gt;&amp;gt;." 
+ } + ] + }, + "VkImageMemoryBarrier": { + "core": [ + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01197", + "text": " oldLayout must be VK_IMAGE_LAYOUT_UNDEFINED or the current layout of the image subresources affected by the barrier" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-newLayout-01198", + "text": " newLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01205", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not VK_QUEUE_FAMILY_IGNORED, at least one of them must be the same as the family of the queue that will execute this barrier" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-01486", + "text": " subresourceRange.baseMipLevel must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-01724", + "text": " If subresourceRange.levelCount is not VK_REMAINING_MIP_LEVELS, subresourceRange.baseMipLevel + subresourceRange.levelCount must be less than or equal to the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-01488", + "text": " subresourceRange.baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-01725", + "text": " If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01207", + "text": " If image has a depth/stencil format with both depth and stencil components, then the aspectMask member of subresourceRange must include both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01208", + "text": " If either oldLayout or newLayout is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01209", + "text": " If either oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01210", + "text": " If either oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01211", + "text": " If either oldLayout or newLayout is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01212", + "text": " If either oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT set" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01213", + "text": " If either oldLayout or newLayout is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL then image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT set" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01932", + "text": " If image is non-sparse 
then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkSampleLocationsInfoEXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-srcAccessMask-parameter", + "text": " srcAccessMask must be a valid combination of VkAccessFlagBits values" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-dstAccessMask-parameter", + "text": " dstAccessMask must be a valid combination of VkAccessFlagBits values" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-parameter", + "text": " oldLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-newLayout-parameter", + "text": " newLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-subresourceRange-parameter", + "text": " subresourceRange must be a valid VkImageSubresourceRange structure" + } + ], + "!(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkImageMemoryBarrier-image-01199", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex must both be VK_QUEUE_FAMILY_IGNORED" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01200", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, srcQueueFamilyIndex and dstQueueFamilyIndex must either both be VK_QUEUE_FAMILY_IGNORED, or both be a valid queue family (see &amp;lt;&amp;lt;devsandqueues-queueprops&amp;gt;&amp;gt;)." + } + ], + "(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkImageMemoryBarrier-image-01381", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01766", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, the other must be VK_QUEUE_FAMILY_IGNORED or a special queue family reserved for external memory transfers, as described in &amp;lt;&amp;lt;synchronization-queue-transfers&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01201", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and srcQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, dstQueueFamilyIndex must also be VK_QUEUE_FAMILY_IGNORED." + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01767", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and srcQueueFamilyIndex is not VK_QUEUE_FAMILY_IGNORED, it must be a valid queue family or a special queue family reserved for external memory transfers, as described in &amp;lt;&amp;lt;synchronization-queue-transfers&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01768", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE and dstQueueFamilyIndex is not VK_QUEUE_FAMILY_IGNORED, it must be a valid queue family or a special queue family reserved for external memory transfers, as described in &amp;lt;&amp;lt;synchronization-queue-transfers&amp;gt;&amp;gt;." 
+ } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageMemoryBarrier-image-01671", + "text": " If image has a single-plane color format or is not disjoint, then the aspectMask member of subresourceRange must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01672", + "text": " If image has a multi-planar format and the image is disjoint, then the aspectMask member of subresourceRange must include either at least one of VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, and VK_IMAGE_ASPECT_PLANE_2_BIT; or must include VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-01673", + "text": " If image has a multi-planar format with only two planes, then the aspectMask member of subresourceRange must not include VK_IMAGE_ASPECT_PLANE_2_BIT" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01658", + "text": " If either oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-oldLayout-01659", + "text": " If either oldLayout or newLayout is VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL then image must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + } + ] + }, + "vkQueueWaitIdle": { + "core": [ + { + "vuid": "VUID-vkQueueWaitIdle-queue-parameter", + "text": " queue must be a valid VkQueue handle" + } + ] + }, + "vkDeviceWaitIdle": { + "core": [ + { + "vuid": "VUID-vkDeviceWaitIdle-device-parameter", + "text": " device must be a valid VkDevice handle" + } + ] + }, + "vkCreateRenderPass": { + "core": [ + { + "vuid": "VUID-vkCreateRenderPass-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateRenderPass-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkRenderPassCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateRenderPass-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateRenderPass-pRenderPass-parameter", + "text": " pRenderPass must be a valid pointer to a VkRenderPass handle" + } + ] + }, + "VkRenderPassCreateInfo": { + "core": [ + { + "vuid": "VUID-VkRenderPassCreateInfo-None-00832", + "text": " If any two subpasses operate on attachments with overlapping ranges of the same VkDeviceMemory object, and at least one subpass writes to that area of VkDeviceMemory, a subpass dependency must be included (either directly or via some intermediate subpasses) between them" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-attachment-00833", + "text": " If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or the attachment indexed by any element of pPreserveAttachments in any element of pSubpasses is bound to a range of a VkDeviceMemory object that overlaps with any other attachment in any subpass (including the same subpass), the VkAttachmentDescription structures describing them must include VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT in flags" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-attachment-00834", + "text": " If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or any element of 
pPreserveAttachments in any element of pSubpasses is not VK_ATTACHMENT_UNUSED, it must be less than attachmentCount" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pPreserveAttachments-00835", + "text": " The value of each element of the pPreserveAttachments member in each element of pSubpasses must not be VK_ATTACHMENT_UNUSED" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-00836", + "text": " For any member of pAttachments with a loadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL." + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pDependencies-00837", + "text": " For any element of pDependencies, if the srcSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the srcStageMask member of that dependency must be a pipeline stage supported by the &amp;lt;&amp;lt;synchronization-pipeline-stages-types, pipeline&amp;gt;&amp;gt; identified by the pipelineBindPoint member of the source subpass." + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pDependencies-00838", + "text": " For any element of pDependencies, if the dstSubpass is not VK_SUBPASS_EXTERNAL, all stage flags included in the dstStageMask member of that dependency must be a pipeline stage supported by the &amp;lt;&amp;lt;synchronization-pipeline-stages-types, pipeline&amp;gt;&amp;gt; identified by the pipelineBindPoint member of the source subpass." + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkRenderPassInputAttachmentAspectCreateInfo or VkRenderPassMultiviewCreateInfo" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-parameter", + "text": " If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkAttachmentDescription structures" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pSubpasses-parameter", + "text": " pSubpasses must be a valid pointer to an array of subpassCount valid VkSubpassDescription structures" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pDependencies-parameter", + "text": " If dependencyCount is not 0, pDependencies must be a valid pointer to an array of dependencyCount valid VkSubpassDependency structures" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-subpassCount-arraylength", + "text": " subpassCount must be greater than 0" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-01566", + "text": " For any member of pAttachments with a loadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL." + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pAttachments-01567", + "text": " For any member of pAttachments with a stencilLoadOp equal to VK_ATTACHMENT_LOAD_OP_CLEAR, the first use of that attachment must not specify a layout equal to VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL." 
+ }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pNext-01926", + "text": " If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, the subpass member of each element of its pAspectReferences member must be less than subpassCount" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pNext-01927", + "text": " If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, the inputAttachmentIndex member of each element of its pAspectReferences member must be less than the value of inputAttachmentCount in the member of pSubpasses identified by its subpass member" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-VkRenderPassCreateInfo-pNext-01928", + "text": " If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, and its subpassCount member is not zero, that member must be equal to the value of subpassCount" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pNext-01929", + "text": " If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, if its dependencyCount member is not zero, it must be equal to dependencyCount" + }, + { + "vuid": "VUID-VkRenderPassCreateInfo-pNext-01930", + "text": " If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo, for each non-zero element of pViewOffsets, the srcSubpass and dstSubpass members of pDependencies at the same index must not be equal" + } + ] + }, + "VkRenderPassMultiviewCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-00841", + "text": " Each view index must not be set in more than one element of pCorrelationMasks" + }, + { + "vuid": "VUID-VkRenderPassMultiviewCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO" + }, + { + "vuid": "VUID-VkRenderPassMultiviewCreateInfo-pViewMasks-parameter", + "text": " If subpassCount is not 0, pViewMasks must be a valid pointer to an array of subpassCount uint32_t values" + }, + { + "vuid": "VUID-VkRenderPassMultiviewCreateInfo-pViewOffsets-parameter", + "text": " If dependencyCount is not 0, pViewOffsets must be a valid pointer to an array of dependencyCount int32_t values" + }, + { + "vuid": "VUID-VkRenderPassMultiviewCreateInfo-pCorrelationMasks-parameter", + "text": " If correlationMaskCount is not 0, pCorrelationMasks must be a valid pointer to an array of correlationMaskCount uint32_t values" + } + ] + }, + "VkAttachmentDescription": { + "core": [ + { + "vuid": "VUID-VkAttachmentDescription-finalLayout-00843", + "text": " finalLayout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED" + }, + { + "vuid": "VUID-VkAttachmentDescription-flags-parameter", + "text": " flags must be a valid combination of VkAttachmentDescriptionFlagBits values" + }, + { + "vuid": "VUID-VkAttachmentDescription-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkAttachmentDescription-samples-parameter", + "text": " samples must be a valid VkSampleCountFlagBits value" + }, + { + "vuid": "VUID-VkAttachmentDescription-loadOp-parameter", + "text": " loadOp must be a valid VkAttachmentLoadOp value" + }, + { + "vuid": "VUID-VkAttachmentDescription-storeOp-parameter", + "text": " storeOp must be a valid VkAttachmentStoreOp value" + }, + { + "vuid": "VUID-VkAttachmentDescription-stencilLoadOp-parameter", + "text": " stencilLoadOp must be a valid VkAttachmentLoadOp value" + }, + { + "vuid": 
"VUID-VkAttachmentDescription-stencilStoreOp-parameter", + "text": " stencilStoreOp must be a valid VkAttachmentStoreOp value" + }, + { + "vuid": "VUID-VkAttachmentDescription-initialLayout-parameter", + "text": " initialLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkAttachmentDescription-finalLayout-parameter", + "text": " finalLayout must be a valid VkImageLayout value" + } + ] + }, + "VkRenderPassInputAttachmentAspectCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkRenderPassInputAttachmentAspectCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO" + }, + { + "vuid": "VUID-VkRenderPassInputAttachmentAspectCreateInfo-pAspectReferences-parameter", + "text": " pAspectReferences must be a valid pointer to an array of aspectReferenceCount valid VkInputAttachmentAspectReference structures" + }, + { + "vuid": "VUID-VkRenderPassInputAttachmentAspectCreateInfo-aspectReferenceCount-arraylength", + "text": " aspectReferenceCount must be greater than 0" + } + ] + }, + "VkInputAttachmentAspectReference": { + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkInputAttachmentAspectReference-pCreateInfo-01568", + "text": " There must be an input attachment at pCreateInfo::pSubpasses[subpass].pInputAttachments[inputAttachmentIndex]." + }, + { + "vuid": "VUID-VkInputAttachmentAspectReference-None-01569", + "text": " The specified input attachment must have more than one aspect mask." + }, + { + "vuid": "VUID-VkInputAttachmentAspectReference-aspectMask-01570", + "text": " aspectMask must be a subset of the aspect masks in the specified input attachment." + }, + { + "vuid": "VUID-VkInputAttachmentAspectReference-aspectMask-parameter", + "text": " aspectMask must be a valid combination of VkImageAspectFlagBits values" + }, + { + "vuid": "VUID-VkInputAttachmentAspectReference-aspectMask-requiredbitmask", + "text": " aspectMask must not be 0" + } + ] + }, + "VkSubpassDescription": { + "core": [ + { + "vuid": "VUID-VkSubpassDescription-pipelineBindPoint-00844", + "text": " pipelineBindPoint must be VK_PIPELINE_BIND_POINT_GRAPHICS" + }, + { + "vuid": "VUID-VkSubpassDescription-colorAttachmentCount-00845", + "text": " colorAttachmentCount must be less than or equal to VkPhysicalDeviceLimits::maxColorAttachments" + }, + { + "vuid": "VUID-VkSubpassDescription-loadOp-00846", + "text": " If the first use of an attachment in this render pass is as an input attachment, and the attachment is not also used as a color or depth/stencil attachment in the same subpass, then loadOp must not be VK_ATTACHMENT_LOAD_OP_CLEAR" + }, + { + "vuid": "VUID-VkSubpassDescription-pResolveAttachments-00847", + "text": " If pResolveAttachments is not NULL, for each resolve attachment that does not have the value VK_ATTACHMENT_UNUSED, the corresponding color attachment must not have the value VK_ATTACHMENT_UNUSED" + }, + { + "vuid": "VUID-VkSubpassDescription-pResolveAttachments-00848", + "text": " If pResolveAttachments is not NULL, the sample count of each element of pColorAttachments must be anything other than VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkSubpassDescription-pResolveAttachments-00849", + "text": " Each element of pResolveAttachments must have a sample count of VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkSubpassDescription-pResolveAttachments-00850", + "text": " Each element of pResolveAttachments must have the same VkFormat as its corresponding color attachment" + }, + { + "vuid": 
"VUID-VkSubpassDescription-pColorAttachments-01417", + "text": " All attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have the same sample count" + }, + { + "vuid": "VUID-VkSubpassDescription-None-00852", + "text": " If any input attachments are VK_ATTACHMENT_UNUSED, then any pipelines bound during the subpass must not access those input attachments from the fragment shader" + }, + { + "vuid": "VUID-VkSubpassDescription-attachment-00853", + "text": " The attachment member of each element of pPreserveAttachments must not be VK_ATTACHMENT_UNUSED" + }, + { + "vuid": "VUID-VkSubpassDescription-pPreserveAttachments-00854", + "text": " Each element of pPreserveAttachments must not also be an element of any other member of the subpass description" + }, + { + "vuid": "VUID-VkSubpassDescription-layout-00855", + "text": " If any attachment is used as both an input attachment and a color or depth/stencil attachment, then each use must use the same layout" + }, + { + "vuid": "VUID-VkSubpassDescription-flags-parameter", + "text": " flags must be a valid combination of VkSubpassDescriptionFlagBits values" + }, + { + "vuid": "VUID-VkSubpassDescription-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-VkSubpassDescription-pInputAttachments-parameter", + "text": " If inputAttachmentCount is not 0, pInputAttachments must be a valid pointer to an array of inputAttachmentCount valid VkAttachmentReference structures" + }, + { + "vuid": "VUID-VkSubpassDescription-pColorAttachments-parameter", + "text": " If colorAttachmentCount is not 0, pColorAttachments must be a valid pointer to an array of colorAttachmentCount valid VkAttachmentReference structures" + }, + { + "vuid": "VUID-VkSubpassDescription-pResolveAttachments-parameter", + "text": " If colorAttachmentCount is not 0, and pResolveAttachments is not NULL, pResolveAttachments must be a valid pointer to an array of colorAttachmentCount valid VkAttachmentReference structures" + }, + { + "vuid": "VUID-VkSubpassDescription-pDepthStencilAttachment-parameter", + "text": " If pDepthStencilAttachment is not NULL, pDepthStencilAttachment must be a valid pointer to a valid VkAttachmentReference structure" + }, + { + "vuid": "VUID-VkSubpassDescription-pPreserveAttachments-parameter", + "text": " If preserveAttachmentCount is not 0, pPreserveAttachments must be a valid pointer to an array of preserveAttachmentCount uint32_t values" + } + ], + "(VK_AMD_mixed_attachment_samples)": [ + { + "vuid": "VUID-VkSubpassDescription-pColorAttachments-01506", + "text": " All attachments in pColorAttachments that are not VK_ATTACHMENT_UNUSED must have a sample count that is smaller than or equal to the sample count of pDepthStencilAttachment if it is not VK_ATTACHMENT_UNUSED" + } + ], + "!(VK_AMD_mixed_attachment_samples)+!(VK_NV_framebuffer_mixed_samples)": [ + { + "vuid": "VUID-VkSubpassDescription-pDepthStencilAttachment-01418", + "text": " If pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and any attachments in pColorAttachments are not VK_ATTACHMENT_UNUSED, they must have the same sample count" + } + ], + "(VK_NVX_multiview_per_view_attributes)": [ + { + "vuid": "VUID-VkSubpassDescription-flags-00856", + "text": " If flags includes VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, it must also include VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX." 
+ } + ] + }, + "VkAttachmentReference": { + "core": [ + { + "vuid": "VUID-VkAttachmentReference-layout-00857", + "text": " layout must not be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED" + }, + { + "vuid": "VUID-VkAttachmentReference-layout-parameter", + "text": " layout must be a valid VkImageLayout value" + } + ] + }, + "VkSubpassDependency": { + "core": [ + { + "vuid": "VUID-VkSubpassDependency-srcSubpass-00858", + "text": " If srcSubpass is not VK_SUBPASS_EXTERNAL, srcStageMask must not include VK_PIPELINE_STAGE_HOST_BIT" + }, + { + "vuid": "VUID-VkSubpassDependency-dstSubpass-00859", + "text": " If dstSubpass is not VK_SUBPASS_EXTERNAL, dstStageMask must not include VK_PIPELINE_STAGE_HOST_BIT" + }, + { + "vuid": "VUID-VkSubpassDependency-srcStageMask-00860", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-VkSubpassDependency-dstStageMask-00861", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT" + }, + { + "vuid": "VUID-VkSubpassDependency-srcStageMask-00862", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, srcStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-VkSubpassDependency-dstStageMask-00863", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, dstStageMask must not contain VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT" + }, + { + "vuid": "VUID-VkSubpassDependency-srcSubpass-00864", + "text": " srcSubpass must be less than or equal to dstSubpass, unless one of them is VK_SUBPASS_EXTERNAL, to avoid cyclic dependencies and ensure a valid execution order" + }, + { + "vuid": "VUID-VkSubpassDependency-srcSubpass-00865", + "text": " srcSubpass and dstSubpass must not both be equal to VK_SUBPASS_EXTERNAL" + }, + { + "vuid": "VUID-VkSubpassDependency-srcSubpass-00866", + "text": " If srcSubpass is equal to dstSubpass, srcStageMask and dstStageMask must only contain one of VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, or VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT" + }, + { + "vuid": "VUID-VkSubpassDependency-srcSubpass-00867", + "text": " If srcSubpass is equal to dstSubpass and not all of the stages in srcStageMask and dstStageMask are &amp;lt;&amp;lt;synchronization-framebuffer-regions,framebuffer-space stages&amp;gt;&amp;gt;, the &amp;lt;&amp;lt;synchronization-pipeline-stages-order, logically latest&amp;gt;&amp;gt; pipeline stage in srcStageMask must be &amp;lt;&amp;lt;synchronization-pipeline-stages-order, logically earlier&amp;gt;&amp;gt; than or equal to the 
&amp;lt;&amp;lt;synchronization-pipeline-stages-order, logically earliest&amp;gt;&amp;gt; pipeline stage in dstStageMask" + }, + { + "vuid": "VUID-VkSubpassDependency-srcAccessMask-00868", + "text": " Any access flag included in srcAccessMask must be supported by one of the pipeline stages in srcStageMask, as specified in the &amp;lt;&amp;lt;synchronization-access-types-supported, table of supported access types&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkSubpassDependency-dstAccessMask-00869", + "text": " Any access flag included in dstAccessMask must be supported by one of the pipeline stages in dstStageMask, as specified in the &amp;lt;&amp;lt;synchronization-access-types-supported, table of supported access types&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkSubpassDependency-srcStageMask-parameter", + "text": " srcStageMask must be a valid combination of VkPipelineStageFlagBits values" + }, + { + "vuid": "VUID-VkSubpassDependency-srcStageMask-requiredbitmask", + "text": " srcStageMask must not be 0" + }, + { + "vuid": "VUID-VkSubpassDependency-dstStageMask-parameter", + "text": " dstStageMask must be a valid combination of VkPipelineStageFlagBits values" + }, + { + "vuid": "VUID-VkSubpassDependency-dstStageMask-requiredbitmask", + "text": " dstStageMask must not be 0" + }, + { + "vuid": "VUID-VkSubpassDependency-srcAccessMask-parameter", + "text": " srcAccessMask must be a valid combination of VkAccessFlagBits values" + }, + { + "vuid": "VUID-VkSubpassDependency-dstAccessMask-parameter", + "text": " dstAccessMask must be a valid combination of VkAccessFlagBits values" + }, + { + "vuid": "VUID-VkSubpassDependency-dependencyFlags-parameter", + "text": " dependencyFlags must be a valid combination of VkDependencyFlagBits values" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-VkSubpassDependency-dependencyFlags-00870", + "text": " If dependencyFlags includes VK_DEPENDENCY_VIEW_LOCAL_BIT, then both srcSubpass and dstSubpass must not equal VK_SUBPASS_EXTERNAL" + }, + { + "vuid": "VUID-VkSubpassDependency-dependencyFlags-00871", + "text": " If dependencyFlags includes VK_DEPENDENCY_VIEW_LOCAL_BIT, then the render pass must have multiview enabled" + }, + { + "vuid": "VUID-VkSubpassDependency-srcSubpass-00872", + "text": " If srcSubpass equals dstSubpass and that subpass has more than one bit set in the view mask, then dependencyFlags must include VK_DEPENDENCY_VIEW_LOCAL_BIT" + } + ] + }, + "vkDestroyRenderPass": { + "core": [ + { + "vuid": "VUID-vkDestroyRenderPass-renderPass-00873", + "text": " All submitted commands that refer to renderPass must have completed execution" + }, + { + "vuid": "VUID-vkDestroyRenderPass-renderPass-00874", + "text": " If VkAllocationCallbacks were provided when renderPass was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyRenderPass-renderPass-00875", + "text": " If no VkAllocationCallbacks were provided when renderPass was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyRenderPass-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyRenderPass-renderPass-parameter", + "text": " If renderPass is not VK_NULL_HANDLE, renderPass must be a valid VkRenderPass handle" + }, + { + "vuid": "VUID-vkDestroyRenderPass-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyRenderPass-renderPass-parent", + "text": " 
If renderPass is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreateFramebuffer": { + "core": [ + { + "vuid": "VUID-vkCreateFramebuffer-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateFramebuffer-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkFramebufferCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateFramebuffer-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateFramebuffer-pFramebuffer-parameter", + "text": " pFramebuffer must be a valid pointer to a VkFramebuffer handle" + } + ] + }, + "VkFramebufferCreateInfo": { + "core": [ + { + "vuid": "VUID-VkFramebufferCreateInfo-attachmentCount-00876", + "text": " attachmentCount must be equal to the attachment count specified in renderPass" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00877", + "text": " Each element of pAttachments that is used as a color attachment or resolve attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00878", + "text": " Each element of pAttachments that is used as a depth/stencil attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00879", + "text": " Each element of pAttachments that is used as an input attachment by renderPass must have been created with a usage value including VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00880", + "text": " Each element of pAttachments must have been created with an VkFormat value that matches the VkFormat specified by the corresponding VkAttachmentDescription in renderPass" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00881", + "text": " Each element of pAttachments must have been created with a samples value that matches the samples value specified by the corresponding VkAttachmentDescription in renderPass" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00882", + "text": " Each element of pAttachments must have dimensions at least as large as the corresponding framebuffer dimension" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00883", + "text": " Each element of pAttachments must only specify a single mip level" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00884", + "text": " Each element of pAttachments must have been created with the identity swizzle" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-width-00885", + "text": " width must be greater than 0." + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-width-00886", + "text": " width must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferWidth" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-height-00887", + "text": " height must be greater than 0." + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-height-00888", + "text": " height must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferHeight" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-layers-00889", + "text": " layers must be greater than 0." 
+ }, + { + "vuid": "VUID-VkFramebufferCreateInfo-layers-00890", + "text": " layers must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferLayers" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-renderPass-parameter", + "text": " renderPass must be a valid VkRenderPass handle" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-parameter", + "text": " If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkImageView handles" + }, + { + "vuid": "VUID-VkFramebufferCreateInfo-commonparent", + "text": " Both of renderPass, and the elements of pAttachments that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkFramebufferCreateInfo-pAttachments-00891", + "text": " Each element of pAttachments that is a 2D or 2D array image view taken from a 3D image must not be a depth/stencil format" + } + ] + }, + "vkDestroyFramebuffer": { + "core": [ + { + "vuid": "VUID-vkDestroyFramebuffer-framebuffer-00892", + "text": " All submitted commands that refer to framebuffer must have completed execution" + }, + { + "vuid": "VUID-vkDestroyFramebuffer-framebuffer-00893", + "text": " If VkAllocationCallbacks were provided when framebuffer was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyFramebuffer-framebuffer-00894", + "text": " If no VkAllocationCallbacks were provided when framebuffer was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyFramebuffer-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyFramebuffer-framebuffer-parameter", + "text": " If framebuffer is not VK_NULL_HANDLE, framebuffer must be a valid VkFramebuffer handle" + }, + { + "vuid": "VUID-vkDestroyFramebuffer-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyFramebuffer-framebuffer-parent", + "text": " If framebuffer is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCmdBeginRenderPass": { + "core": [ + { + "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00895", + "text": " If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL then the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00897", + "text": " If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL then the corresponding 
attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with VK_IMAGE_USAGE_SAMPLED_BIT or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00898", + "text": " If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL then the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT set" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00899", + "text": " If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL then the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT set" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00900", + "text": " If any of the initialLayout members of the VkAttachmentDescription structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is not VK_IMAGE_LAYOUT_UNDEFINED, then each such initialLayout must be equal to the current layout of the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-srcStageMask-00901", + "text": " The srcStageMask and dstStageMask members of any element of the pDependencies member of VkRenderPassCreateInfo used to create renderPass must be supported by the capabilities of the queue family identified by the queueFamilyIndex member of the VkCommandPoolCreateInfo used to create the command pool which commandBuffer was allocated from." 
+ }, + { + "vuid": "VUID-vkCmdBeginRenderPass-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-pRenderPassBegin-parameter", + "text": " pRenderPassBegin must be a valid pointer to a valid VkRenderPassBeginInfo structure" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-contents-parameter", + "text": " contents must be a valid VkSubpassContents value" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBeginRenderPass-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + } + ], + "!(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-00896", + "text": " If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-01758", + "text": " If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, or VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL then the corresponding attachment image subresource of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT set" + } + ] + }, + "VkRenderPassBeginInfo": { + "core": [ + { + "vuid": "VUID-VkRenderPassBeginInfo-clearValueCount-00902", + "text": " clearValueCount must be greater than the largest attachment index in renderPass that specifies a loadOp (or stencilLoadOp, if the attachment has a depth/stencil format) of VK_ATTACHMENT_LOAD_OP_CLEAR" + }, + { + "vuid": "VUID-VkRenderPassBeginInfo-clearValueCount-00903", + "text": " If clearValueCount is not 0, pClearValues must be a valid pointer to an array of clearValueCount valid VkClearValue unions" + }, + { + "vuid": "VUID-VkRenderPassBeginInfo-renderPass-00904", + "text": " renderPass must be &amp;lt;&amp;lt;renderpass-compatibility,compatible&amp;gt;&amp;gt; with the renderPass member of the VkFramebufferCreateInfo structure specified when creating framebuffer." 
+ }, + { + "vuid": "VUID-VkRenderPassBeginInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO" + }, + { + "vuid": "VUID-VkRenderPassBeginInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupRenderPassBeginInfo or VkRenderPassSampleLocationsBeginInfoEXT" + }, + { + "vuid": "VUID-VkRenderPassBeginInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkRenderPassBeginInfo-renderPass-parameter", + "text": " renderPass must be a valid VkRenderPass handle" + }, + { + "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-parameter", + "text": " framebuffer must be a valid VkFramebuffer handle" + }, + { + "vuid": "VUID-VkRenderPassBeginInfo-commonparent", + "text": " Both of framebuffer, and renderPass must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkRenderPassSampleLocationsBeginInfoEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkRenderPassSampleLocationsBeginInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT" + }, + { + "vuid": "VUID-VkRenderPassSampleLocationsBeginInfoEXT-pAttachmentInitialSampleLocations-parameter", + "text": " If attachmentInitialSampleLocationsCount is not 0, pAttachmentInitialSampleLocations must be a valid pointer to an array of attachmentInitialSampleLocationsCount valid VkAttachmentSampleLocationsEXT structures" + }, + { + "vuid": "VUID-VkRenderPassSampleLocationsBeginInfoEXT-pPostSubpassSampleLocations-parameter", + "text": " If postSubpassSampleLocationsCount is not 0, pPostSubpassSampleLocations must be a valid pointer to an array of postSubpassSampleLocationsCount valid VkSubpassSampleLocationsEXT structures" + } + ] + }, + "VkAttachmentSampleLocationsEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkAttachmentSampleLocationsEXT-attachmentIndex-01531", + "text": " attachmentIndex must be less than the attachmentCount specified in VkRenderPassCreateInfo the render pass specified by VkRenderPassBeginInfo::renderPass was created with" + }, + { + "vuid": "VUID-VkAttachmentSampleLocationsEXT-sampleLocationsInfo-parameter", + "text": " sampleLocationsInfo must be a valid VkSampleLocationsInfoEXT structure" + } + ] + }, + "VkSubpassSampleLocationsEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkSubpassSampleLocationsEXT-subpassIndex-01532", + "text": " subpassIndex must be less than the subpassCount specified in VkRenderPassCreateInfo the render pass specified by VkRenderPassBeginInfo::renderPass was created with" + }, + { + "vuid": "VUID-VkSubpassSampleLocationsEXT-sampleLocationsInfo-parameter", + "text": " sampleLocationsInfo must be a valid VkSampleLocationsInfoEXT structure" + } + ] + }, + "VkDeviceGroupRenderPassBeginInfo": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00905", + "text": " deviceMask must be a valid device mask value" + }, + { + "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00906", + "text": " deviceMask must not be zero" + }, + { + "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-deviceMask-00907", + "text": " deviceMask must be a subset of the command buffer’s initial device mask" + }, + { + "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-deviceRenderAreaCount-00908", + "text": " deviceRenderAreaCount must either be zero or equal to the 
number of physical devices in the logical device." + }, + { + "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO" + }, + { + "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-pDeviceRenderAreas-parameter", + "text": " If deviceRenderAreaCount is not 0, pDeviceRenderAreas must be a valid pointer to an array of deviceRenderAreaCount VkRect2D structures" + } + ] + }, + "vkGetRenderAreaGranularity": { + "core": [ + { + "vuid": "VUID-vkGetRenderAreaGranularity-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetRenderAreaGranularity-renderPass-parameter", + "text": " renderPass must be a valid VkRenderPass handle" + }, + { + "vuid": "VUID-vkGetRenderAreaGranularity-pGranularity-parameter", + "text": " pGranularity must be a valid pointer to a VkExtent2D structure" + }, + { + "vuid": "VUID-vkGetRenderAreaGranularity-renderPass-parent", + "text": " renderPass must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCmdNextSubpass": { + "core": [ + { + "vuid": "VUID-vkCmdNextSubpass-None-00909", + "text": " The current subpass index must be less than the number of subpasses in the render pass minus one" + }, + { + "vuid": "VUID-vkCmdNextSubpass-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdNextSubpass-contents-parameter", + "text": " contents must be a valid VkSubpassContents value" + }, + { + "vuid": "VUID-vkCmdNextSubpass-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdNextSubpass-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdNextSubpass-renderpass", + "text": " This command must only be called inside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdNextSubpass-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + } + ] + }, + "vkCmdEndRenderPass": { + "core": [ + { + "vuid": "VUID-vkCmdEndRenderPass-None-00910", + "text": " The current subpass index must be equal to the number of subpasses in the render pass minus one" + }, + { + "vuid": "VUID-vkCmdEndRenderPass-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdEndRenderPass-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdEndRenderPass-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdEndRenderPass-renderpass", + "text": " This command must only be called inside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdEndRenderPass-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + } + ] + }, + "vkCreateShaderModule": { + "core": [ + { + "vuid": "VUID-vkCreateShaderModule-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateShaderModule-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkShaderModuleCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateShaderModule-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a 
valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateShaderModule-pShaderModule-parameter", + "text": " pShaderModule must be a valid pointer to a VkShaderModule handle" + } + ] + }, + "VkShaderModuleCreateInfo": { + "core": [ + { + "vuid": "VUID-VkShaderModuleCreateInfo-codeSize-01085", + "text": " codeSize must be greater than 0" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01089", + "text": " pCode must declare the Shader capability for SPIR-V code" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01090", + "text": " pCode must not declare any capability that is not supported by the API, as described by the &amp;lt;&amp;lt;spirvenv-module-validation, Capabilities&amp;gt;&amp;gt; section of the &amp;lt;&amp;lt;spirvenv-capabilities,SPIR-V Environment&amp;gt;&amp;gt; appendix" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01091", + "text": " If pCode declares any of the capabilities listed as optional in the &amp;lt;&amp;lt;spirvenv-capabilities-table,SPIR-V Environment&amp;gt;&amp;gt; appendix, the corresponding feature(s) must be enabled." + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkShaderModuleValidationCacheCreateInfoEXT" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-parameter", + "text": " pCode must be a valid pointer to an array of \\(codeSize \\over 4\\) uint32_t values" + } + ], + "!(VK_NV_glsl_shader)": [ + { + "vuid": "VUID-VkShaderModuleCreateInfo-codeSize-01086", + "text": " codeSize must be a multiple of 4" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01087", + "text": " pCode must point to valid SPIR-V code, formatted and packed as described by the &amp;lt;&amp;lt;spirv-spec,Khronos SPIR-V Specification&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01088", + "text": " pCode must adhere to the validation rules described by the &amp;lt;&amp;lt;spirvenv-module-validation, Validation Rules within a Module&amp;gt;&amp;gt; section of the &amp;lt;&amp;lt;spirvenv-capabilities,SPIR-V Environment&amp;gt;&amp;gt; appendix" + } + ], + "(VK_NV_glsl_shader)": [ + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01376", + "text": " If pCode points to SPIR-V code, codeSize must be a multiple of 4" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01377", + "text": " pCode must point to either valid SPIR-V code, formatted and packed as described by the Khronos SPIR-V Specification or valid GLSL code which must be written to the GL_KHR_vulkan_glsl extension specification" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01378", + "text": " If pCode points to SPIR-V code, that code must adhere to the validation rules described by the &amp;lt;&amp;lt;spirvenv-module-validation, Validation Rules within a Module&amp;gt;&amp;gt; section of the &amp;lt;&amp;lt;spirvenv-capabilities,SPIR-V Environment&amp;gt;&amp;gt; appendix" + }, + { + "vuid": "VUID-VkShaderModuleCreateInfo-pCode-01379", + "text": " If pCode points to GLSL code, it must be valid GLSL code written to the GL_KHR_vulkan_glsl GLSL extension specification" + } + ] + }, + "VkShaderModuleValidationCacheCreateInfoEXT": { + "(VK_EXT_validation_cache)": [ + { + "vuid": 
"VUID-VkShaderModuleValidationCacheCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkShaderModuleValidationCacheCreateInfoEXT-validationCache-parameter", + "text": " validationCache must be a valid VkValidationCacheEXT handle" + } + ] + }, + "vkDestroyShaderModule": { + "core": [ + { + "vuid": "VUID-vkDestroyShaderModule-shaderModule-01092", + "text": " If VkAllocationCallbacks were provided when shaderModule was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyShaderModule-shaderModule-01093", + "text": " If no VkAllocationCallbacks were provided when shaderModule was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyShaderModule-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyShaderModule-shaderModule-parameter", + "text": " If shaderModule is not VK_NULL_HANDLE, shaderModule must be a valid VkShaderModule handle" + }, + { + "vuid": "VUID-vkDestroyShaderModule-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyShaderModule-shaderModule-parent", + "text": " If shaderModule is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreateValidationCacheEXT": { + "(VK_EXT_validation_cache)": [ + { + "vuid": "VUID-vkCreateValidationCacheEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateValidationCacheEXT-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkValidationCacheCreateInfoEXT structure" + }, + { + "vuid": "VUID-vkCreateValidationCacheEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateValidationCacheEXT-pValidationCache-parameter", + "text": " pValidationCache must be a valid pointer to a VkValidationCacheEXT handle" + } + ] + }, + "VkValidationCacheCreateInfoEXT": { + "(VK_EXT_validation_cache)": [ + { + "vuid": "VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01534", + "text": " If initialDataSize is not 0, it must be equal to the size of pInitialData, as returned by vkGetValidationCacheDataEXT when pInitialData was originally retrieved" + }, + { + "vuid": "VUID-VkValidationCacheCreateInfoEXT-initialDataSize-01535", + "text": " If initialDataSize is not 0, pInitialData must have been retrieved from a previous call to vkGetValidationCacheDataEXT" + }, + { + "vuid": "VUID-VkValidationCacheCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkValidationCacheCreateInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkValidationCacheCreateInfoEXT-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkValidationCacheCreateInfoEXT-pInitialData-parameter", + "text": " If initialDataSize is not 0, pInitialData must be a valid pointer to an array of initialDataSize bytes" + } + ] + }, + "vkMergeValidationCachesEXT": { + "(VK_EXT_validation_cache)": [ + { + "vuid": "VUID-vkMergeValidationCachesEXT-dstCache-01536", + "text": " dstCache must not appear in the list of source caches" + }, + { + "vuid": "VUID-vkMergeValidationCachesEXT-device-parameter", + "text": " device must 
be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkMergeValidationCachesEXT-dstCache-parameter", + "text": " dstCache must be a valid VkValidationCacheEXT handle" + }, + { + "vuid": "VUID-vkMergeValidationCachesEXT-pSrcCaches-parameter", + "text": " pSrcCaches must be a valid pointer to an array of srcCacheCount valid VkValidationCacheEXT handles" + }, + { + "vuid": "VUID-vkMergeValidationCachesEXT-srcCacheCount-arraylength", + "text": " srcCacheCount must be greater than 0" + }, + { + "vuid": "VUID-vkMergeValidationCachesEXT-dstCache-parent", + "text": " dstCache must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkMergeValidationCachesEXT-pSrcCaches-parent", + "text": " Each element of pSrcCaches must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetValidationCacheDataEXT": { + "(VK_EXT_validation_cache)": [ + { + "vuid": "VUID-vkGetValidationCacheDataEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetValidationCacheDataEXT-validationCache-parameter", + "text": " validationCache must be a valid VkValidationCacheEXT handle" + }, + { + "vuid": "VUID-vkGetValidationCacheDataEXT-pDataSize-parameter", + "text": " pDataSize must be a valid pointer to a size_t value" + }, + { + "vuid": "VUID-vkGetValidationCacheDataEXT-pData-parameter", + "text": " If the value referenced by pDataSize is not 0, and pData is not NULL, pData must be a valid pointer to an array of pDataSize bytes" + }, + { + "vuid": "VUID-vkGetValidationCacheDataEXT-validationCache-parent", + "text": " validationCache must have been created, allocated, or retrieved from device" + } + ] + }, + "vkDestroyValidationCacheEXT": { + "(VK_EXT_validation_cache)": [ + { + "vuid": "VUID-vkDestroyValidationCacheEXT-validationCache-01537", + "text": " If VkAllocationCallbacks were provided when validationCache was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyValidationCacheEXT-validationCache-01538", + "text": " If no VkAllocationCallbacks were provided when validationCache was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyValidationCacheEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyValidationCacheEXT-validationCache-parameter", + "text": " If validationCache is not VK_NULL_HANDLE, validationCache must be a valid VkValidationCacheEXT handle" + }, + { + "vuid": "VUID-vkDestroyValidationCacheEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyValidationCacheEXT-validationCache-parent", + "text": " If validationCache is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreateComputePipelines": { + "core": [ + { + "vuid": "VUID-vkCreateComputePipelines-flags-00695", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" + }, + { + "vuid": "VUID-vkCreateComputePipelines-flags-00696", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" + }, + 
{ + "vuid": "VUID-vkCreateComputePipelines-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateComputePipelines-pipelineCache-parameter", + "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkCreateComputePipelines-pCreateInfos-parameter", + "text": " pCreateInfos must be a valid pointer to an array of createInfoCount valid VkComputePipelineCreateInfo structures" + }, + { + "vuid": "VUID-vkCreateComputePipelines-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateComputePipelines-pPipelines-parameter", + "text": " pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles" + }, + { + "vuid": "VUID-vkCreateComputePipelines-createInfoCount-arraylength", + "text": " createInfoCount must be greater than 0" + }, + { + "vuid": "VUID-vkCreateComputePipelines-pipelineCache-parent", + "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "VkComputePipelineCreateInfo": { + "core": [ + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-00697", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a compute VkPipeline" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-00698", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-00699", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-00700", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-stage-00701", + "text": " The stage member of stage must be VK_SHADER_STAGE_COMPUTE_BIT" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-stage-00702", + "text": " The shader code for the entry point identified by stage and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the &amp;lt;&amp;lt;interfaces,Shader Interfaces&amp;gt;&amp;gt; chapter" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-layout-00703", + "text": " layout must be &amp;lt;&amp;lt;descriptorsets-pipelinelayout-consistency,consistent&amp;gt;&amp;gt; with the layout of the compute shader specified in stage" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-layout-01687", + "text": " The number of resources in layout accessible to the compute shader stage must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkPipelineCreateFlagBits values" + }, + { + "vuid": 
"VUID-VkComputePipelineCreateInfo-stage-parameter", + "text": " stage must be a valid VkPipelineShaderStageCreateInfo structure" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-commonparent", + "text": " Both of basePipelineHandle, and layout that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkPipelineShaderStageCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00704", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, stage must not be VK_SHADER_STAGE_GEOMETRY_BIT" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00705", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, stage must not be VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT or VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00706", + "text": " stage must not be VK_SHADER_STAGE_ALL_GRAPHICS, or VK_SHADER_STAGE_ALL" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-pName-00707", + "text": " pName must be the name of an OpEntryPoint in module with an execution model that matches stage" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-maxClipDistances-00708", + "text": " If the identified entry point includes any variable in its interface that is declared with the ClipDistance BuiltIn decoration, that variable must not have an array size greater than VkPhysicalDeviceLimits::maxClipDistances" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-maxCullDistances-00709", + "text": " If the identified entry point includes any variable in its interface that is declared with the CullDistance BuiltIn decoration, that variable must not have an array size greater than VkPhysicalDeviceLimits::maxCullDistances" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-maxCombinedClipAndCullDistances-00710", + "text": " If the identified entry point includes any variables in its interface that are declared with the ClipDistance or CullDistance BuiltIn decoration, those variables must not have array sizes which sum to more than VkPhysicalDeviceLimits::maxCombinedClipAndCullDistances" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-maxSampleMaskWords-00711", + "text": " If the identified entry point includes any variable in its interface that is declared with the SampleMask BuiltIn decoration, that variable must not have an array size greater than VkPhysicalDeviceLimits::maxSampleMaskWords" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00712", + "text": " If stage is VK_SHADER_STAGE_VERTEX_BIT, the identified entry point must not include any input variable in its interface that is decorated with CullDistance" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00713", + "text": " If stage is VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT or VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, and the identified entry point has an OpExecutionMode instruction that specifies a patch size with OutputVertices, the patch size must be greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxTessellationPatchSize" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00714", + "text": " If stage is VK_SHADER_STAGE_GEOMETRY_BIT, the identified 
entry point must have an OpExecutionMode instruction that specifies a maximum output vertex count that is greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxGeometryOutputVertices" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00715", + "text": " If stage is VK_SHADER_STAGE_GEOMETRY_BIT, the identified entry point must have an OpExecutionMode instruction that specifies an invocation count that is greater than 0 and less than or equal to VkPhysicalDeviceLimits::maxGeometryShaderInvocations" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00716", + "text": " If stage is VK_SHADER_STAGE_GEOMETRY_BIT, and the identified entry point writes to Layer for any primitive, it must write the same value to Layer for all vertices of a given primitive" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00717", + "text": " If stage is VK_SHADER_STAGE_GEOMETRY_BIT, and the identified entry point writes to ViewportIndex for any primitive, it must write the same value to ViewportIndex for all vertices of a given primitive" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00718", + "text": " If stage is VK_SHADER_STAGE_FRAGMENT_BIT, the identified entry point must not include any output variables in its interface decorated with CullDistance" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-00719", + "text": " If stage is VK_SHADER_STAGE_FRAGMENT_BIT, and the identified entry point writes to FragDepth in any execution path, it must write to FragDepth in all execution paths" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-parameter", + "text": " stage must be a valid VkShaderStageFlagBits value" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-module-parameter", + "text": " module must be a valid VkShaderModule handle" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-pName-parameter", + "text": " pName must be a null-terminated UTF-8 string" + }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-pSpecializationInfo-parameter", + "text": " If pSpecializationInfo is not NULL, pSpecializationInfo must be a valid pointer to a valid VkSpecializationInfo structure" + } + ], + "(VK_EXT_shader_stencil_export)": [ + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-01511", + "text": " If stage is VK_SHADER_STAGE_FRAGMENT_BIT, and the identified entry point writes to FragStencilRefEXT in any execution path, it must write to FragStencilRefEXT in all execution paths" + } + ] + }, + "vkCreateGraphicsPipelines": { + "core": [ + { + "vuid": "VUID-vkCreateGraphicsPipelines-flags-00720", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" + }, + { + "vuid": "VUID-vkCreateGraphicsPipelines-flags-00721", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" + }, + { + 
"vuid": "VUID-vkCreateGraphicsPipelines-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateGraphicsPipelines-pipelineCache-parameter", + "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkCreateGraphicsPipelines-pCreateInfos-parameter", + "text": " pCreateInfos must be a valid pointer to an array of createInfoCount valid VkGraphicsPipelineCreateInfo structures" + }, + { + "vuid": "VUID-vkCreateGraphicsPipelines-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateGraphicsPipelines-pPipelines-parameter", + "text": " pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles" + }, + { + "vuid": "VUID-vkCreateGraphicsPipelines-createInfoCount-arraylength", + "text": " createInfoCount must be greater than 0" + }, + { + "vuid": "VUID-vkCreateGraphicsPipelines-pipelineCache-parent", + "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "VkGraphicsPipelineCreateInfo": { + "core": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00722", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a graphics VkPipeline" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00723", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00724", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is not -1, basePipelineHandle must be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00725", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is not VK_NULL_HANDLE, basePipelineIndex must be -1" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-00726", + "text": " The stage member of each element of pStages must be unique" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-00727", + "text": " The stage member of one element of pStages must be VK_SHADER_STAGE_VERTEX_BIT" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-stage-00728", + "text": " The stage member of each element of pStages must not be VK_SHADER_STAGE_COMPUTE_BIT" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00729", + "text": " If pStages includes a tessellation control shader stage, it must include a tessellation evaluation shader stage" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00730", + "text": " If pStages includes a tessellation evaluation shader stage, it must include a tessellation control shader stage" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00731", + "text": " If pStages includes a tessellation control shader stage and a tessellation evaluation shader stage, pTessellationState must be a valid pointer to a valid VkPipelineTessellationStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00732", + "text": " If pStages includes tessellation shader stages, the shader code of at least one stage must contain an OpExecutionMode instruction that specifies 
the type of subdivision in the pipeline" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00733", + "text": " If pStages includes tessellation shader stages, and the shader code of both stages contain an OpExecutionMode instruction that specifies the type of subdivision in the pipeline, they must both specify the same subdivision mode" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00734", + "text": " If pStages includes tessellation shader stages, the shader code of at least one stage must contain an OpExecutionMode instruction that specifies the output patch size in the pipeline" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00735", + "text": " If pStages includes tessellation shader stages, and the shader code of both contain an OpExecutionMode instruction that specifies the out patch size in the pipeline, they must both specify the same patch size" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00736", + "text": " If pStages includes tessellation shader stages, the topology member of pInputAssembly must be VK_PRIMITIVE_TOPOLOGY_PATCH_LIST" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-topology-00737", + "text": " If the topology member of pInputAssembly is VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, pStages must include tessellation shader stages" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00738", + "text": " If pStages includes a geometry shader stage, and does not include any tessellation shader stages, its shader code must contain an OpExecutionMode instruction that specifies an input primitive type that is &amp;lt;&amp;lt;shaders-geometry-execution, compatible&amp;gt;&amp;gt; with the primitive topology specified in pInputAssembly" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00739", + "text": " If pStages includes a geometry shader stage, and also includes tessellation shader stages, its shader code must contain an OpExecutionMode instruction that specifies an input primitive type that is &amp;lt;&amp;lt;shaders-geometry-execution, compatible&amp;gt;&amp;gt; with the primitive topology that is output by the tessellation stages" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00740", + "text": " If pStages includes a fragment shader stage and a geometry shader stage, and the fragment shader code reads from an input variable that is decorated with PrimitiveID, then the geometry shader code must write to a matching output variable, decorated with PrimitiveID, in all execution paths" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00741", + "text": " If pStages includes a fragment shader stage, its shader code must not read from any input attachment that is defined as VK_ATTACHMENT_UNUSED in subpass" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00742", + "text": " The shader code for the entry points identified by pStages, and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the &amp;lt;&amp;lt;interfaces,Shader Interfaces&amp;gt;&amp;gt; chapter" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00745", + "text": " If rasterization is not disabled and the subpass uses color attachments, then for each color attachment in the subpass the blendEnable member of the corresponding element of the pAttachment member of pColorBlendState must be VK_FALSE if the format of the attachment does not support color blend operations, as specified by the VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT 
flag in VkFormatProperties::linearTilingFeatures or VkFormatProperties::optimalTilingFeatures returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-attachmentCount-00746", + "text": " If rasterization is not disabled and the subpass uses color attachments, the attachmentCount member of pColorBlendState must be equal to the colorAttachmentCount used to create subpass" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00747", + "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT, the pViewports member of pViewportState must be a valid pointer to an array of pViewportState::viewportCount VkViewport structures" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00748", + "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SCISSOR, the pScissors member of pViewportState must be a valid pointer to an array of pViewportState::scissorCount VkRect2D structures" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00749", + "text": " If the wide lines feature is not enabled, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_WIDTH, the lineWidth member of pRasterizationState must be 1.0" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750", + "text": " If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pViewportState must be a valid pointer to a valid VkPipelineViewportStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751", + "text": " If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pMultisampleState must be a valid pointer to a valid VkPipelineMultisampleStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00752", + "text": " If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, and subpass uses a depth/stencil attachment, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00753", + "text": " If the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, and subpass uses color attachments, pColorBlendState must be a valid pointer to a valid VkPipelineColorBlendStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00754", + "text": " If the depth bias clamping feature is not enabled, no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BIAS, and the depthBiasEnable member of pRasterizationState is VK_TRUE, the depthBiasClamp member of pRasterizationState must be 0.0" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-layout-00756", + "text": " layout must be &amp;lt;&amp;lt;descriptorsets-pipelinelayout-consistency,consistent&amp;gt;&amp;gt; with all shaders specified in pStages" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00758", + "text": " If subpass does not use any color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must follow the rules for a &amp;lt;&amp;lt;renderpass-noattachments, zero-attachment subpass&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00759", + "text": " subpass must be a valid subpass within renderPass" + }, + { + "vuid": 
"VUID-VkGraphicsPipelineCreateInfo-layout-01688", + "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineDiscardRectangleStateCreateInfoEXT" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkPipelineCreateFlagBits values" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-parameter", + "text": " pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pVertexInputState-parameter", + "text": " pVertexInputState must be a valid pointer to a valid VkPipelineVertexInputStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pInputAssemblyState-parameter", + "text": " pInputAssemblyState must be a valid pointer to a valid VkPipelineInputAssemblyStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-parameter", + "text": " pRasterizationState must be a valid pointer to a valid VkPipelineRasterizationStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicState-parameter", + "text": " If pDynamicState is not NULL, pDynamicState must be a valid pointer to a valid VkPipelineDynamicStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-parameter", + "text": " renderPass must be a valid VkRenderPass handle" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-stageCount-arraylength", + "text": " stageCount must be greater than 0" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-commonparent", + "text": " Each of basePipelineHandle, layout, and renderPass that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "!(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00743", + "text": " If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the depthWriteEnable member of pDepthStencilState must be VK_FALSE" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00744", + "text": " If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the failOp, passOp and depthFailOp members of each of the front and back members of pDepthStencilState must be VK_STENCIL_OP_KEEP" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01756", + "text": " If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL in the 
VkAttachmentReference defined by subpass, the depthWriteEnable member of pDepthStencilState must be VK_FALSE" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01757", + "text": " If rasterization is not disabled and subpass uses a depth/stencil attachment in renderPass that has a layout of VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL in the VkAttachmentReference defined by subpass, the failOp, passOp and depthFailOp members of each of the front and back members of pDepthStencilState must be VK_STENCIL_OP_KEEP" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-01565", + "text": " If pStages includes a fragment shader stage and an input attachment was referenced by the VkRenderPassInputAttachmentAspectCreateInfo at renderPass create time, its shader code must not read from any aspect that was not specified in the aspectMask of the corresponding VkInputAttachmentAspectReference structure." + } + ], + "!(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00755", + "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive" + } + ], + "(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00755", + "text": " If the VK_EXT_depth_range_unrestricted extension is not enabled and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_DEPTH_BOUNDS, and the depthBoundsTestEnable member of pDepthStencilState is VK_TRUE, the minDepthBounds and maxDepthBounds members of pDepthStencilState must be between 0.0 and 1.0, inclusive" + } + ], + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01521", + "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure chained to the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationGridSize.width must evenly divide VkMultisamplePropertiesEXT::sampleLocationGridSize.width as returned by vkGetPhysicalDeviceMultisamplePropertiesEXT with a samples parameter equaling rasterizationSamples" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01522", + "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure chained to the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationGridSize.height must evenly divide VkMultisamplePropertiesEXT::sampleLocationGridSize.height as returned by vkGetPhysicalDeviceMultisamplePropertiesEXT with a samples parameter equaling rasterizationSamples" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01523", + "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, and the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure chained to the pNext chain of pMultisampleState is VK_TRUE, sampleLocationsInfo.sampleLocationsPerPixel must equal rasterizationSamples" + }, + { + "vuid": 
"VUID-VkGraphicsPipelineCreateInfo-sampleLocationsEnable-01524", + "text": " If the sampleLocationsEnable member of a VkPipelineSampleLocationsStateCreateInfoEXT structure chained to the pNext chain of pMultisampleState is VK_TRUE, the fragment shader code must not statically use the extended instruction InterpolateAtSample" + } + ], + "!(VK_AMD_mixed_attachment_samples)+!(VK_NV_framebuffer_mixed_samples)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-00757", + "text": " If subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must be the same as the sample count for those subpass attachments" + } + ], + "(VK_AMD_mixed_attachment_samples)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01505", + "text": " If subpass uses color and/or depth/stencil attachments, then the rasterizationSamples member of pMultisampleState must equal the maximum of the sample counts of those subpass attachments" + } + ], + "(VK_NV_framebuffer_mixed_samples)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01411", + "text": " If subpass has a depth/stencil attachment and depth test, stencil test, or depth bounds test are enabled, then the rasterizationSamples member of pMultisampleState must be the same as the sample count of the depth/stencil attachment" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-subpass-01412", + "text": " If subpass has any color attachments, then the rasterizationSamples member of pMultisampleState must be greater than or equal to the sample count for those subpass attachments" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00760", + "text": " If the renderPass has multiview enabled and subpass has more than one bit set in the view mask and multiviewTessellationShader is not enabled, then pStages must not include tessellation shaders." + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00761", + "text": " If the renderPass has multiview enabled and subpass has more than one bit set in the view mask and multiviewGeometryShader is not enabled, then pStages must not include a geometry shader." + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00762", + "text": " If the renderPass has multiview enabled and subpass has more than one bit set in the view mask, shaders in the pipeline must not write to the Layer built-in output" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-00763", + "text": " If the renderPass has multiview enabled, then all shaders must not include variables decorated with the Layer built-in decoration in their interfaces." + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-00764", + "text": " flags must not contain the VK_PIPELINE_CREATE_DISPATCH_BASE flag." 
+ } + ], + "(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-01715", + "text": " If no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, and the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure, chained to the pNext chain of pViewportState, is VK_TRUE, the pViewportWScalings member of the VkPipelineViewportWScalingStateCreateInfoNV must be a pointer to an array of VkPipelineViewportWScalingStateCreateInfoNV::viewportCount valid VkViewportWScalingNV structures" + } + ] + }, + "VkPipelineDynamicStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-01442", + "text": " Each element of pDynamicStates must be unique" + }, + { + "vuid": "VUID-VkPipelineDynamicStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineDynamicStateCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPipelineDynamicStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineDynamicStateCreateInfo-pDynamicStates-parameter", + "text": " pDynamicStates must be a valid pointer to an array of dynamicStateCount valid VkDynamicState values" + }, + { + "vuid": "VUID-VkPipelineDynamicStateCreateInfo-dynamicStateCount-arraylength", + "text": " dynamicStateCount must be greater than 0" + } + ] + }, + "vkDestroyPipeline": { + "core": [ + { + "vuid": "VUID-vkDestroyPipeline-pipeline-00765", + "text": " All submitted commands that refer to pipeline must have completed execution" + }, + { + "vuid": "VUID-vkDestroyPipeline-pipeline-00766", + "text": " If VkAllocationCallbacks were provided when pipeline was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyPipeline-pipeline-00767", + "text": " If no VkAllocationCallbacks were provided when pipeline was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyPipeline-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyPipeline-pipeline-parameter", + "text": " If pipeline is not VK_NULL_HANDLE, pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkDestroyPipeline-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyPipeline-pipeline-parent", + "text": " If pipeline is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreatePipelineCache": { + "core": [ + { + "vuid": "VUID-vkCreatePipelineCache-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreatePipelineCache-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkPipelineCacheCreateInfo structure" + }, + { + "vuid": "VUID-vkCreatePipelineCache-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreatePipelineCache-pPipelineCache-parameter", + "text": " pPipelineCache must be a valid pointer to a VkPipelineCache handle" + } + ] + }, + "VkPipelineCacheCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineCacheCreateInfo-initialDataSize-00768", + "text": " If initialDataSize is not 0, it must be equal to the size of 
pInitialData, as returned by vkGetPipelineCacheData when pInitialData was originally retrieved" + }, + { + "vuid": "VUID-VkPipelineCacheCreateInfo-initialDataSize-00769", + "text": " If initialDataSize is not 0, pInitialData must have been retrieved from a previous call to vkGetPipelineCacheData" + }, + { + "vuid": "VUID-VkPipelineCacheCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineCacheCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPipelineCacheCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineCacheCreateInfo-pInitialData-parameter", + "text": " If initialDataSize is not 0, pInitialData must be a valid pointer to an array of initialDataSize bytes" + } + ] + }, + "vkMergePipelineCaches": { + "core": [ + { + "vuid": "VUID-vkMergePipelineCaches-dstCache-00770", + "text": " dstCache must not appear in the list of source caches" + }, + { + "vuid": "VUID-vkMergePipelineCaches-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkMergePipelineCaches-dstCache-parameter", + "text": " dstCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkMergePipelineCaches-pSrcCaches-parameter", + "text": " pSrcCaches must be a valid pointer to an array of srcCacheCount valid VkPipelineCache handles" + }, + { + "vuid": "VUID-vkMergePipelineCaches-srcCacheCount-arraylength", + "text": " srcCacheCount must be greater than 0" + }, + { + "vuid": "VUID-vkMergePipelineCaches-dstCache-parent", + "text": " dstCache must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkMergePipelineCaches-pSrcCaches-parent", + "text": " Each element of pSrcCaches must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetPipelineCacheData": { + "core": [ + { + "vuid": "VUID-vkGetPipelineCacheData-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetPipelineCacheData-pipelineCache-parameter", + "text": " pipelineCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkGetPipelineCacheData-pDataSize-parameter", + "text": " pDataSize must be a valid pointer to a size_t value" + }, + { + "vuid": "VUID-vkGetPipelineCacheData-pData-parameter", + "text": " If the value referenced by pDataSize is not 0, and pData is not NULL, pData must be a valid pointer to an array of pDataSize bytes" + }, + { + "vuid": "VUID-vkGetPipelineCacheData-pipelineCache-parent", + "text": " pipelineCache must have been created, allocated, or retrieved from device" + } + ] + }, + "vkDestroyPipelineCache": { + "core": [ + { + "vuid": "VUID-vkDestroyPipelineCache-pipelineCache-00771", + "text": " If VkAllocationCallbacks were provided when pipelineCache was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyPipelineCache-pipelineCache-00772", + "text": " If no VkAllocationCallbacks were provided when pipelineCache was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyPipelineCache-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyPipelineCache-pipelineCache-parameter", + "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkDestroyPipelineCache-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid 
pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyPipelineCache-pipelineCache-parent", + "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "VkSpecializationInfo": { + "core": [ + { + "vuid": "VUID-VkSpecializationInfo-offset-00773", + "text": " The offset member of each element of pMapEntries must be less than dataSize" + }, + { + "vuid": "VUID-VkSpecializationInfo-pMapEntries-00774", + "text": " The size member of each element of pMapEntries must be less than or equal to dataSize minus offset" + }, + { + "vuid": "VUID-VkSpecializationInfo-mapEntryCount-00775", + "text": " If mapEntryCount is not 0, pMapEntries must be a valid pointer to an array of mapEntryCount valid VkSpecializationMapEntry structures" + }, + { + "vuid": "VUID-VkSpecializationInfo-pData-parameter", + "text": " If dataSize is not 0, pData must be a valid pointer to an array of dataSize bytes" + } + ] + }, + "VkSpecializationMapEntry": { + "core": [ + { + "vuid": "VUID-VkSpecializationMapEntry-constantID-00776", + "text": " For a constantID specialization constant declared in a shader, size must match the byte size of the constantID. If the specialization constant is of type boolean, size must be the byte size of VkBool32" + } + ] + }, + "vkCmdBindPipeline": { + "core": [ + { + "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-00777", + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_COMPUTE, the VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-00778", + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS, the VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-00779", + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_COMPUTE, pipeline must be a compute pipeline" + }, + { + "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-00780", + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline must be a graphics pipeline" + }, + { + "vuid": "VUID-vkCmdBindPipeline-pipeline-00781", + "text": " If the &amp;lt;&amp;lt;features-features-variableMultisampleRate,variable multisample rate&amp;gt;&amp;gt; feature is not supported, pipeline is a graphics pipeline, the current subpass has no attachments, and this is not the first call to this function with a graphics pipeline after transitioning to the current subpass, then the sample count specified by this pipeline must match that set in the previous pipeline" + }, + { + "vuid": "VUID-vkCmdBindPipeline-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBindPipeline-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-vkCmdBindPipeline-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkCmdBindPipeline-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBindPipeline-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdBindPipeline-commonparent", + "text": " Both of commandBuffer, and pipeline must have been 
created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdBindPipeline-variableSampleLocations-01525", + "text": " If VkPhysicalDeviceSampleLocationsPropertiesEXT::variableSampleLocations is VK_FALSE, and pipeline is a graphics pipeline created with a VkPipelineSampleLocationsStateCreateInfoEXT structure having its sampleLocationsEnable member set to VK_TRUE but without VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT enabled then the current render pass instance must have been begun by specifying a VkRenderPassSampleLocationsBeginInfoEXT structure whose pPostSubpassSampleLocations member contains an element with a subpassIndex matching the current subpass index and the sampleLocationsInfo member of that element must match the sampleLocationsInfo specified in VkPipelineSampleLocationsStateCreateInfoEXT when the pipeline was created" + } + ] + }, + "vkGetShaderInfoAMD": { + "(VK_AMD_shader_info)": [ + { + "vuid": "VUID-vkGetShaderInfoAMD-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetShaderInfoAMD-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkGetShaderInfoAMD-shaderStage-parameter", + "text": " shaderStage must be a valid VkShaderStageFlagBits value" + }, + { + "vuid": "VUID-vkGetShaderInfoAMD-infoType-parameter", + "text": " infoType must be a valid VkShaderInfoTypeAMD value" + }, + { + "vuid": "VUID-vkGetShaderInfoAMD-pInfoSize-parameter", + "text": " pInfoSize must be a valid pointer to a size_t value" + }, + { + "vuid": "VUID-vkGetShaderInfoAMD-pInfo-parameter", + "text": " If the value referenced by pInfoSize is not 0, and pInfo is not NULL, pInfo must be a valid pointer to an array of pInfoSize bytes" + }, + { + "vuid": "VUID-vkGetShaderInfoAMD-pipeline-parent", + "text": " pipeline must have been created, allocated, or retrieved from device" + } + ] + }, + "VkAllocationCallbacks": { + "core": [ + { + "vuid": "VUID-VkAllocationCallbacks-pfnAllocation-00632", + "text": " pfnAllocation must be a valid pointer to a valid user-defined PFN_vkAllocationFunction" + }, + { + "vuid": "VUID-VkAllocationCallbacks-pfnReallocation-00633", + "text": " pfnReallocation must be a valid pointer to a valid user-defined PFN_vkReallocationFunction" + }, + { + "vuid": "VUID-VkAllocationCallbacks-pfnFree-00634", + "text": " pfnFree must be a valid pointer to a valid user-defined PFN_vkFreeFunction" + }, + { + "vuid": "VUID-VkAllocationCallbacks-pfnInternalAllocation-00635", + "text": " If either of pfnInternalAllocation or pfnInternalFree is not NULL, both must be valid callbacks" + } + ] + }, + "vkGetPhysicalDeviceMemoryProperties": { + "core": [ + { + "vuid": "VUID-vkGetPhysicalDeviceMemoryProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceMemoryProperties-pMemoryProperties-parameter", + "text": " pMemoryProperties must be a valid pointer to a VkPhysicalDeviceMemoryProperties structure" + } + ] + }, + "vkGetPhysicalDeviceMemoryProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceMemoryProperties2-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceMemoryProperties2-pMemoryProperties-parameter", + "text": " pMemoryProperties must be a valid pointer to a VkPhysicalDeviceMemoryProperties2 
structure" + } + ] + }, + "VkPhysicalDeviceMemoryProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkPhysicalDeviceMemoryProperties2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2" + }, + { + "vuid": "VUID-VkPhysicalDeviceMemoryProperties2-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkAllocateMemory": { + "core": [ + { + "vuid": "VUID-vkAllocateMemory-pAllocateInfo-01713", + "text": " pAllocateInfo\\-&amp;gt;allocationSize must be less than or equal to VkPhysicalDeviceMemoryProperties::memoryHeaps[pAllocateInfo\\-&amp;gt;memoryTypeIndex].size as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from." + }, + { + "vuid": "VUID-vkAllocateMemory-pAllocateInfo-01714", + "text": " pAllocateInfo\\-&amp;gt;memoryTypeIndex must be less than VkPhysicalDeviceMemoryProperties::memoryTypeCount as returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created from." + }, + { + "vuid": "VUID-vkAllocateMemory-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkAllocateMemory-pAllocateInfo-parameter", + "text": " pAllocateInfo must be a valid pointer to a valid VkMemoryAllocateInfo structure" + }, + { + "vuid": "VUID-vkAllocateMemory-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkAllocateMemory-pMemory-parameter", + "text": " pMemory must be a valid pointer to a VkDeviceMemory handle" + } + ] + }, + "VkMemoryAllocateInfo": { + "!(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-00638", + "text": " allocationSize must be greater than 0" + } + ], + "(VK_KHR_external_memory)+(VK_KHR_dedicated_allocation,VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-pNext-00639", + "text": " If the pNext chain contains an instance of VkExportMemoryAllocateInfo, and any of the handle types specified in VkExportMemoryAllocateInfo::handleTypes require a dedicated allocation, as reported by vkGetPhysicalDeviceImageFormatProperties2 in VkExternalImageFormatProperties::externalMemoryProperties::externalMemoryFeatures or VkExternalBufferProperties::externalMemoryProperties::externalMemoryFeatures, the pNext chain must contain an instance of ifdef::VK_KHR_dedicated_allocation[VkMemoryDedicatedAllocateInfo]" + } + ], + "(VK_KHR_external_memory)+(VK_NV_external_memory)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-pNext-00640", + "text": " If the pNext chain contains an instance of VkExportMemoryAllocateInfo, it must not contain an instance of VkExportMemoryAllocateInfoNV or VkExportMemoryWin32HandleInfoNV." + } + ], + "(VK_KHR_external_memory_win32+VK_NV_external_memory_win32)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-pNext-00641", + "text": " If the pNext chain contains an instance of VkImportMemoryWin32HandleInfoKHR, it must not contain an instance of VkImportMemoryWin32HandleInfoNV." 
+ } + ], + "(VK_KHR_external_memory_fd)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-01742", + "text": " If the parameters define an import operation, the external handle specified was created by the Vulkan API, and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR, then the values of allocationSize and memoryTypeIndex must match those specified when the memory object being imported was created." + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-00648", + "text": " If the parameters define an import operation and the external handle is a POSIX file descriptor created outside of the Vulkan API, the value of memoryTypeIndex must be one of those returned by vkGetMemoryFdPropertiesKHR." + } + ], + "(VK_KHR_external_memory+VK_KHR_device_group)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-None-00643", + "text": " If the parameters define an import operation and the external handle specified was created by the Vulkan API, the device mask specified by VkMemoryAllocateFlagsInfo must match that specified when the memory object being imported was allocated." + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-None-00644", + "text": " If the parameters define an import operation and the external handle specified was created by the Vulkan API, the list of physical devices that comprise the logical device passed to vkAllocateMemory must match the list of physical devices that comprise the logical device on which the memory was originally allocated." + } + ], + "(VK_KHR_external_memory_win32)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-00645", + "text": " If the parameters define an import operation and the external handle is an NT handle or a global share handle created outside of the Vulkan API, the value of memoryTypeIndex must be one of those returned by vkGetMemoryWin32HandlePropertiesKHR." + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-01743", + "text": " If the parameters define an import operation, the external handle was created by the Vulkan API, and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR or VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR, then the values of allocationSize and memoryTypeIndex must match those specified when the memory object being imported was created." + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-00646", + "text": " If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, allocationSize must match the size reported in the memory requirements of the image or buffer member of the instance of VkDedicatedAllocationMemoryAllocateInfoNV included in the pNext chain." + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-00647", + "text": " If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, allocationSize must match the size specified when creating the Direct3D 12 heap from which the external handle was extracted." + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-01872", + "text": " If the protected memory feature is not enabled, the VkMemoryAllocateInfo::memoryTypeIndex must not indicate a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT." 
+ } + ], + "(VK_EXT_external_memory_host)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-memoryTypeIndex-01744", + "text": " If the parameters define an import operation and the external handle is a host pointer, the value of memoryTypeIndex must be one of those returned by vkGetMemoryHostPointerPropertiesEXT" + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-allocationSize-01745", + "text": " If the parameters define an import operation and the external handle is a host pointer, allocationSize must be an integer multiple of VkPhysicalDeviceExternalMemoryHostPropertiesEXT::minImportedHostPointerAlignment" + } + ], + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-None-01873", + "text": " If the parameters define an import operation and the external handle type is VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BIT_ANDROID:" + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-pNext-01874", + "text": " If the parameters do not define an import operation, and the pNext chain contains an instance of VkExportMemoryAllocateInfo with VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID included in its handleTypes member, and the pNext contains an instance of VkMemoryDedicatedAllocateInfo with image not equal to VK_NULL_HANDLE, then allocationSize must be 0, otherwise allocationSize must be greater than 0." + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-pNext-01875", + "text": " If the parameters define an import operation, the external handle is an Android hardware buffer, and the pNext chain includes an instance of VkMemoryDedicatedAllocateInfo with image that is not VK_NULL_HANDLE:" + } + ], + "core": [ + { + "vuid": "VUID-VkMemoryAllocateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO" + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationMemoryAllocateInfoNV, VkExportMemoryAllocateInfo, VkExportMemoryAllocateInfoNV, VkExportMemoryWin32HandleInfoKHR, VkExportMemoryWin32HandleInfoNV, VkImportAndroidHardwareBufferInfoANDROID, VkImportMemoryFdInfoKHR, VkImportMemoryHostPointerInfoEXT, VkImportMemoryWin32HandleInfoKHR, VkImportMemoryWin32HandleInfoNV, VkMemoryAllocateFlagsInfo, or VkMemoryDedicatedAllocateInfo" + }, + { + "vuid": "VUID-VkMemoryAllocateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + } + ] + }, + "VkMemoryDedicatedAllocateInfo": { + "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01432", + "text": " At least one of image and buffer must be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01433", + "text": " If image is not VK_NULL_HANDLE, VkMemoryAllocateInfo::allocationSize must equal the VkMemoryRequirements::size of the image" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01434", + "text": " If image is not VK_NULL_HANDLE, image must have been created without VK_IMAGE_CREATE_SPARSE_BINDING_BIT set in VkImageCreateInfo::flags" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-01435", + "text": " If buffer is not VK_NULL_HANDLE, VkMemoryAllocateInfo::allocationSize must equal the VkMemoryRequirements::size of the buffer" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-01436", + "text": " If buffer is not VK_NULL_HANDLE, buffer must have been created without 
VK_BUFFER_CREATE_SPARSE_BINDING_BIT set in VkBufferCreateInfo::flags" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-parameter", + "text": " If image is not VK_NULL_HANDLE, image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-parameter", + "text": " If buffer is not VK_NULL_HANDLE, buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-commonparent", + "text": " Both of buffer, and image that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_KHR_external_memory_win32)": [ + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01876", + "text": " If image is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, and the external handle was created by the Vulkan API, then the memory being imported must also be a dedicated image allocation and image must be identical to the image associated with the imported memory." + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-01877", + "text": " If buffer is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, and the external handle was created by the Vulkan API, then the memory being imported must also be a dedicated buffer allocation and buffer must be identical to the buffer associated with the imported memory." + } + ], + "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_KHR_external_memory_fd)": [ + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01878", + "text": " If image is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, the memory being imported must also be a dedicated image allocation and image must be identical to the image associated with the imported memory." + }, + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-buffer-01879", + "text": " If buffer is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation with handle type VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, the memory being imported must also be a dedicated buffer allocation and buffer must be identical to the buffer associated with the imported memory." 
+ } + ], + "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)+(VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkMemoryDedicatedAllocateInfo-image-01797", + "text": " If image is not VK_NULL_HANDLE, image must not have been created with VK_IMAGE_CREATE_DISJOINT_BIT set in VkImageCreateInfo::flags" + } + ] + }, + "VkDedicatedAllocationMemoryAllocateInfoNV": { + "(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00649", + "text": " At least one of image and buffer must be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00650", + "text": " If image is not VK_NULL_HANDLE, the image must have been created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE" + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00651", + "text": " If buffer is not VK_NULL_HANDLE, the buffer must have been created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE" + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00652", + "text": " If image is not VK_NULL_HANDLE, VkMemoryAllocateInfo::allocationSize must equal the VkMemoryRequirements::size of the image" + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00653", + "text": " If buffer is not VK_NULL_HANDLE, VkMemoryAllocateInfo::allocationSize must equal the VkMemoryRequirements::size of the buffer" + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV" + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-parameter", + "text": " If image is not VK_NULL_HANDLE, image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-parameter", + "text": " If buffer is not VK_NULL_HANDLE, buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-commonparent", + "text": " Both of buffer, and image that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_NV_dedicated_allocation)+(VK_KHR_external_memory_win32,VK_KHR_external_memory_fd)": [ + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-image-00654", + "text": " If image is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation, the memory being imported must also be a dedicated image allocation and image must be identical to the image associated with the imported memory." + }, + { + "vuid": "VUID-VkDedicatedAllocationMemoryAllocateInfoNV-buffer-00655", + "text": " If buffer is not VK_NULL_HANDLE and VkMemoryAllocateInfo defines a memory import operation, the memory being imported must also be a dedicated buffer allocation and buffer must be identical to the buffer associated with the imported memory." + } + ] + }, + "VkExportMemoryAllocateInfo": { + "(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkExportMemoryAllocateInfo-handleTypes-00656", + "text": " The bits in handleTypes must be supported and compatible, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties." 
+ }, + { + "vuid": "VUID-VkExportMemoryAllocateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO" + }, + { + "vuid": "VUID-VkExportMemoryAllocateInfo-handleTypes-parameter", + "text": " handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBits values" + } + ] + }, + "VkExportMemoryWin32HandleInfoKHR": { + "(VK_KHR_external_memory_win32)": [ + { + "vuid": "VUID-VkExportMemoryWin32HandleInfoKHR-handleTypes-00657", + "text": " If VkExportMemoryAllocateInfo::handleTypes does not include VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, VkExportMemoryWin32HandleInfoKHR must not be in the pNext chain of VkMemoryAllocateInfo." + }, + { + "vuid": "VUID-VkExportMemoryWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkExportMemoryWin32HandleInfoKHR-pAttributes-parameter", + "text": " If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value" + } + ] + }, + "VkImportMemoryWin32HandleInfoKHR": { + "(VK_KHR_external_memory_win32)": [ + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00658", + "text": " If handleType is not 0, it must be supported for import, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handle-00659", + "text": " The memory from which handle was exported, or the memory named by name must have been created on the same underlying physical device as device." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00660", + "text": " If handleType is not 0, it must be defined as an NT handle or a global share handle." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-01439", + "text": " If handleType is not VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, or VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, name must be NULL." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-01440", + "text": " If handleType is not 0 and handle is NULL, name must name a valid memory resource of the type specified by handleType." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-00661", + "text": " If handleType is not 0 and name is NULL, handle must be a valid handle of the type specified by handleType." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handle-01441", + "text": " if handle is not NULL, name must be NULL." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handle-01518", + "text": " If handle is not NULL, it must obey any requirements listed for handleType in external memory handle types compatibility." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-name-01519", + "text": " If name is not NULL, it must obey any requirements listed for handleType in external memory handle types compatibility." 
+ }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoKHR-handleType-parameter", + "text": " If handleType is not 0, handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + } + ] + }, + "vkGetMemoryWin32HandleKHR": { + "(VK_KHR_external_memory_win32)": [ + { + "vuid": "VUID-vkGetMemoryWin32HandleKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleKHR-pGetWin32HandleInfo-parameter", + "text": " pGetWin32HandleInfo must be a valid pointer to a valid VkMemoryGetWin32HandleInfoKHR structure" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleKHR-pHandle-parameter", + "text": " pHandle must be a valid pointer to a HANDLE value" + } + ] + }, + "VkMemoryGetWin32HandleInfoKHR": { + "(VK_KHR_external_memory_win32)": [ + { + "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00662", + "text": " handleType must have been included in VkExportMemoryAllocateInfo::handleTypes when memory was created." + }, + { + "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00663", + "text": " If handleType is defined as an NT handle, vkGetMemoryWin32HandleKHR must be called no more than once for each valid unique combination of memory and handleType." + }, + { + "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-00664", + "text": " handleType must be defined as an NT handle or a global share handle." + }, + { + "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR" + }, + { + "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-VkMemoryGetWin32HandleInfoKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + } + ] + }, + "vkGetMemoryWin32HandlePropertiesKHR": { + "(VK_KHR_external_memory_win32)": [ + { + "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-handle-00665", + "text": " handle must be an external memory handle created outside of the Vulkan API." + }, + { + "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-00666", + "text": " handleType must not be one of the handle types defined as opaque." + }, + { + "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandlePropertiesKHR-pMemoryWin32HandleProperties-parameter", + "text": " pMemoryWin32HandleProperties must be a valid pointer to a VkMemoryWin32HandlePropertiesKHR structure" + } + ] + }, + "VkImportMemoryFdInfoKHR": { + "(VK_KHR_external_memory_fd)": [ + { + "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-00667", + "text": " If handleType is not 0, it must be supported for import, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties." + }, + { + "vuid": "VUID-VkImportMemoryFdInfoKHR-fd-00668", + "text": " The memory from which fd was exported must have been created on the same underlying physical device as device." 
+ }, + { + "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-00669", + "text": " If handleType is not 0, it must be defined as a POSIX file descriptor handle." + }, + { + "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-00670", + "text": " If handleType is not 0, fd must be a valid handle of the type specified by handleType." + }, + { + "vuid": "VUID-VkImportMemoryFdInfoKHR-fd-01746", + "text": " The memory represented by fd must have been created from a physical device and driver that is compatible with device and handleType, as described in &amp;lt;&amp;lt;external-memory-handle-types-compatibility&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkImportMemoryFdInfoKHR-fd-01520", + "text": " fd must obey any requirements listed for handleType in &amp;lt;&amp;lt;external-memory-handle-types-compatibility,external memory handle types compatibility&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkImportMemoryFdInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR" + }, + { + "vuid": "VUID-VkImportMemoryFdInfoKHR-handleType-parameter", + "text": " If handleType is not 0, handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + } + ] + }, + "vkGetMemoryFdKHR": { + "(VK_KHR_external_memory_fd)": [ + { + "vuid": "VUID-vkGetMemoryFdKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryFdKHR-pGetFdInfo-parameter", + "text": " pGetFdInfo must be a valid pointer to a valid VkMemoryGetFdInfoKHR structure" + }, + { + "vuid": "VUID-vkGetMemoryFdKHR-pFd-parameter", + "text": " pFd must be a valid pointer to a int value" + } + ] + }, + "VkMemoryGetFdInfoKHR": { + "(VK_KHR_external_memory_fd)": [ + { + "vuid": "VUID-VkMemoryGetFdInfoKHR-handleType-00671", + "text": " handleType must have been included in VkExportMemoryAllocateInfo::handleTypes when memory was created." + }, + { + "vuid": "VUID-VkMemoryGetFdInfoKHR-handleType-00672", + "text": " handleType must be defined as a POSIX file descriptor handle." + }, + { + "vuid": "VUID-VkMemoryGetFdInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR" + }, + { + "vuid": "VUID-VkMemoryGetFdInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkMemoryGetFdInfoKHR-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-VkMemoryGetFdInfoKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + } + ] + }, + "vkGetMemoryFdPropertiesKHR": { + "(VK_KHR_external_memory_fd)": [ + { + "vuid": "VUID-vkGetMemoryFdPropertiesKHR-fd-00673", + "text": " fd must be an external memory handle created outside of the Vulkan API." + }, + { + "vuid": "VUID-vkGetMemoryFdPropertiesKHR-handleType-00674", + "text": " handleType must not be VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR." 
+ }, + { + "vuid": "VUID-vkGetMemoryFdPropertiesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryFdPropertiesKHR-handleType-parameter", + "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + }, + { + "vuid": "VUID-vkGetMemoryFdPropertiesKHR-pMemoryFdProperties-parameter", + "text": " pMemoryFdProperties must be a valid pointer to a VkMemoryFdPropertiesKHR structure" + } + ] + }, + "VkImportMemoryHostPointerInfoEXT": { + "(VK_EXT_external_memory_host)": [ + { + "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-01747", + "text": " If handleType is not 0, it must be supported for import, as reported in VkExternalMemoryPropertiesKHR" + }, + { + "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-01748", + "text": " If handleType is not 0, it must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT or VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT" + }, + { + "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-pHostPointer-01749", + "text": " pHostPointer must be a pointer aligned to an integer multiple of VkPhysicalDeviceExternalMemoryHostPropertiesEXT::minImportedHostPointerAlignment" + }, + { + "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-01750", + "text": " If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, pHostPointer must be a pointer to allocationSize number of bytes of host memory, where allocationSize is the member of the VkMemoryAllocateInfo structure this structure is chained to" + }, + { + "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-01751", + "text": " If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, pHostPointer must be a pointer to allocationSize number of bytes of host mapped foreign memory, where allocationSize is the member of the VkMemoryAllocateInfo structure this structure is chained to" + }, + { + "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT" + }, + { + "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter", + "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + } + ] + }, + "vkGetMemoryHostPointerPropertiesEXT": { + "(VK_EXT_external_memory_host)": [ + { + "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01752", + "text": " handleType must be VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT or VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT" + }, + { + "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-pHostPointer-01753", + "text": " pHostPointer must be a pointer aligned to an integer multiple of VkPhysicalDeviceExternalMemoryHostPropertiesEXT::minImportedHostPointerAlignment" + }, + { + "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01754", + "text": " If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, pHostPointer must be a pointer to host memory" + }, + { + "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-01755", + "text": " If handleType is VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, pHostPointer must be a pointer to host mapped foreign memory" + }, + { + "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter", + "text": " handleType must be a valid 
VkExternalMemoryHandleTypeFlagBits value" + }, + { + "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-pMemoryHostPointerProperties-parameter", + "text": " pMemoryHostPointerProperties must be a valid pointer to a VkMemoryHostPointerPropertiesEXT structure" + } + ] + }, + "VkMemoryHostPointerPropertiesEXT": { + "(VK_EXT_external_memory_host)": [ + { + "vuid": "VUID-VkMemoryHostPointerPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT" + }, + { + "vuid": "VUID-VkMemoryHostPointerPropertiesEXT-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "VkImportAndroidHardwareBufferInfoANDROID": { + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01880", + "text": " If buffer is not NULL, Android hardware buffers must be supported for import, as reported by VkExternalImageFormatProperties or VkExternalBufferProperties." + }, + { + "vuid": "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-01881", + "text": " If buffer is not NULL, it must be a valid Android hardware buffer object with format and usage compatible with Vulkan as described by VkExternalMemoryHandleTypeFlagBits." + }, + { + "vuid": "VUID-VkImportAndroidHardwareBufferInfoANDROID-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID" + }, + { + "vuid": "VUID-VkImportAndroidHardwareBufferInfoANDROID-buffer-parameter", + "text": " buffer must be a valid pointer to a AHardwareBuffer value" + } + ] + }, + "vkGetMemoryAndroidHardwareBufferANDROID": { + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-vkGetMemoryAndroidHardwareBufferANDROID-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryAndroidHardwareBufferANDROID-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkMemoryGetAndroidHardwareBufferInfoANDROID structure" + }, + { + "vuid": "VUID-vkGetMemoryAndroidHardwareBufferANDROID-pBuffer-parameter", + "text": " pBuffer must be a valid pointer to a valid pointer to a AHardwareBuffer value" + } + ] + }, + "VkMemoryGetAndroidHardwareBufferInfoANDROID": { + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-handleTypes-01882", + "text": " VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must have been included in VkExportMemoryAllocateInfoKHR::handleTypes when memory was created." + }, + { + "vuid": "VUID-VkMemoryGetAndroidHardwareBufferInfoANDROID-pNext-01883", + "text": " If the pNext chain of the VkMemoryAllocateInfo used to allocate memory included a VkMemoryDedicatedAllocateInfo with non-NULL image member, then that image must already be bound to memory." + } + ] + }, + "vkGetAndroidHardwareBufferPropertiesANDROID": { + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-01884", + "text": " buffer must be a valid Android hardware buffer object with at least one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags." 
+ }, + { + "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-buffer-parameter", + "text": " buffer must be a valid pointer to a valid AHardwareBuffer value" + }, + { + "vuid": "VUID-vkGetAndroidHardwareBufferPropertiesANDROID-pProperties-parameter", + "text": " pProperties must be a valid pointer to a VkAndroidHardwareBufferPropertiesANDROID structure" + } + ] + }, + "VkAndroidHardwareBufferFormatPropertiesANDROID": { + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkAndroidHardwareBufferFormatPropertiesANDROID-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID" + } + ] + }, + "VkExportMemoryAllocateInfoNV": { + "(VK_NV_external_memory)": [ + { + "vuid": "VUID-VkExportMemoryAllocateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV" + }, + { + "vuid": "VUID-VkExportMemoryAllocateInfoNV-handleTypes-parameter", + "text": " handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + } + ] + }, + "VkExportMemoryWin32HandleInfoNV": { + "(VK_NV_external_memory_win32)": [ + { + "vuid": "VUID-VkExportMemoryWin32HandleInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV" + }, + { + "vuid": "VUID-VkExportMemoryWin32HandleInfoNV-pAttributes-parameter", + "text": " If pAttributes is not NULL, pAttributes must be a valid pointer to a valid SECURITY_ATTRIBUTES value" + } + ] + }, + "VkImportMemoryWin32HandleInfoNV": { + "(VK_NV_external_memory_win32)": [ + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handleType-01327", + "text": " handleType must not have more than one bit set." + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handle-01328", + "text": " handle must be a valid handle to memory, obtained as specified by handleType." 
+ }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV" + }, + { + "vuid": "VUID-VkImportMemoryWin32HandleInfoNV-handleType-parameter", + "text": " handleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + } + ] + }, + "vkGetMemoryWin32HandleNV": { + "(VK_NV_external_memory_win32)": [ + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-01326", + "text": " handleType must be a flag specified in VkExportMemoryAllocateInfoNV::handleTypes when allocating memory" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-parameter", + "text": " handleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-handleType-requiredbitmask", + "text": " handleType must not be 0" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-pHandle-parameter", + "text": " pHandle must be a valid pointer to a HANDLE value" + }, + { + "vuid": "VUID-vkGetMemoryWin32HandleNV-memory-parent", + "text": " memory must have been created, allocated, or retrieved from device" + } + ] + }, + "VkMemoryAllocateFlagsInfo": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00675", + "text": " If VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT is set, deviceMask must be a valid device mask." + }, + { + "vuid": "VUID-VkMemoryAllocateFlagsInfo-deviceMask-00676", + "text": " If VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT is set, deviceMask must not be zero" + }, + { + "vuid": "VUID-VkMemoryAllocateFlagsInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO" + }, + { + "vuid": "VUID-VkMemoryAllocateFlagsInfo-flags-parameter", + "text": " flags must be a valid combination of VkMemoryAllocateFlagBits values" + } + ] + }, + "vkFreeMemory": { + "core": [ + { + "vuid": "VUID-vkFreeMemory-memory-00677", + "text": " All submitted commands that refer to memory (via images or buffers) must have completed execution" + }, + { + "vuid": "VUID-vkFreeMemory-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkFreeMemory-memory-parameter", + "text": " If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-vkFreeMemory-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkFreeMemory-memory-parent", + "text": " If memory is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkMapMemory": { + "core": [ + { + "vuid": "VUID-vkMapMemory-memory-00678", + "text": " memory must not be currently mapped" + }, + { + "vuid": "VUID-vkMapMemory-offset-00679", + "text": " offset must be less than the size of memory" + }, + { + "vuid": "VUID-vkMapMemory-size-00680", + "text": " If size is not equal to VK_WHOLE_SIZE, size must be greater than 0" + }, + { + "vuid": "VUID-vkMapMemory-size-00681", + "text": " If size is not equal to VK_WHOLE_SIZE, size must be less than or equal to the size of the memory minus offset" + }, + { + "vuid": "VUID-vkMapMemory-memory-00682", + "text": " memory must have been created 
with a memory type that reports VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT" + }, + { + "vuid": "VUID-vkMapMemory-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkMapMemory-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-vkMapMemory-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-vkMapMemory-ppData-parameter", + "text": " ppData must be a valid pointer to a pointer value" + }, + { + "vuid": "VUID-vkMapMemory-memory-parent", + "text": " memory must have been created, allocated, or retrieved from device" + } + ], + "(VK_KHR_device_group)": [ + { + "vuid": "VUID-vkMapMemory-memory-00683", + "text": " memory must not have been allocated with multiple instances." + } + ] + }, + "vkFlushMappedMemoryRanges": { + "core": [ + { + "vuid": "VUID-vkFlushMappedMemoryRanges-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkFlushMappedMemoryRanges-pMemoryRanges-parameter", + "text": " pMemoryRanges must be a valid pointer to an array of memoryRangeCount valid VkMappedMemoryRange structures" + }, + { + "vuid": "VUID-vkFlushMappedMemoryRanges-memoryRangeCount-arraylength", + "text": " memoryRangeCount must be greater than 0" + } + ] + }, + "vkInvalidateMappedMemoryRanges": { + "core": [ + { + "vuid": "VUID-vkInvalidateMappedMemoryRanges-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkInvalidateMappedMemoryRanges-pMemoryRanges-parameter", + "text": " pMemoryRanges must be a valid pointer to an array of memoryRangeCount valid VkMappedMemoryRange structures" + }, + { + "vuid": "VUID-vkInvalidateMappedMemoryRanges-memoryRangeCount-arraylength", + "text": " memoryRangeCount must be greater than 0" + } + ] + }, + "VkMappedMemoryRange": { + "core": [ + { + "vuid": "VUID-VkMappedMemoryRange-memory-00684", + "text": " memory must be currently mapped" + }, + { + "vuid": "VUID-VkMappedMemoryRange-size-00685", + "text": " If size is not equal to VK_WHOLE_SIZE, offset and size must specify a range contained within the currently mapped range of memory" + }, + { + "vuid": "VUID-VkMappedMemoryRange-size-00686", + "text": " If size is equal to VK_WHOLE_SIZE, offset must be within the currently mapped range of memory" + }, + { + "vuid": "VUID-VkMappedMemoryRange-size-01389", + "text": " If size is equal to VK_WHOLE_SIZE, the end of the current mapping of memory must be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize bytes from the beginning of the memory object." + }, + { + "vuid": "VUID-VkMappedMemoryRange-offset-00687", + "text": " offset must be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize" + }, + { + "vuid": "VUID-VkMappedMemoryRange-size-01390", + "text": " If size is not equal to VK_WHOLE_SIZE, size must either be a multiple of VkPhysicalDeviceLimits::nonCoherentAtomSize, or offset plus size must equal the size of memory." 
+ }, + { + "vuid": "VUID-VkMappedMemoryRange-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE" + }, + { + "vuid": "VUID-VkMappedMemoryRange-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkMappedMemoryRange-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + } + ] + }, + "vkUnmapMemory": { + "core": [ + { + "vuid": "VUID-vkUnmapMemory-memory-00689", + "text": " memory must be currently mapped" + }, + { + "vuid": "VUID-vkUnmapMemory-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkUnmapMemory-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-vkUnmapMemory-memory-parent", + "text": " memory must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetDeviceMemoryCommitment": { + "core": [ + { + "vuid": "VUID-vkGetDeviceMemoryCommitment-memory-00690", + "text": " memory must have been created with a memory type that reports VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT" + }, + { + "vuid": "VUID-vkGetDeviceMemoryCommitment-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceMemoryCommitment-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-vkGetDeviceMemoryCommitment-pCommittedMemoryInBytes-parameter", + "text": " pCommittedMemoryInBytes must be a valid pointer to a VkDeviceSize value" + }, + { + "vuid": "VUID-vkGetDeviceMemoryCommitment-memory-parent", + "text": " memory must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetDeviceGroupPeerMemoryFeatures": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-heapIndex-00691", + "text": " heapIndex must be less than memoryHeapCount" + }, + { + "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-localDeviceIndex-00692", + "text": " localDeviceIndex must be a valid device index" + }, + { + "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-remoteDeviceIndex-00693", + "text": " remoteDeviceIndex must be a valid device index" + }, + { + "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-localDeviceIndex-00694", + "text": " localDeviceIndex must not equal remoteDeviceIndex" + }, + { + "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-parameter", + "text": " pPeerMemoryFeatures must be a valid pointer to a VkPeerMemoryFeatureFlags value" + }, + { + "vuid": "VUID-vkGetDeviceGroupPeerMemoryFeatures-pPeerMemoryFeatures-requiredbitmask", + "text": " pPeerMemoryFeatures must not be 0" + } + ] + }, + "vkCreateBuffer": { + "core": [ + { + "vuid": "VUID-vkCreateBuffer-flags-00911", + "text": " If the flags member of pCreateInfo includes VK_BUFFER_CREATE_SPARSE_BINDING_BIT, creating this VkBuffer must not cause the total required sparse memory for all currently valid sparse resources on the device to exceed VkPhysicalDeviceLimits::sparseAddressSpaceSize" + }, + { + "vuid": "VUID-vkCreateBuffer-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateBuffer-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkBufferCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateBuffer-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer 
to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateBuffer-pBuffer-parameter", + "text": " pBuffer must be a valid pointer to a VkBuffer handle" + } + ] + }, + "VkBufferCreateInfo": { + "core": [ + { + "vuid": "VUID-VkBufferCreateInfo-size-00912", + "text": " size must be greater than 0" + }, + { + "vuid": "VUID-VkBufferCreateInfo-sharingMode-00913", + "text": " If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a valid pointer to an array of queueFamilyIndexCount uint32_t values" + }, + { + "vuid": "VUID-VkBufferCreateInfo-sharingMode-00914", + "text": " If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1" + }, + { + "vuid": "VUID-VkBufferCreateInfo-flags-00915", + "text": " If the &amp;lt;&amp;lt;features-features-sparseBinding,sparse bindings&amp;gt;&amp;gt; feature is not enabled, flags must not contain VK_BUFFER_CREATE_SPARSE_BINDING_BIT" + }, + { + "vuid": "VUID-VkBufferCreateInfo-flags-00916", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidencyBuffer,sparse buffer residency&amp;gt;&amp;gt; feature is not enabled, flags must not contain VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkBufferCreateInfo-flags-00917", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidencyAliased,sparse aliased residency&amp;gt;&amp;gt; feature is not enabled, flags must not contain VK_BUFFER_CREATE_SPARSE_ALIASED_BIT" + }, + { + "vuid": "VUID-VkBufferCreateInfo-flags-00918", + "text": " If flags contains VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT or VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, it must also contain VK_BUFFER_CREATE_SPARSE_BINDING_BIT" + }, + { + "vuid": "VUID-VkBufferCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO" + }, + { + "vuid": "VUID-VkBufferCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationBufferCreateInfoNV or VkExternalMemoryBufferCreateInfo" + }, + { + "vuid": "VUID-VkBufferCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkBufferCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkBufferCreateFlagBits values" + }, + { + "vuid": "VUID-VkBufferCreateInfo-usage-parameter", + "text": " usage must be a valid combination of VkBufferUsageFlagBits values" + }, + { + "vuid": "VUID-VkBufferCreateInfo-usage-requiredbitmask", + "text": " usage must not be 0" + }, + { + "vuid": "VUID-VkBufferCreateInfo-sharingMode-parameter", + "text": " sharingMode must be a valid VkSharingMode value" + } + ], + "!(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkBufferCreateInfo-sharingMode-01391", + "text": " If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the physicalDevice that was used to create device" + } + ], + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkBufferCreateInfo-sharingMode-01419", + "text": " If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by either vkGetPhysicalDeviceQueueFamilyProperties or vkGetPhysicalDeviceQueueFamilyProperties2 for the physicalDevice that was used to 
create device" + } + ], + "(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkBufferCreateInfo-pNext-00920", + "text": " If the pNext chain contains an instance of VkExternalMemoryBufferCreateInfo, its handleTypes member must only contain bits that are also in VkExternalBufferProperties::externalMemoryProperties.pname:compatibleHandleTypes, as returned by vkGetPhysicalDeviceExternalBufferProperties with pExternalBufferInfo\\-&amp;gt;handleType equal to any one of the handle types specified in VkExternalMemoryBufferCreateInfo::handleTypes" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkBufferCreateInfo-flags-01887", + "text": " If the protected memory feature is not enabled, flags must not contain VK_BUFFER_CREATE_PROTECTED_BIT" + }, + { + "vuid": "VUID-VkBufferCreateInfo-None-01888", + "text": " If any of the bits VK_BUFFER_CREATE_SPARSE_BINDING_BIT, VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, or VK_BUFFER_CREATE_SPARSE_ALIASED_BIT are set, VK_BUFFER_CREATE_PROTECTED_BIT must not also be set" + } + ], + "(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-VkBufferCreateInfo-pNext-01571", + "text": " If the pNext chain contains an instance of VkDedicatedAllocationBufferCreateInfoNV, and the dedicatedAllocation member of the chained structure is VK_TRUE, then flags must not include VK_BUFFER_CREATE_SPARSE_BINDING_BIT, VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, or VK_BUFFER_CREATE_SPARSE_ALIASED_BIT" + } + ] + }, + "VkDedicatedAllocationBufferCreateInfoNV": { + "(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-VkDedicatedAllocationBufferCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV" + } + ] + }, + "VkExternalMemoryBufferCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkExternalMemoryBufferCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO" + }, + { + "vuid": "VUID-VkExternalMemoryBufferCreateInfo-handleTypes-parameter", + "text": " handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBits values" + } + ] + }, + "vkDestroyBuffer": { + "core": [ + { + "vuid": "VUID-vkDestroyBuffer-buffer-00922", + "text": " All submitted commands that refer to buffer, either directly or via a VkBufferView, must have completed execution" + }, + { + "vuid": "VUID-vkDestroyBuffer-buffer-00923", + "text": " If VkAllocationCallbacks were provided when buffer was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyBuffer-buffer-00924", + "text": " If no VkAllocationCallbacks were provided when buffer was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyBuffer-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyBuffer-buffer-parameter", + "text": " If buffer is not VK_NULL_HANDLE, buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkDestroyBuffer-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyBuffer-buffer-parent", + "text": " If buffer is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreateBufferView": { + "core": [ + { + "vuid": "VUID-vkCreateBufferView-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateBufferView-pCreateInfo-parameter", + "text": " pCreateInfo must be 
a valid pointer to a valid VkBufferViewCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateBufferView-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateBufferView-pView-parameter", + "text": " pView must be a valid pointer to a VkBufferView handle" + } + ] + }, + "VkBufferViewCreateInfo": { + "core": [ + { + "vuid": "VUID-VkBufferViewCreateInfo-offset-00925", + "text": " offset must be less than the size of buffer" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-offset-00926", + "text": " offset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-range-00928", + "text": " If range is not equal to VK_WHOLE_SIZE, range must be greater than 0" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-range-00929", + "text": " If range is not equal to VK_WHOLE_SIZE, range must be a multiple of the element size of format" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-range-00930", + "text": " If range is not equal to VK_WHOLE_SIZE, range divided by the element size of format must be less than or equal to VkPhysicalDeviceLimits::maxTexelBufferElements" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-offset-00931", + "text": " If range is not equal to VK_WHOLE_SIZE, the sum of offset and range must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-buffer-00932", + "text": " buffer must have been created with a usage value containing at least one of VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-buffer-00933", + "text": " If buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, format must be supported for uniform texel buffers, as specified by the VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-buffer-00934", + "text": " If buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, format must be supported for storage texel buffers, as specified by the VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-buffer-00935", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-format-parameter", + "text": " format must be a valid VkFormat value" + } + ] + }, + "vkDestroyBufferView": { + "core": [ + { + "vuid": "VUID-vkDestroyBufferView-bufferView-00936", + "text": " All submitted commands that refer to bufferView must have completed execution" + }, + { + "vuid": "VUID-vkDestroyBufferView-bufferView-00937", + "text": " If VkAllocationCallbacks were provided when bufferView was created, a compatible set of callbacks must be 
provided here" + }, + { + "vuid": "VUID-vkDestroyBufferView-bufferView-00938", + "text": " If no VkAllocationCallbacks were provided when bufferView was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyBufferView-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyBufferView-bufferView-parameter", + "text": " If bufferView is not VK_NULL_HANDLE, bufferView must be a valid VkBufferView handle" + }, + { + "vuid": "VUID-vkDestroyBufferView-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyBufferView-bufferView-parent", + "text": " If bufferView is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreateImage": { + "core": [ + { + "vuid": "VUID-vkCreateImage-flags-00939", + "text": " If the flags member of pCreateInfo includes VK_IMAGE_CREATE_SPARSE_BINDING_BIT, creating this VkImage must not cause the total required sparse memory for all currently valid sparse resources on the device to exceed VkPhysicalDeviceLimits::sparseAddressSpaceSize" + }, + { + "vuid": "VUID-vkCreateImage-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateImage-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkImageCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateImage-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateImage-pImage-parameter", + "text": " pImage must be a valid pointer to a VkImage handle" + } + ] + }, + "VkImageCreateInfo": { + "!(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkImageCreateInfo-format-00940", + "text": " The combination of format, imageType, tiling, usage, and flags must be supported, as indicated by a VK_SUCCESS return value from vkGetPhysicalDeviceImageFormatProperties invoked with the same values passed to the corresponding parameters." + } + ], + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkImageCreateInfo-pNext-01889", + "text": " If the pNext chain doesn’t contain an instance of VkExternalFormatANDROID, or if format is not VK_FORMAT_UNDEFINED, the combination of format, imageType, tiling, usage, and flags must be supported, as indicated by a VK_SUCCESS return value from vkGetPhysicalDeviceImageFormatProperties invoked with the same values passed to the corresponding parameters." 
+ }, + { + "vuid": "VUID-VkImageCreateInfo-pNext-01892", + "text": " If the pNext chain includes a VkExternalMemoryImageCreateInfo structure whose handleTypes member includes VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:" + }, + { + "vuid": "VUID-VkImageCreateInfo-pNext-01893", + "text": " If the pNext chain includes a VkExternalFormatANDROID structure whose externalFormat member is not 0:" + } + ], + "core": [ + { + "vuid": "VUID-VkImageCreateInfo-sharingMode-00941", + "text": " If sharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a valid pointer to an array of queueFamilyIndexCount uint32_t values" + }, + { + "vuid": "VUID-VkImageCreateInfo-sharingMode-00942", + "text": " If sharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1" + }, + { + "vuid": "VUID-VkImageCreateInfo-format-00943", + "text": " format must not be VK_FORMAT_UNDEFINED" + }, + { + "vuid": "VUID-VkImageCreateInfo-extent-00944", + "text": " extent::width must be greater than 0." + }, + { + "vuid": "VUID-VkImageCreateInfo-extent-00945", + "text": " extent::height must be greater than 0." + }, + { + "vuid": "VUID-VkImageCreateInfo-extent-00946", + "text": " extent::depth must be greater than 0." + }, + { + "vuid": "VUID-VkImageCreateInfo-mipLevels-00947", + "text": " mipLevels must be greater than 0" + }, + { + "vuid": "VUID-VkImageCreateInfo-arrayLayers-00948", + "text": " arrayLayers must be greater than 0" + }, + { + "vuid": "VUID-VkImageCreateInfo-flags-00949", + "text": " If flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, imageType must be VK_IMAGE_TYPE_2D" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00951", + "text": " If imageType is VK_IMAGE_TYPE_1D, extent.width must be less than or equal to VkPhysicalDeviceLimits::maxImageDimension1D, or VkImageFormatProperties::maxExtent.width (as returned by vkGetPhysicalDeviceImageFormatProperties with format, imageType, tiling, usage, and flags equal to those in this structure) - whichever is higher" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00952", + "text": " If imageType is VK_IMAGE_TYPE_2D and flags does not contain VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, extent.width and extent.height must be less than or equal to VkPhysicalDeviceLimits::maxImageDimension2D, or VkImageFormatProperties::maxExtent.width/height (as returned by vkGetPhysicalDeviceImageFormatProperties with format, imageType, tiling, usage, and flags equal to those in this structure) - whichever is higher" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00953", + "text": " If imageType is VK_IMAGE_TYPE_2D and flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, extent.width and extent.height must be less than or equal to VkPhysicalDeviceLimits::maxImageDimensionCube, or VkImageFormatProperties::maxExtent.width/height (as returned by vkGetPhysicalDeviceImageFormatProperties with format, imageType, tiling, usage, and flags equal to those in this structure) - whichever is higher" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00954", + "text": " If imageType is VK_IMAGE_TYPE_2D and flags contains VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, extent.width and extent.height must be equal and arrayLayers must be greater than or equal to 6" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00955", + "text": " If imageType is VK_IMAGE_TYPE_3D, extent.width, extent.height and extent.depth must be less than or equal to VkPhysicalDeviceLimits::maxImageDimension3D, or VkImageFormatProperties::maxExtent.width/height/depth 
(as returned by vkGetPhysicalDeviceImageFormatProperties with format, imageType, tiling, usage, and flags equal to those in this structure) - whichever is higher" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00956", + "text": " If imageType is VK_IMAGE_TYPE_1D, both extent.height and extent.depth must be 1" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00957", + "text": " If imageType is VK_IMAGE_TYPE_2D, extent.depth must be 1" + }, + { + "vuid": "VUID-VkImageCreateInfo-mipLevels-00958", + "text": " mipLevels must be less than or equal to {lfloor}log2(max(extent.width, extent.height, extent.depth)){rfloor} + 1." + }, + { + "vuid": "VUID-VkImageCreateInfo-extent-00959", + "text": " mipLevels must be less than or equal to VkImageFormatProperties::maxMipLevels (as returned by vkGetPhysicalDeviceImageFormatProperties with format, imageType, tiling, usage, and flags equal to those in this structure)" + }, + { + "vuid": "VUID-VkImageCreateInfo-arrayLayers-00960", + "text": " arrayLayers must be less than or equal to VkImageFormatProperties::maxArrayLayers (as returned by vkGetPhysicalDeviceImageFormatProperties with format, imageType, tiling, usage, and flags equal to those in this structure)" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00961", + "text": " If imageType is VK_IMAGE_TYPE_3D, arrayLayers must be 1." + }, + { + "vuid": "VUID-VkImageCreateInfo-samples-00962", + "text": " If samples is not VK_SAMPLE_COUNT_1_BIT, imageType must be VK_IMAGE_TYPE_2D, flags must not contain VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, tiling must be VK_IMAGE_TILING_OPTIMAL, and mipLevels must be equal to 1" + }, + { + "vuid": "VUID-VkImageCreateInfo-usage-00963", + "text": " If usage includes VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, then bits other than VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, and VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT must not be set" + }, + { + "vuid": "VUID-VkImageCreateInfo-usage-00964", + "text": " If usage includes VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, extent.width must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferWidth" + }, + { + "vuid": "VUID-VkImageCreateInfo-usage-00965", + "text": " If usage includes VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, extent.height must be less than or equal to VkPhysicalDeviceLimits::maxFramebufferHeight" + }, + { + "vuid": "VUID-VkImageCreateInfo-usage-00966", + "text": " If usage includes VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, usage must also contain at least one of VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT." 
+ }, + { + "vuid": "VUID-VkImageCreateInfo-samples-00967", + "text": " samples must be a bit value that is set in VkImageFormatProperties::sampleCounts returned by vkGetPhysicalDeviceImageFormatProperties with format, imageType, tiling, usage, and flags equal to those in this structure" + }, + { + "vuid": "VUID-VkImageCreateInfo-usage-00968", + "text": " If the &amp;lt;&amp;lt;features-features-shaderStorageImageMultisample,multisampled storage images&amp;gt;&amp;gt; feature is not enabled, and usage contains VK_IMAGE_USAGE_STORAGE_BIT, samples must be VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-flags-00969", + "text": " If the &amp;lt;&amp;lt;features-features-sparseBinding,sparse bindings&amp;gt;&amp;gt; feature is not enabled, flags must not contain VK_IMAGE_CREATE_SPARSE_BINDING_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-flags-01924", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidencyAliased,sparse aliased residency&amp;gt;&amp;gt; feature is not enabled, flags must not contain VK_IMAGE_CREATE_SPARSE_ALIASED_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00970", + "text": " If imageType is VK_IMAGE_TYPE_1D, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00971", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidencyImage2D,sparse residency for 2D images&amp;gt;&amp;gt; feature is not enabled, and imageType is VK_IMAGE_TYPE_2D, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00972", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidencyImage3D,sparse residency for 3D images&amp;gt;&amp;gt; feature is not enabled, and imageType is VK_IMAGE_TYPE_3D, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00973", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidency2Samples,sparse residency for images with 2 samples&amp;gt;&amp;gt; feature is not enabled, imageType is VK_IMAGE_TYPE_2D, and samples is VK_SAMPLE_COUNT_2_BIT, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00974", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidency4Samples,sparse residency for images with 4 samples&amp;gt;&amp;gt; feature is not enabled, imageType is VK_IMAGE_TYPE_2D, and samples is VK_SAMPLE_COUNT_4_BIT, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00975", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidency8Samples,sparse residency for images with 8 samples&amp;gt;&amp;gt; feature is not enabled, imageType is VK_IMAGE_TYPE_2D, and samples is VK_SAMPLE_COUNT_8_BIT, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-00976", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidency16Samples,sparse residency for images with 16 samples&amp;gt;&amp;gt; feature is not enabled, imageType is VK_IMAGE_TYPE_2D, and samples is VK_SAMPLE_COUNT_16_BIT, flags must not contain VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-flags-00987", + "text": " If flags contains VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, it must also contain VK_IMAGE_CREATE_SPARSE_BINDING_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-None-01925", + "text": " If 
any of the bits VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT are set, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT must not also be set" + }, + { + "vuid": "VUID-VkImageCreateInfo-initialLayout-00993", + "text": " initialLayout must be VK_IMAGE_LAYOUT_UNDEFINED or VK_IMAGE_LAYOUT_PREINITIALIZED." + }, + { + "vuid": "VUID-VkImageCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO" + }, + { + "vuid": "VUID-VkImageCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDedicatedAllocationImageCreateInfoNV, VkExternalFormatANDROID, VkExternalMemoryImageCreateInfo, VkExternalMemoryImageCreateInfoNV, VkImageFormatListCreateInfoKHR, or VkImageSwapchainCreateInfoKHR" + }, + { + "vuid": "VUID-VkImageCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkImageCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkImageCreateFlagBits values" + }, + { + "vuid": "VUID-VkImageCreateInfo-imageType-parameter", + "text": " imageType must be a valid VkImageType value" + }, + { + "vuid": "VUID-VkImageCreateInfo-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkImageCreateInfo-samples-parameter", + "text": " samples must be a valid VkSampleCountFlagBits value" + }, + { + "vuid": "VUID-VkImageCreateInfo-tiling-parameter", + "text": " tiling must be a valid VkImageTiling value" + }, + { + "vuid": "VUID-VkImageCreateInfo-usage-parameter", + "text": " usage must be a valid combination of VkImageUsageFlagBits values" + }, + { + "vuid": "VUID-VkImageCreateInfo-usage-requiredbitmask", + "text": " usage must not be 0" + }, + { + "vuid": "VUID-VkImageCreateInfo-sharingMode-parameter", + "text": " sharingMode must be a valid VkSharingMode value" + }, + { + "vuid": "VUID-VkImageCreateInfo-initialLayout-parameter", + "text": " initialLayout must be a valid VkImageLayout value" + } + ], + "!(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkImageCreateInfo-sharingMode-01392", + "text": " If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the physicalDevice that was used to create device" + } + ], + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkImageCreateInfo-sharingMode-01420", + "text": " If sharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by either vkGetPhysicalDeviceQueueFamilyProperties or vkGetPhysicalDeviceQueueFamilyProperties2 for the physicalDevice that was used to create device" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkImageCreateInfo-flags-00950", + "text": " If flags contains VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, imageType must be VK_IMAGE_TYPE_3D" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkImageCreateInfo-flags-01890", + "text": " If the protected memory feature is not enabled, flags must not contain VK_IMAGE_CREATE_PROTECTED_BIT." 
+ }, + { + "vuid": "VUID-VkImageCreateInfo-None-01891", + "text": " If any of the bits VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT are set, VK_IMAGE_CREATE_PROTECTED_BIT must not also be set." + } + ], + "(VK_VERSION_1_1,VK_KHR_external_memory)+(VK_NV_external_memory)": [ + { + "vuid": "VUID-VkImageCreateInfo-pNext-00988", + "text": " If the pNext chain contains an instance of VkExternalMemoryImageCreateInfoNV, it must not contain an instance of VkExternalMemoryImageCreateInfo." + } + ], + "(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkImageCreateInfo-pNext-00990", + "text": " If the pNext chain contains an instance of VkExternalMemoryImageCreateInfo, its handleTypes member must only contain bits that are also in VkExternalImageFormatProperties::externalMemoryProperties::compatibleHandleTypes, as returned by vkGetPhysicalDeviceImageFormatProperties2 with format, imageType, tiling, usage, and flags equal to those in this structure, and with an instance of VkPhysicalDeviceExternalImageFormatInfo in the pNext chain, with a handleType equal to any one of the handle types specified in VkExternalMemoryImageCreateInfo::handleTypes" + } + ], + "(VK_NV_external_memory+VK_NV_external_memory_capabilities)": [ + { + "vuid": "VUID-VkImageCreateInfo-pNext-00991", + "text": " If the pNext chain contains an instance of VkExternalMemoryImageCreateInfoNV, its handleTypes member must only contain bits that are also in VkExternalImageFormatPropertiesNV::externalMemoryProperties::compatibleHandleTypes, as returned by vkGetPhysicalDeviceExternalImageFormatPropertiesNV with format, imageType, tiling, usage, and flags equal to those in this structure, and with externalHandleType equal to any one of the handle types specified in VkExternalMemoryImageCreateInfoNV::handleTypes" + } + ], + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkImageCreateInfo-physicalDeviceCount-01421", + "text": " If the logical device was created with VkDeviceGroupDeviceCreateInfo::physicalDeviceCount equal to 1, flags must not contain VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-flags-00992", + "text": " If flags contains VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, then mipLevels must be one, arrayLayers must be one, imageType must be VK_IMAGE_TYPE_2D, and tiling must be VK_IMAGE_TILING_OPTIMAL" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkImageCreateInfo-flags-01572", + "text": " If flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, then format must be a &amp;lt;&amp;lt;appendix-compressedtex-bc,block-compressed image format&amp;gt;&amp;gt;, an &amp;lt;&amp;lt;appendix-compressedtex-etc2, ETC compressed image format&amp;gt;&amp;gt;, or an &amp;lt;&amp;lt;appendix-compressedtex-astc, ASTC compressed image format&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkImageCreateInfo-flags-01573", + "text": " If flags contains VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, then flags must also contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT." 
+ } + ], + "(VK_VERSION_1_1,VK_KHR_external_memory,VK_NV_external_memory)": [ + { + "vuid": "VUID-VkImageCreateInfo-pNext-01443", + "text": " If the pNext chain includes a ifdef::VK_VERSION_1_1,VK_KHR_external_memory[VkExternalMemoryImageCreateInfo]" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageCreateInfo-format-01574", + "text": " If the image format is one of those listed in &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion&amp;gt;&amp;gt;:" + }, + { + "vuid": "VUID-VkImageCreateInfo-tiling-01575", + "text": " If tiling is VK_IMAGE_TILING_OPTIMAL, format is a multi-planar format, and VkFormatProperties::optimalTilingFeatures (as returned by vkGetPhysicalDeviceFormatProperties with the same value of format) does not include VK_FORMAT_FEATURE_DISJOINT_BIT, flags must not contain VK_IMAGE_CREATE_DISJOINT_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-tiling-01576", + "text": " If tiling is VK_IMAGE_TILING_LINEAR, format is a multi-planar format, and VkFormatProperties::linearTilingFeatures (as returned by vkGetPhysicalDeviceFormatProperties with the same value of format) does not include VK_FORMAT_FEATURE_DISJOINT_BIT, flags must not contain VK_IMAGE_CREATE_DISJOINT_BIT" + }, + { + "vuid": "VUID-VkImageCreateInfo-format-01577", + "text": " If format is not a multi-planar format, and flags does not include VK_IMAGE_CREATE_ALIAS_BIT, flags must not contain VK_IMAGE_CREATE_DISJOINT_BIT" + } + ], + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkImageCreateInfo-flags-01533", + "text": " If flags contains VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT format must be a depth or depth/stencil format" + } + ] + }, + "VkDedicatedAllocationImageCreateInfoNV": { + "(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-VkDedicatedAllocationImageCreateInfoNV-dedicatedAllocation-00994", + "text": " If dedicatedAllocation is VK_TRUE, VkImageCreateInfo::flags must not include VK_IMAGE_CREATE_SPARSE_BINDING_BIT, VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, or VK_IMAGE_CREATE_SPARSE_ALIASED_BIT" + }, + { + "vuid": "VUID-VkDedicatedAllocationImageCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV" + } + ] + }, + "VkExternalMemoryImageCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_external_memory)": [ + { + "vuid": "VUID-VkExternalMemoryImageCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO" + }, + { + "vuid": "VUID-VkExternalMemoryImageCreateInfo-handleTypes-parameter", + "text": " handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBits values" + }, + { + "vuid": "VUID-VkExternalMemoryImageCreateInfo-handleTypes-requiredbitmask", + "text": " handleTypes must not be 0" + } + ] + }, + "VkExternalMemoryImageCreateInfoNV": { + "(VK_NV_external_memory)": [ + { + "vuid": "VUID-VkExternalMemoryImageCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkExternalMemoryImageCreateInfoNV-handleTypes-parameter", + "text": " handleTypes must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + } + ] + }, + "VkExternalFormatANDROID": { + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkExternalFormatANDROID-externalFormat-01894", + "text": " externalFormat must be 0 or a value returned in the externalFormat member of VkAndroidHardwareBufferFormatPropertiesANDROID by an earlier call to 
vkGetAndroidHardwareBufferPropertiesANDROID" + }, + { + "vuid": "VUID-VkExternalFormatANDROID-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID" + } + ] + }, + "VkImageSwapchainCreateInfoKHR": { + "(VK_VERSION_1_1,VK_KHR_device_group)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-VkImageSwapchainCreateInfoKHR-swapchain-00995", + "text": " If swapchain is not VK_NULL_HANDLE, the fields of VkImageCreateInfo must match the &amp;lt;&amp;lt;swapchain-wsi-image-create-info, implied image creation parameters&amp;gt;&amp;gt; of the swapchain" + }, + { + "vuid": "VUID-VkImageSwapchainCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkImageSwapchainCreateInfoKHR-swapchain-parameter", + "text": " If swapchain is not VK_NULL_HANDLE, swapchain must be a valid VkSwapchainKHR handle" + } + ] + }, + "VkImageFormatListCreateInfoKHR": { + "(VK_KHR_image_format_list)": [ + { + "vuid": "VUID-VkImageFormatListCreateInfoKHR-viewFormatCount-01578", + "text": " If viewFormatCount is not 0, all of the formats in the pViewFormats array must be compatible with the format specified in the format field of VkImageCreateInfo, as described in the compatibility table." + }, + { + "vuid": "VUID-VkImageFormatListCreateInfoKHR-flags-01579", + "text": " If VkImageCreateInfo::flags does not contain VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, viewFormatCount must be 0 or 1." + }, + { + "vuid": "VUID-VkImageFormatListCreateInfoKHR-viewFormatCount-01580", + "text": " If viewFormatCount is not 0, VkImageCreateInfo::format must be in pViewFormats." + }, + { + "vuid": "VUID-VkImageFormatListCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkImageFormatListCreateInfoKHR-pViewFormats-parameter", + "text": " If viewFormatCount is not 0, pViewFormats must be a valid pointer to an array of viewFormatCount valid VkFormat values" + } + ] + }, + "vkGetImageSubresourceLayout": { + "core": [ + { + "vuid": "VUID-vkGetImageSubresourceLayout-image-00996", + "text": " image must have been created with tiling equal to VK_IMAGE_TILING_LINEAR" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-aspectMask-00997", + "text": " The aspectMask member of pSubresource must only have a single bit set" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-mipLevel-01716", + "text": " The mipLevel member of pSubresource must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-arrayLayer-01717", + "text": " The arrayLayer member of pSubresource must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-pSubresource-parameter", + "text": " pSubresource must be a valid pointer to a valid VkImageSubresource structure" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-pLayout-parameter", + "text": " pLayout must be a valid pointer to a VkSubresourceLayout structure" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-image-parent", + "text": " image must have been created, allocated, or retrieved from device" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + 
{ + "vuid": "VUID-vkGetImageSubresourceLayout-format-01581", + "text": " If the format of image is a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar format&amp;gt;&amp;gt; with two planes, the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-vkGetImageSubresourceLayout-format-01582", + "text": " If the format of image is a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar format&amp;gt;&amp;gt; with three planes, the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT or VK_IMAGE_ASPECT_PLANE_2_BIT" + } + ], + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-vkGetImageSubresourceLayout-image-01895", + "text": " If image was created with the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID external memory handle type, then image must be bound to memory." + } + ] + }, + "VkImageSubresource": { + "core": [ + { + "vuid": "VUID-VkImageSubresource-aspectMask-parameter", + "text": " aspectMask must be a valid combination of VkImageAspectFlagBits values" + }, + { + "vuid": "VUID-VkImageSubresource-aspectMask-requiredbitmask", + "text": " aspectMask must not be 0" + } + ] + }, + "vkDestroyImage": { + "core": [ + { + "vuid": "VUID-vkDestroyImage-image-01000", + "text": " All submitted commands that refer to image, either directly or via a VkImageView, must have completed execution" + }, + { + "vuid": "VUID-vkDestroyImage-image-01001", + "text": " If VkAllocationCallbacks were provided when image was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyImage-image-01002", + "text": " If no VkAllocationCallbacks were provided when image was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyImage-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyImage-image-parameter", + "text": " If image is not VK_NULL_HANDLE, image must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkDestroyImage-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyImage-image-parent", + "text": " If image is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreateImageView": { + "core": [ + { + "vuid": "VUID-vkCreateImageView-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateImageView-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkImageViewCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateImageView-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateImageView-pView-parameter", + "text": " pView must be a valid pointer to a VkImageView handle" + } + ] + }, + "VkImageViewCreateInfo": { + "core": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01003", + "text": " If image was not created with VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT then viewType must not be VK_IMAGE_VIEW_TYPE_CUBE or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-viewType-01004", + "text": " If the &amp;lt;&amp;lt;features-features-imageCubeArray,image cubemap arrays&amp;gt;&amp;gt; feature is not enabled, 
viewType must not be VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01006", + "text": " If image was created with VK_IMAGE_TILING_LINEAR, format must be format that has at least one supported feature bit present in the value of VkFormatProperties::linearTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01007", + "text": " image must have been created with a usage value containing at least one of VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, or VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01008", + "text": " If image was created with VK_IMAGE_TILING_LINEAR and usage contains VK_IMAGE_USAGE_SAMPLED_BIT, format must be supported for sampled images, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT flag in VkFormatProperties::linearTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01009", + "text": " If image was created with VK_IMAGE_TILING_LINEAR and usage contains VK_IMAGE_USAGE_STORAGE_BIT, format must be supported for storage images, as specified by the VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT flag in VkFormatProperties::linearTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01010", + "text": " If image was created with VK_IMAGE_TILING_LINEAR and usage contains VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, format must be supported for color attachments, as specified by the VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT flag in VkFormatProperties::linearTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01011", + "text": " If image was created with VK_IMAGE_TILING_LINEAR and usage contains VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, format must be supported for depth/stencil attachments, as specified by the VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT flag in VkFormatProperties::linearTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01012", + "text": " If image was created with VK_IMAGE_TILING_OPTIMAL, format must be format that has at least one supported feature bit present in the value of VkFormatProperties::optimalTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01013", + "text": " If image was created with VK_IMAGE_TILING_OPTIMAL and usage contains VK_IMAGE_USAGE_SAMPLED_BIT, format must be supported for sampled images, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT flag in VkFormatProperties::optimalTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01014", + "text": " If image was created with VK_IMAGE_TILING_OPTIMAL and usage contains VK_IMAGE_USAGE_STORAGE_BIT, format must be supported for storage images, as specified by the VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT flag in VkFormatProperties::optimalTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01015", + 
"text": " If image was created with VK_IMAGE_TILING_OPTIMAL and usage contains VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, format must be supported for color attachments, as specified by the VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT flag in VkFormatProperties::optimalTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01016", + "text": " If image was created with VK_IMAGE_TILING_OPTIMAL and usage contains VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, format must be supported for depth/stencil attachments, as specified by the VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT flag in VkFormatProperties::optimalTilingFeatures returned by vkGetPhysicalDeviceFormatProperties with the same value of format" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01478", + "text": " subresourceRange.baseMipLevel must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01718", + "text": " If subresourceRange.levelCount is not VK_REMAINING_MIP_LEVELS, subresourceRange.baseMipLevel + subresourceRange.levelCount must be less than or equal to the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01018", + "text": " If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, format must be compatible with the format used to create image, as defined in &amp;lt;&amp;lt;features-formats-compatibility-classes,Format Compatibility Classes&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01020", + "text": " If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-subResourceRange-01021", + "text": " subresourceRange and viewType must be compatible with the image, as described in the &amp;lt;&amp;lt;resources-image-views-compatibility,compatibility table&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkImageViewUsageCreateInfo or VkSamplerYcbcrConversionInfo" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-viewType-parameter", + "text": " viewType must be a valid VkImageViewType value" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-components-parameter", + "text": " components must be a valid VkComponentMapping structure" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-parameter", + "text": " subresourceRange must be a valid VkImageSubresourceRange structure" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01005", + "text": " If image was created with VK_IMAGE_TYPE_3D but without VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set 
then viewType must not be VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01482", + "text": " If image is not a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, or viewType is not VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01483", + "text": " If subresourceRange::layerCount is not VK_REMAINING_ARRAY_LAYERS, image is not a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, or viewType is not VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::layerCount must be non-zero and subresourceRange::baseArrayLayer + subresourceRange::layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01484", + "text": " If image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::baseArrayLayer must be less than the extent.depth specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01485", + "text": " If subresourceRange::layerCount is not VK_REMAINING_ARRAY_LAYERS, image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange::layerCount must be non-zero and subresourceRange::baseArrayLayer + subresourceRange::layerCount must be less than or equal to the extent.depth specified in VkImageCreateInfo when image was created" + } + ], + "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01480", + "text": " subresourceRange.baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-01719", + "text": " If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01759", + "text": " If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, but without the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, format must be compatible with the format used to create image, as defined in &amp;lt;&amp;lt;features-formats-compatibility-classes,Format Compatibility Classes&amp;gt;&amp;gt;" + } + ], + "!(VK_VERSION_1_1,VK_KHR_maintenance2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01760", + "text": " If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, and if the format of the image is not a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar&amp;gt;&amp;gt; format, format must be compatible with the format used to create image, as defined in &amp;lt;&amp;lt;features-formats-compatibility-classes,Format Compatibility Classes&amp;gt;&amp;gt;" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": 
"VUID-VkImageViewCreateInfo-image-01761", + "text": " If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, but without the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, and if the format of the image is not a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar&amp;gt;&amp;gt; format, format must be compatible with the format used to create image, as defined in &amp;lt;&amp;lt;features-formats-compatibility-classes,Format Compatibility Classes&amp;gt;&amp;gt;" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01583", + "text": " If image was created with the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, format must be compatible with, or must be an uncompressed format that is size-compatible with, the format used to create image." + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01584", + "text": " If image was created with the VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT flag, the levelCount and layerCount members of subresourceRange must both be 1." + } + ], + "(VK_KHR_image_format_list)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-pNext-01585", + "text": " If a VkImageFormatListCreateInfoKHR structure was included in the pNext chain of the VkImageCreateInfo struct used when creating image and the viewFormatCount field of VkImageFormatListCreateInfoKHR is not zero then format must be one of the formats in VkImageFormatListCreateInfoKHR::pViewFormats." + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01586", + "text": " If image was created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, if the format of the image is a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar&amp;gt;&amp;gt; format, and if subresourceRange.aspectMask is one of VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT, then format must be compatible with the VkFormat for the plane of the image format indicated by subresourceRange.aspectMask, as defined in &amp;lt;&amp;lt;features-formats-compatible-planes&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkImageViewCreateInfo-image-01762", + "text": " If image was not created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag," + } + ], + "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01019", + "text": " If image was not created with the VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT flag, format must be identical to the format used to create image" + } + ], + "(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkImageViewCreateInfo-image-01896", + "text": " If image has an &amp;lt;&amp;lt;memory-external-android-hardware-buffer-external-formats,external format&amp;gt;&amp;gt;:" + } + ] + }, + "VkImageViewUsageCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkImageViewUsageCreateInfo-usage-01587", + "text": " usage must not include any set bits that were not set in the usage member of the VkImageCreateInfo structure used to create the image this image view is created from." 
+ }, + { + "vuid": "VUID-VkImageViewUsageCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO" + }, + { + "vuid": "VUID-VkImageViewUsageCreateInfo-usage-parameter", + "text": " usage must be a valid combination of VkImageUsageFlagBits values" + }, + { + "vuid": "VUID-VkImageViewUsageCreateInfo-usage-requiredbitmask", + "text": " usage must not be 0" + } + ] + }, + "VkImageSubresourceRange": { + "core": [ + { + "vuid": "VUID-VkImageSubresourceRange-levelCount-01720", + "text": " If levelCount is not VK_REMAINING_MIP_LEVELS, it must be greater than 0" + }, + { + "vuid": "VUID-VkImageSubresourceRange-layerCount-01721", + "text": " If layerCount is not VK_REMAINING_ARRAY_LAYERS, it must be greater than 0" + }, + { + "vuid": "VUID-VkImageSubresourceRange-aspectMask-parameter", + "text": " aspectMask must be a valid combination of VkImageAspectFlagBits values" + }, + { + "vuid": "VUID-VkImageSubresourceRange-aspectMask-requiredbitmask", + "text": " aspectMask must not be 0" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageSubresourceRange-aspectMask-01670", + "text": " If aspectMask includes VK_IMAGE_ASPECT_COLOR_BIT, then it must not include any of VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + } + ] + }, + "VkComponentMapping": { + "core": [ + { + "vuid": "VUID-VkComponentMapping-r-parameter", + "text": " r must be a valid VkComponentSwizzle value" + }, + { + "vuid": "VUID-VkComponentMapping-g-parameter", + "text": " g must be a valid VkComponentSwizzle value" + }, + { + "vuid": "VUID-VkComponentMapping-b-parameter", + "text": " b must be a valid VkComponentSwizzle value" + }, + { + "vuid": "VUID-VkComponentMapping-a-parameter", + "text": " a must be a valid VkComponentSwizzle value" + } + ] + }, + "vkDestroyImageView": { + "core": [ + { + "vuid": "VUID-vkDestroyImageView-imageView-01026", + "text": " All submitted commands that refer to imageView must have completed execution" + }, + { + "vuid": "VUID-vkDestroyImageView-imageView-01027", + "text": " If VkAllocationCallbacks were provided when imageView was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyImageView-imageView-01028", + "text": " If no VkAllocationCallbacks were provided when imageView was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyImageView-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyImageView-imageView-parameter", + "text": " If imageView is not VK_NULL_HANDLE, imageView must be a valid VkImageView handle" + }, + { + "vuid": "VUID-vkDestroyImageView-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyImageView-imageView-parent", + "text": " If imageView is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetBufferMemoryRequirements": { + "core": [ + { + "vuid": "VUID-vkGetBufferMemoryRequirements-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetBufferMemoryRequirements-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkGetBufferMemoryRequirements-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements structure" + }, + { + "vuid": 
"VUID-vkGetBufferMemoryRequirements-buffer-parent", + "text": " buffer must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetImageMemoryRequirements": { + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkGetImageMemoryRequirements-image-01588", + "text": " image must not have been created with the VK_IMAGE_CREATE_DISJOINT_BIT flag set" + } + ], + "core": [ + { + "vuid": "VUID-vkGetImageMemoryRequirements-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetImageMemoryRequirements-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkGetImageMemoryRequirements-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements structure" + }, + { + "vuid": "VUID-vkGetImageMemoryRequirements-image-parent", + "text": " image must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetBufferMemoryRequirements2": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ + { + "vuid": "VUID-vkGetBufferMemoryRequirements2-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetBufferMemoryRequirements2-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkBufferMemoryRequirementsInfo2 structure" + }, + { + "vuid": "VUID-vkGetBufferMemoryRequirements2-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure" + } + ] + }, + "VkBufferMemoryRequirementsInfo2": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ + { + "vuid": "VUID-VkBufferMemoryRequirementsInfo2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2" + }, + { + "vuid": "VUID-VkBufferMemoryRequirementsInfo2-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkBufferMemoryRequirementsInfo2-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + } + ] + }, + "vkGetImageMemoryRequirements2": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ + { + "vuid": "VUID-vkGetImageMemoryRequirements2-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetImageMemoryRequirements2-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkImageMemoryRequirementsInfo2 structure" + }, + { + "vuid": "VUID-vkGetImageMemoryRequirements2-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure" + } + ] + }, + "VkImageMemoryRequirementsInfo2": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-01589", + "text": " If image was created with a multi-planar format and the VK_IMAGE_CREATE_DISJOINT_BIT flag, there must be a VkImagePlaneMemoryRequirementsInfo in the pNext chain of the VkImageMemoryRequirementsInfo2 structure" + }, + { + "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-01590", + "text": " If image was not created with the VK_IMAGE_CREATE_DISJOINT_BIT flag, there must not be a VkImagePlaneMemoryRequirementsInfo in the pNext chain of the VkImageMemoryRequirementsInfo2 structure" + }, + { + "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-01591", + "text": " If image was created with a single-plane format, there must not be a VkImagePlaneMemoryRequirementsInfo in the pNext chain of the 
VkImageMemoryRequirementsInfo2 structure" + } + ], + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-01897", + "text": " If image was created with the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID external memory handle type, then image must be bound to memory." + } + ], + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ + { + "vuid": "VUID-VkImageMemoryRequirementsInfo2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2" + }, + { + "vuid": "VUID-VkImageMemoryRequirementsInfo2-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkImagePlaneMemoryRequirementsInfo" + }, + { + "vuid": "VUID-VkImageMemoryRequirementsInfo2-image-parameter", + "text": " image must be a valid VkImage handle" + } + ] + }, + "VkImagePlaneMemoryRequirementsInfo": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-01592", + "text": " planeAspect must be an aspect that exists in the format; that is, for a two-plane image planeAspect must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT, and for a three-plane image planeAspect must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-VkImagePlaneMemoryRequirementsInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO" + }, + { + "vuid": "VUID-VkImagePlaneMemoryRequirementsInfo-planeAspect-parameter", + "text": " planeAspect must be a valid VkImageAspectFlagBits value" + } + ] + }, + "VkMemoryRequirements2": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ + { + "vuid": "VUID-VkMemoryRequirements2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2" + }, + { + "vuid": "VUID-VkMemoryRequirements2-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkMemoryDedicatedRequirements" + } + ] + }, + "VkMemoryDedicatedRequirements": { + "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-VkMemoryDedicatedRequirements-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS" + } + ] + }, + "vkBindBufferMemory": { + "core": [ + { + "vuid": "VUID-vkBindBufferMemory-buffer-01029", + "text": " buffer must not already be backed by a memory object" + }, + { + "vuid": "VUID-vkBindBufferMemory-buffer-01030", + "text": " buffer must not have been created with any sparse memory binding flags" + }, + { + "vuid": "VUID-vkBindBufferMemory-memoryOffset-01031", + "text": " memoryOffset must be less than the size of memory" + }, + { + "vuid": "VUID-vkBindBufferMemory-buffer-01032", + "text": " If buffer was created with the VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, memoryOffset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment" + }, + { + "vuid": "VUID-vkBindBufferMemory-buffer-01033", + "text": " If buffer was created with the VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, memoryOffset must be a multiple of VkPhysicalDeviceLimits::minUniformBufferOffsetAlignment" + }, + { + "vuid": "VUID-vkBindBufferMemory-buffer-01034", + "text": " If buffer was created with the VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, memoryOffset must be a 
multiple of VkPhysicalDeviceLimits::minStorageBufferOffsetAlignment" + }, + { + "vuid": "VUID-vkBindBufferMemory-memory-01035", + "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer" + }, + { + "vuid": "VUID-vkBindBufferMemory-memoryOffset-01036", + "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer" + }, + { + "vuid": "VUID-vkBindBufferMemory-size-01037", + "text": " The size member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer must be less than or equal to the size of memory minus memoryOffset" + }, + { + "vuid": "VUID-vkBindBufferMemory-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkBindBufferMemory-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkBindBufferMemory-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-vkBindBufferMemory-buffer-parent", + "text": " buffer must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkBindBufferMemory-memory-parent", + "text": " memory must have been created, allocated, or retrieved from device" + } + ], + "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-vkBindBufferMemory-buffer-01444", + "text": " If buffer requires a dedicated allocation(as reported by vkGetBufferMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for buffer), memory must have been created with VkMemoryDedicatedAllocateInfo::buffer equal to buffer" + }, + { + "vuid": "VUID-vkBindBufferMemory-memory-01508", + "text": " If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::buffer was not VK_NULL_HANDLE, then buffer must equal VkMemoryDedicatedAllocateInfo::buffer, and memoryOffset must be zero." 
+ } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkBindBufferMemory-None-01898", + "text": " If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit set, the buffer must be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + }, + { + "vuid": "VUID-vkBindBufferMemory-None-01899", + "text": " If buffer was created with the VK_BUFFER_CREATE_PROTECTED_BIT bit not set, the buffer must not be bound to a memory object created with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + } + ], + "(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-vkBindBufferMemory-buffer-01038", + "text": " If buffer was created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::buffer equal to a buffer handle created with identical creation parameters to buffer and memoryOffset must be zero" + } + ], + "(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-vkBindBufferMemory-buffer-01039", + "text": " If buffer was not created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image" + } + ] + }, + "vkBindBufferMemory2": { + "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [ + { + "vuid": "VUID-vkBindBufferMemory2-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkBindBufferMemory2-pBindInfos-parameter", + "text": " pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindBufferMemoryInfo structures" + }, + { + "vuid": "VUID-vkBindBufferMemory2-bindInfoCount-arraylength", + "text": " bindInfoCount must be greater than 0" + } + ] + }, + "VkBindBufferMemoryInfo": { + "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01593", + "text": " buffer must not already be backed by a memory object" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01594", + "text": " buffer must not have been created with any sparse memory binding flags" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-memoryOffset-01595", + "text": " memoryOffset must be less than the size of memory" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01596", + "text": " If buffer was created with the VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, memoryOffset must be a multiple of VkPhysicalDeviceLimits::minTexelBufferOffsetAlignment" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01597", + "text": " If buffer was created with the VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, memoryOffset must be a multiple of VkPhysicalDeviceLimits::minUniformBufferOffsetAlignment" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01598", + "text": " If buffer was created with the VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, memoryOffset must be a multiple of VkPhysicalDeviceLimits::minStorageBufferOffsetAlignment" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-memory-01599", + "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-memoryOffset-01600", + "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to 
vkGetBufferMemoryRequirements with buffer" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-size-01601", + "text": " The size member of the VkMemoryRequirements structure returned from a call to vkGetBufferMemoryRequirements with buffer must be less than or equal to the size of memory minus memoryOffset" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkBindBufferMemoryDeviceGroupInfo" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-commonparent", + "text": " Both of buffer, and memory must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01602", + "text": " If buffer requires a dedicated allocation(as reported by vkGetBufferMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for buffer), memory must have been created with VkMemoryDedicatedAllocateInfo::buffer equal to buffer and memoryOffset must be zero" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-memory-01900", + "text": " If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::buffer was not VK_NULL_HANDLE, then buffer must equal VkMemoryDedicatedAllocateInfo::buffer and memoryOffset must be zero." 
+ } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01603", + "text": " If buffer was created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::buffer equal to buffer and memoryOffset must be zero" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-01604", + "text": " If buffer was not created with VkDedicatedAllocationBufferCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkBindBufferMemoryInfo-pNext-01605", + "text": " If the pNext chain includes VkBindBufferMemoryDeviceGroupInfo, all instances of memory specified by VkBindBufferMemoryDeviceGroupInfo::pDeviceIndices must have been allocated" + } + ] + }, + "VkBindBufferMemoryDeviceGroupInfo": { + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkBindBufferMemoryDeviceGroupInfo-deviceIndexCount-01606", + "text": " deviceIndexCount must either be zero or equal to the number of physical devices in the logical device" + }, + { + "vuid": "VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-01607", + "text": " All elements of pDeviceIndices must be valid device indices" + }, + { + "vuid": "VUID-VkBindBufferMemoryDeviceGroupInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO" + }, + { + "vuid": "VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-parameter", + "text": " If deviceIndexCount is not 0, pDeviceIndices must be a valid pointer to an array of deviceIndexCount uint32_t values" + } + ] + }, + "vkBindImageMemory": { + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkBindImageMemory-image-01608", + "text": " image must not have been created with the VK_IMAGE_CREATE_DISJOINT_BIT set." 
+ } + ], + "core": [ + { + "vuid": "VUID-vkBindImageMemory-image-01044", + "text": " image must not already be backed by a memory object" + }, + { + "vuid": "VUID-vkBindImageMemory-image-01045", + "text": " image must not have been created with any sparse memory binding flags" + }, + { + "vuid": "VUID-vkBindImageMemory-memoryOffset-01046", + "text": " memoryOffset must be less than the size of memory" + }, + { + "vuid": "VUID-vkBindImageMemory-memory-01047", + "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image" + }, + { + "vuid": "VUID-vkBindImageMemory-memoryOffset-01048", + "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image" + }, + { + "vuid": "VUID-vkBindImageMemory-size-01049", + "text": " The size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image must be less than or equal to the size of memory minus memoryOffset" + }, + { + "vuid": "VUID-vkBindImageMemory-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkBindImageMemory-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkBindImageMemory-memory-parameter", + "text": " memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-vkBindImageMemory-image-parent", + "text": " image must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkBindImageMemory-memory-parent", + "text": " memory must have been created, allocated, or retrieved from device" + } + ], + "(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-vkBindImageMemory-image-01445", + "text": " If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image" + }, + { + "vuid": "VUID-vkBindImageMemory-memory-01509", + "text": " If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero." 
+ } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkBindImageMemory-None-01901", + "text": " If image was created with the VK_IMAGE_CREATE_PROTECTED_BIT bit set, the image must be bound to a memory object allocated with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + }, + { + "vuid": "VUID-vkBindImageMemory-None-01902", + "text": " If image was created with the VK_IMAGE_CREATE_PROTECTED_BIT bit not set, the image must not be bound to a memory object created with a memory type that reports VK_MEMORY_PROPERTY_PROTECTED_BIT" + } + ], + "(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-vkBindImageMemory-image-01050", + "text": " If image was created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::image equal to an image handle created with identical creation parameters to image and memoryOffset must be zero" + } + ], + "(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-vkBindImageMemory-image-01051", + "text": " If image was not created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image" + } + ] + }, + "vkBindImageMemory2": { + "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [ + { + "vuid": "VUID-vkBindImageMemory2-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkBindImageMemory2-pBindInfos-parameter", + "text": " pBindInfos must be a valid pointer to an array of bindInfoCount valid VkBindImageMemoryInfo structures" + }, + { + "vuid": "VUID-vkBindImageMemory2-bindInfoCount-arraylength", + "text": " bindInfoCount must be greater than 0" + } + ] + }, + "VkBindImageMemoryInfo": { + "(VK_VERSION_1_1,VK_KHR_bind_memory2)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-image-01609", + "text": " image must not already be backed by a memory object" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-image-01610", + "text": " image must not have been created with any sparse memory binding flags" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-memoryOffset-01611", + "text": " memoryOffset must be less than the size of memory" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBindImageMemoryDeviceGroupInfo, VkBindImageMemorySwapchainInfoKHR, or VkBindImagePlaneMemoryInfo" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-commonparent", + "text": " Both of image, and memory that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-01612", + "text": " memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image" + }, + { + "vuid": 
"VUID-VkBindImageMemoryInfo-memoryOffset-01613", + "text": " memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with image" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-01614", + "text": " The difference of the size of memory and memoryOffset must be greater than or equal to the size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements with the same image" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01615", + "text": " If the pNext chain does not include an instance of the VkBindImagePlaneMemoryInfo structure, memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01616", + "text": " If the pNext chain does not include an instance of the VkBindImagePlaneMemoryInfo structure, memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01617", + "text": " If the pNext chain does not include an instance of the VkBindImagePlaneMemoryInfo structure, the difference of the size of memory and memoryOffset must be greater than or equal to the size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with the same image" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01618", + "text": " If the pNext chain includes an instance of the VkBindImagePlaneMemoryInfo structure, image must have been created with the VK_IMAGE_CREATE_DISJOINT_BIT bit set." 
+ }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01619", + "text": " If the pNext chain includes an instance of the VkBindImagePlaneMemoryInfo structure, memory must have been allocated using one of the memory types allowed in the memoryTypeBits member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image and the correct planeAspect for this plane in the VkImagePlaneMemoryRequirementsInfo structure attached to the VkImageMemoryRequirementsInfo2’s pNext chain" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01620", + "text": " If the pNext chain includes an instance of the VkBindImagePlaneMemoryInfo structure, memoryOffset must be an integer multiple of the alignment member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with image and the correct planeAspect for this plane in the VkImagePlaneMemoryRequirementsInfo structure attached to the VkImageMemoryRequirementsInfo2’s pNext chain" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01621", + "text": " If the pNext chain includes an instance of the VkBindImagePlaneMemoryInfo structure, the difference of the size of memory and memoryOffset must be greater than or equal to the size member of the VkMemoryRequirements structure returned from a call to vkGetImageMemoryRequirements2 with the same image and the correct planeAspect for this plane in the VkImagePlaneMemoryRequirementsInfo structure attached to the VkImageMemoryRequirementsInfo2’s pNext chain" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-image-01622", + "text": " If image requires a dedicated allocation (as reported by vkGetImageMemoryRequirements2 in VkMemoryDedicatedRequirements::requiresDedicatedAllocation for image), memory must have been created with VkMemoryDedicatedAllocateInfo::image equal to image and memoryOffset must be zero" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-01903", + "text": " If the VkMemoryAllocateInfo provided when memory was allocated included an instance of VkMemoryDedicatedAllocateInfo in its pNext chain, and VkMemoryDedicatedAllocateInfo::image was not VK_NULL_HANDLE, then image must equal VkMemoryDedicatedAllocateInfo::image and memoryOffset must be zero." 
+ } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-image-01623", + "text": " If image was created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must have been created with VkDedicatedAllocationMemoryAllocateInfoNV::image equal to image and memoryOffset must be zero" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_NV_dedicated_allocation)+!(VK_VERSION_1_1,VK_KHR_dedicated_allocation)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-image-01624", + "text": " If image was not created with VkDedicatedAllocationImageCreateInfoNV::dedicatedAllocation equal to VK_TRUE, memory must not have been allocated dedicated for a specific buffer or image" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+!(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-memory-01625", + "text": " memory must be a valid VkDeviceMemory handle" + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01626", + "text": " If the pNext chain includes VkBindImageMemoryDeviceGroupInfo, all instances of memory specified by VkBindImageMemoryDeviceGroupInfo::pDeviceIndices must have been allocated" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01627", + "text": " If the pNext chain includes VkBindImageMemoryDeviceGroupInfo, and VkBindImageMemoryDeviceGroupInfo::splitInstanceBindRegionCount is not zero, then image must have been created with the VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT bit set" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01628", + "text": " If the pNext chain includes VkBindImageMemoryDeviceGroupInfo, all elements of VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions must be valid rectangles contained within the dimensions of image" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01629", + "text": " If the pNext chain includes VkBindImageMemoryDeviceGroupInfo, the union of the areas of all elements of VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions that correspond to the same instance of image must cover the entire image." + } + ], + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-VkBindImageMemoryInfo-image-01630", + "text": " If image was created with a valid swapchain handle in VkImageSwapchainCreateInfoKHR::swapchain, then the pNext chain must include a valid instance of VkBindImageMemorySwapchainInfoKHR" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01631", + "text": " If the pNext chain includes an instance of VkBindImageMemorySwapchainInfoKHR, memory must be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-pNext-01632", + "text": " If the pNext chain does not include an instance of VkBindImageMemorySwapchainInfoKHR, memory must be a valid VkDeviceMemory handle" + } + ] + }, + "VkBindImageMemoryDeviceGroupInfo": { + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01633", + "text": " At least one of deviceIndexCount and splitInstanceBindRegionCount must be zero." 
+ }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01634", + "text": " deviceIndexCount must either be zero or equal to the number of physical devices in the logical device" + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-01635", + "text": " All elements of pDeviceIndices must be valid device indices." + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-splitInstanceBindRegionCount-01636", + "text": " splitInstanceBindRegionCount must either be zero or equal to the number of physical devices in the logical device squared" + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-01637", + "text": " Elements of pSplitInstanceBindRegions that correspond to the same instance of an image must not overlap." + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-offset-01638", + "text": " The offset.x member of any element of pSplitInstanceBindRegions must be a multiple of the sparse image block width (VkSparseImageFormatProperties::imageGranularity.width) of all non-metadata aspects of the image" + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-offset-01639", + "text": " The offset.y member of any element of pSplitInstanceBindRegions must be a multiple of the sparse image block height (VkSparseImageFormatProperties::imageGranularity.height) of all non-metadata aspects of the image" + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-extent-01640", + "text": " The extent.width member of any element of pSplitInstanceBindRegions must either be a multiple of the sparse image block width of all non-metadata aspects of the image, or else extent.width + offset.x must equal the width of the image subresource" + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-extent-01641", + "text": " The extent.height member of any element of pSplitInstanceBindRegions must either be a multiple of the sparse image block height of all non-metadata aspects of the image, or else extent.height
offset.y must equal the width of the image subresource" + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO" + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-parameter", + "text": " If deviceIndexCount is not 0, pDeviceIndices must be a valid pointer to an array of deviceIndexCount uint32_t values" + }, + { + "vuid": "VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-parameter", + "text": " If splitInstanceBindRegionCount is not 0, pSplitInstanceBindRegions must be a valid pointer to an array of splitInstanceBindRegionCount VkRect2D structures" + } + ] + }, + "VkBindImageMemorySwapchainInfoKHR": { + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_device_group)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-VkBindImageMemorySwapchainInfoKHR-imageIndex-01644", + "text": " imageIndex must be less than the number of images in swapchain" + }, + { + "vuid": "VUID-VkBindImageMemorySwapchainInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR" + }, + { + "vuid": "VUID-VkBindImageMemorySwapchainInfoKHR-swapchain-parameter", + "text": " swapchain must be a valid VkSwapchainKHR handle" + } + ] + }, + "VkBindImagePlaneMemoryInfo": { + "(VK_VERSION_1_1,VK_KHR_bind_memory2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkBindImagePlaneMemoryInfo-planeAspect-01642", + "text": " planeAspect must be a single valid plane aspect for the image format (that is, planeAspect must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT for “_2PLANE” formats and planeAspect must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT for “_3PLANE” formats)" + }, + { + "vuid": "VUID-VkBindImagePlaneMemoryInfo-None-01643", + "text": " A single call to vkBindImageMemory2 must bind all or none of the planes of an image (i.e. 
bindings to all planes of an image must be made in a single vkBindImageMemory2 call), as separate bindings" + }, + { + "vuid": "VUID-VkBindImagePlaneMemoryInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO" + }, + { + "vuid": "VUID-VkBindImagePlaneMemoryInfo-planeAspect-parameter", + "text": " planeAspect must be a valid VkImageAspectFlagBits value" + } + ] + }, + "vkCreateSampler": { + "core": [ + { + "vuid": "VUID-vkCreateSampler-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateSampler-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkSamplerCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateSampler-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateSampler-pSampler-parameter", + "text": " pSampler must be a valid pointer to a VkSampler handle" + } + ] + }, + "VkSamplerCreateInfo": { + "core": [ + { + "vuid": "VUID-VkSamplerCreateInfo-mipLodBias-01069", + "text": " The absolute value of mipLodBias must be less than or equal to VkPhysicalDeviceLimits::maxSamplerLodBias" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-anisotropyEnable-01070", + "text": " If the &amp;lt;&amp;lt;features-features-samplerAnisotropy,anisotropic sampling&amp;gt;&amp;gt; feature is not enabled, anisotropyEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-anisotropyEnable-01071", + "text": " If anisotropyEnable is VK_TRUE, maxAnisotropy must be between 1.0 and VkPhysicalDeviceLimits::maxSamplerAnisotropy, inclusive" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01072", + "text": " If unnormalizedCoordinates is VK_TRUE, minFilter and magFilter must be equal" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01073", + "text": " If unnormalizedCoordinates is VK_TRUE, mipmapMode must be VK_SAMPLER_MIPMAP_MODE_NEAREST" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01074", + "text": " If unnormalizedCoordinates is VK_TRUE, minLod and maxLod must be zero" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01075", + "text": " If unnormalizedCoordinates is VK_TRUE, addressModeU and addressModeV must each be either VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE or VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01076", + "text": " If unnormalizedCoordinates is VK_TRUE, anisotropyEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-unnormalizedCoordinates-01077", + "text": " If unnormalizedCoordinates is VK_TRUE, compareEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01078", + "text": " If any of addressModeU, addressModeV or addressModeW are VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, borderColor must be a valid VkBorderColor value" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01079", + "text": " If the VK_KHR_sampler_mirror_clamp_to_edge extension is not enabled, addressModeU, addressModeV and addressModeW must not be VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-compareEnable-01080", + "text": " If compareEnable is VK_TRUE, compareOp must be a valid VkCompareOp value" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO" + }, + { + "vuid": 
"VUID-VkSamplerCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkSamplerReductionModeCreateInfoEXT or VkSamplerYcbcrConversionInfo" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-magFilter-parameter", + "text": " magFilter must be a valid VkFilter value" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-minFilter-parameter", + "text": " minFilter must be a valid VkFilter value" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-mipmapMode-parameter", + "text": " mipmapMode must be a valid VkSamplerMipmapMode value" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-addressModeU-parameter", + "text": " addressModeU must be a valid VkSamplerAddressMode value" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-addressModeV-parameter", + "text": " addressModeV must be a valid VkSamplerAddressMode value" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-addressModeW-parameter", + "text": " addressModeW must be a valid VkSamplerAddressMode value" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkSamplerCreateInfo-minFilter-01645", + "text": " If &amp;lt;&amp;lt;samplers-YCbCr-conversion,sampler Y’CBCR conversion&amp;gt;&amp;gt; is enabled and VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT is not set for the format, minFilter and magFilter must be equal to the sampler Y’CBCR conversion’s chromaFilter" + }, + { + "vuid": "VUID-VkSamplerCreateInfo-addressModeU-01646", + "text": " If &amp;lt;&amp;lt;samplers-YCbCr-conversion,sampler Y’CBCR conversion&amp;gt;&amp;gt; is enabled, addressModeU, addressModeV, and addressModeW must be VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, anisotropyEnable must be VK_FALSE, and unnormalizedCoordinates must be VK_FALSE" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_EXT_sampler_filter_minmax)": [ + { + "vuid": "VUID-VkSamplerCreateInfo-None-01647", + "text": " The sampler reduction mode must be set to VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT if &amp;lt;&amp;lt;samplers-YCbCr-conversion,sampler Y’CBCR conversion&amp;gt;&amp;gt; is enabled" + } + ], + "(VK_IMG_filter_cubic)": [ + { + "vuid": "VUID-VkSamplerCreateInfo-magFilter-01081", + "text": " If either magFilter or minFilter is VK_FILTER_CUBIC_IMG, anisotropyEnable must be VK_FALSE" + } + ], + "(VK_IMG_filter_cubic+VK_EXT_sampler_filter_minmax)": [ + { + "vuid": "VUID-VkSamplerCreateInfo-magFilter-01422", + "text": " If either magFilter or minFilter is VK_FILTER_CUBIC_IMG, the reductionMode member of VkSamplerReductionModeCreateInfoEXT must be VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT" + } + ], + "(VK_EXT_sampler_filter_minmax)": [ + { + "vuid": "VUID-VkSamplerCreateInfo-compareEnable-01423", + "text": " If compareEnable is VK_TRUE, the reductionMode member of VkSamplerReductionModeCreateInfoEXT must be VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT" + } + ] + }, + "VkSamplerReductionModeCreateInfoEXT": { + "(VK_EXT_sampler_filter_minmax)": [ + { + "vuid": "VUID-VkSamplerReductionModeCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkSamplerReductionModeCreateInfoEXT-reductionMode-parameter", + "text": " reductionMode must be a 
valid VkSamplerReductionModeEXT value" + } + ] + }, + "vkDestroySampler": { + "core": [ + { + "vuid": "VUID-vkDestroySampler-sampler-01082", + "text": " All submitted commands that refer to sampler must have completed execution" + }, + { + "vuid": "VUID-vkDestroySampler-sampler-01083", + "text": " If VkAllocationCallbacks were provided when sampler was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroySampler-sampler-01084", + "text": " If no VkAllocationCallbacks were provided when sampler was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroySampler-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroySampler-sampler-parameter", + "text": " If sampler is not VK_NULL_HANDLE, sampler must be a valid VkSampler handle" + }, + { + "vuid": "VUID-vkDestroySampler-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroySampler-sampler-parent", + "text": " If sampler is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "VkSamplerYcbcrConversionInfo": { + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkSamplerYcbcrConversionInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionInfo-conversion-parameter", + "text": " conversion must be a valid VkSamplerYcbcrConversion handle" + } + ] + }, + "vkCreateSamplerYcbcrConversion": { + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkCreateSamplerYcbcrConversion-None-01648", + "text": " The &amp;lt;&amp;lt;features-features-sampler-YCbCr-conversion, sampler Y’CBCR conversion feature&amp;gt;&amp;gt; must be enabled" + }, + { + "vuid": "VUID-vkCreateSamplerYcbcrConversion-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateSamplerYcbcrConversion-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkSamplerYcbcrConversionCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateSamplerYcbcrConversion-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateSamplerYcbcrConversion-pYcbcrConversion-parameter", + "text": " pYcbcrConversion must be a valid pointer to a VkSamplerYcbcrConversion handle" + } + ] + }, + "VkSamplerYcbcrConversionCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+!(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01649", + "text": " format must not be VK_FORMAT_UNDEFINED" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01904", + "text": " If an external format conversion is being created, format must be VK_FORMAT_UNDEFINED, otherwise it must not be VK_FORMAT_UNDEFINED." 
+ } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01650", + "text": " format must support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01651", + "text": " If the format does not support VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_COSITED_EVEN" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-01652", + "text": " If the format does not support VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, xChromaOffset and yChromaOffset must not be VK_CHROMA_LOCATION_MIDPOINT" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-01653", + "text": " format must represent unsigned normalized values (i.e. the format must be a UNORM format)" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-None-01654", + "text": " If the format has a _422 or _420 suffix:" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-01655", + "text": " If ycbcrModel is not VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, then components.r, components.g, and components.b must correspond to channels of the format; that is, components.r, components.g, and components.b must not be VK_COMPONENT_SWIZZLE_ZERO or VK_COMPONENT_SWIZZLE_ONE, and must not correspond to a channel which contains zero or one as a consequence of &amp;lt;&amp;lt;textures-conversion-to-rgba,conversion to RGBA&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-forceExplicitReconstruction-01656", + "text": " If the format does not support VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, forceExplicitReconstruction must be FALSE" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-01657", + "text": " If the format does not support VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, chromaFilter must be VK_FILTER_NEAREST" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkExternalFormatANDROID" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrModel-parameter", + "text": " ycbcrModel must be a valid VkSamplerYcbcrModelConversion value" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-ycbcrRange-parameter", + "text": " ycbcrRange must be a valid VkSamplerYcbcrRange value" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-components-parameter", + "text": " components must be a valid VkComponentMapping structure" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-xChromaOffset-parameter", + "text": " xChromaOffset must be a valid VkChromaLocation value" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-yChromaOffset-parameter", + "text": " yChromaOffset must be a valid VkChromaLocation value" + }, + { + "vuid": "VUID-VkSamplerYcbcrConversionCreateInfo-chromaFilter-parameter", + "text": " chromaFilter must be a valid VkFilter value" + } + ] + }, + "vkDestroySamplerYcbcrConversion": { + 
"(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkDestroySamplerYcbcrConversion-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parameter", + "text": " If ycbcrConversion is not VK_NULL_HANDLE, ycbcrConversion must be a valid VkSamplerYcbcrConversion handle" + }, + { + "vuid": "VUID-vkDestroySamplerYcbcrConversion-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroySamplerYcbcrConversion-ycbcrConversion-parent", + "text": " If ycbcrConversion is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreateDescriptorSetLayout": { + "core": [ + { + "vuid": "VUID-vkCreateDescriptorSetLayout-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateDescriptorSetLayout-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDescriptorSetLayoutCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateDescriptorSetLayout-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDescriptorSetLayout-pSetLayout-parameter", + "text": " pSetLayout must be a valid pointer to a VkDescriptorSetLayout handle" + } + ] + }, + "VkDescriptorSetLayoutCreateInfo": { + "core": [ + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-binding-00279", + "text": " The VkDescriptorSetLayoutBinding::binding members of the elements of the pBindings array must each have different values." + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDescriptorSetLayoutBindingFlagsCreateInfoEXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkDescriptorSetLayoutCreateFlagBits values" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-pBindings-parameter", + "text": " If bindingCount is not 0, pBindings must be a valid pointer to an array of bindingCount valid VkDescriptorSetLayoutBinding structures" + } + ], + "(VK_KHR_push_descriptor)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-00280", + "text": " If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then all elements of pBindings must not have a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-00281", + "text": " If flags contains VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then the total number of elements of all bindings must be less than or equal to VkPhysicalDevicePushDescriptorPropertiesKHR::maxPushDescriptors" + } + ], + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-flags-03000", + "text": " If any binding has the VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT bit set, flags must include VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutCreateInfo-descriptorType-03001", + "text": " If any binding has the 
VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT bit set, then all bindings must not have descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC" + } + ] + }, + "VkDescriptorSetLayoutBinding": { + "core": [ + { + "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-00282", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and descriptorCount is not 0 and pImmutableSamplers is not NULL, pImmutableSamplers must be a valid pointer to an array of descriptorCount valid VkSampler handles" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorCount-00283", + "text": " If descriptorCount is not 0, stageFlags must be a valid combination of VkShaderStageFlagBits values" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-01510", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT and descriptorCount is not 0, then stageFlags must be 0 or VK_SHADER_STAGE_FRAGMENT_BIT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBinding-descriptorType-parameter", + "text": " descriptorType must be a valid VkDescriptorType value" + } + ] + }, + "VkDescriptorSetLayoutBindingFlagsCreateInfoEXT": { + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-bindingCount-03002", + "text": " If bindingCount is not zero, bindingCount must equal VkDescriptorSetLayoutCreateInfo::bindingCount" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03004", + "text": " If an element of pBindingFlags includes VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT, then all other elements of VkDescriptorSetLayoutCreateInfo::pBindings must have a smaller value of binding" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformBufferUpdateAfterBind-03005", + "text": " If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingUniformBufferUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingSampledImageUpdateAfterBind-03006", + "text": " If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingSampledImageUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, or VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageImageUpdateAfterBind-03007", + "text": " If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingStorageImageUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_STORAGE_IMAGE must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageBufferUpdateAfterBind-03008", + "text": " If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingStorageBufferUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUniformTexelBufferUpdateAfterBind-03009", + "text": " If 
VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingUniformTexelBufferUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingStorageTexelBufferUpdateAfterBind-03010", + "text": " If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingStorageTexelBufferUpdateAfterBind is not enabled, all bindings with descriptor type VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-None-03011", + "text": " All bindings with descriptor type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC must not use VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingUpdateUnusedWhilePending-03012", + "text": " If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingUpdateUnusedWhilePending is not enabled, all elements of pBindingFlags must not include VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingPartiallyBound-03013", + "text": " If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingPartiallyBound is not enabled, all elements of pBindingFlags must not include VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-descriptorBindingVariableDescriptorCount-03014", + "text": " If VkPhysicalDeviceDescriptorIndexingFeaturesEXT::descriptorBindingVariableDescriptorCount is not enabled, all elements of pBindingFlags must not include VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-03015", + "text": " If an element of pBindingFlags includes VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT, that element’s descriptorType must not be VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-parameter", + "text": " If bindingCount is not 0, pBindingFlags must be a valid pointer to an array of bindingCount valid combinations of VkDescriptorBindingFlagBitsEXT values" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-pBindingFlags-requiredbitmask", + "text": " Each element of pBindingFlags must not be 0" + } + ], + "(VK_EXT_descriptor_indexing)+(VK_KHR_push_descriptor)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutBindingFlagsCreateInfoEXT-flags-03003", + "text": " If VkDescriptorSetLayoutCreateInfo::flags includes VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then all elements of pBindingFlags must not include VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT, VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT, or VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT" + } + ] + }, + "vkGetDescriptorSetLayoutSupport": { + "(VK_VERSION_1_1,VK_KHR_maintenance3)": [ + { + "vuid": 
"VUID-vkGetDescriptorSetLayoutSupport-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDescriptorSetLayoutSupport-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDescriptorSetLayoutCreateInfo structure" + }, + { + "vuid": "VUID-vkGetDescriptorSetLayoutSupport-pSupport-parameter", + "text": " pSupport must be a valid pointer to a VkDescriptorSetLayoutSupport structure" + } + ] + }, + "VkDescriptorSetLayoutSupport": { + "(VK_VERSION_1_1,VK_KHR_maintenance3)": [ + { + "vuid": "VUID-VkDescriptorSetLayoutSupport-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT" + }, + { + "vuid": "VUID-VkDescriptorSetLayoutSupport-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDescriptorSetVariableDescriptorCountLayoutSupportEXT" + } + ] + }, + "VkDescriptorSetVariableDescriptorCountLayoutSupportEXT": { + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkDescriptorSetVariableDescriptorCountLayoutSupportEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT" + } + ] + }, + "vkDestroyDescriptorSetLayout": { + "core": [ + { + "vuid": "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00284", + "text": " If VkAllocationCallbacks were provided when descriptorSetLayout was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-00285", + "text": " If no VkAllocationCallbacks were provided when descriptorSetLayout was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyDescriptorSetLayout-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parameter", + "text": " If descriptorSetLayout is not VK_NULL_HANDLE, descriptorSetLayout must be a valid VkDescriptorSetLayout handle" + }, + { + "vuid": "VUID-vkDestroyDescriptorSetLayout-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyDescriptorSetLayout-descriptorSetLayout-parent", + "text": " If descriptorSetLayout is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreatePipelineLayout": { + "core": [ + { + "vuid": "VUID-vkCreatePipelineLayout-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreatePipelineLayout-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkPipelineLayoutCreateInfo structure" + }, + { + "vuid": "VUID-vkCreatePipelineLayout-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreatePipelineLayout-pPipelineLayout-parameter", + "text": " pPipelineLayout must be a valid pointer to a VkPipelineLayout handle" + } + ] + }, + "VkPipelineLayoutCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-setLayoutCount-00286", + "text": " setLayoutCount must be less than or equal to VkPhysicalDeviceLimits::maxBoundDescriptorSets" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-00292", + "text": " Any two elements of pPushConstantRanges must not include the same stage in stageFlags" + }, + { + "vuid": 
"VUID-VkPipelineLayoutCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-parameter", + "text": " If setLayoutCount is not 0, pSetLayouts must be a valid pointer to an array of setLayoutCount valid VkDescriptorSetLayout handles" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pPushConstantRanges-parameter", + "text": " If pushConstantRangeCount is not 0, pPushConstantRanges must be a valid pointer to an array of pushConstantRangeCount valid VkPushConstantRange structures" + } + ], + "!(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00287", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorSamplers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00288", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER and VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorUniformBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00289", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER and VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorStorageBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00290", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorSampledImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00291", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible to any shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorStorageImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01676", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorInputAttachments" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01677", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetSamplers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01678", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER accessible across all 
shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetUniformBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01679", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetUniformBuffersDynamic" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01680", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01681", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageBuffersDynamic" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01682", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetSampledImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01683", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-01684", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetInputAttachments" + } + ], + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03016", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorSamplers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03017", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER and VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorUniformBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03018", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of 
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER and VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorStorageBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03019", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorSampledImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03020", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorStorageImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03021", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxPerStageDescriptorInputAttachments" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03022", + "text": " The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindSamplers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03023", + "text": " The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER and VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindUniformBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03024", + "text": " The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_BUFFER and VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindStorageBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03025", + "text": " The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindSampledImages" + }, + { + "vuid": 
"VUID-VkPipelineLayoutCreateInfo-descriptorType-03026", + "text": " The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindStorageImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03027", + "text": " The total number of descriptors with a descriptorType of VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible to any given shader stage across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxPerStageDescriptorUpdateAfterBindInputAttachments" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03028", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetSamplers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03029", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER accessible across all shader stagess and and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetUniformBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03030", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetUniformBuffersDynamic" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03031", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03032", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageBuffersDynamic" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03033", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible across all shader 
stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetSampledImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03034", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetStorageImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-descriptorType-03035", + "text": " The total number of descriptors in descriptor set layouts created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set with a descriptorType of VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceLimits::maxDescriptorSetInputAttachments" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03036", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_SAMPLER and VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindSamplers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03037", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindUniformBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03038", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindUniformBuffersDynamic" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03039", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindStorageBuffers" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03040", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindStorageBuffersDynamic" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03041", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, and VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindSampledImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03042", + "text": " The total number of descriptors of the type 
VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, and VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindStorageImages" + }, + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-03043", + "text": " The total number of descriptors of the type VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT accessible across all shader stages and across all elements of pSetLayouts must be less than or equal to VkPhysicalDeviceDescriptorIndexingPropertiesEXT::maxDescriptorSetUpdateAfterBindInputAttachments" + } + ], + "(VK_KHR_push_descriptor)": [ + { + "vuid": "VUID-VkPipelineLayoutCreateInfo-pSetLayouts-00293", + "text": " pSetLayouts must not contain more than one descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR set" + } + ] + }, + "VkPushConstantRange": { + "core": [ + { + "vuid": "VUID-VkPushConstantRange-offset-00294", + "text": " offset must be less than VkPhysicalDeviceLimits::maxPushConstantsSize" + }, + { + "vuid": "VUID-VkPushConstantRange-offset-00295", + "text": " offset must be a multiple of 4" + }, + { + "vuid": "VUID-VkPushConstantRange-size-00296", + "text": " size must be greater than 0" + }, + { + "vuid": "VUID-VkPushConstantRange-size-00297", + "text": " size must be a multiple of 4" + }, + { + "vuid": "VUID-VkPushConstantRange-size-00298", + "text": " size must be less than or equal to VkPhysicalDeviceLimits::maxPushConstantsSize minus offset" + }, + { + "vuid": "VUID-VkPushConstantRange-stageFlags-parameter", + "text": " stageFlags must be a valid combination of VkShaderStageFlagBits values" + }, + { + "vuid": "VUID-VkPushConstantRange-stageFlags-requiredbitmask", + "text": " stageFlags must not be 0" + } + ] + }, + "vkDestroyPipelineLayout": { + "core": [ + { + "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-00299", + "text": " If VkAllocationCallbacks were provided when pipelineLayout was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-00300", + "text": " If no VkAllocationCallbacks were provided when pipelineLayout was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyPipelineLayout-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-parameter", + "text": " If pipelineLayout is not VK_NULL_HANDLE, pipelineLayout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-vkDestroyPipelineLayout-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyPipelineLayout-pipelineLayout-parent", + "text": " If pipelineLayout is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCreateDescriptorPool": { + "core": [ + { + "vuid": "VUID-vkCreateDescriptorPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateDescriptorPool-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDescriptorPoolCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateDescriptorPool-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": 
"VUID-vkCreateDescriptorPool-pDescriptorPool-parameter", + "text": " pDescriptorPool must be a valid pointer to a VkDescriptorPool handle" + } + ] + }, + "VkDescriptorPoolCreateInfo": { + "core": [ + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-maxSets-00301", + "text": " maxSets must be greater than 0" + }, + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO" + }, + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-flags-parameter", + "text": " flags must be a valid combination of VkDescriptorPoolCreateFlagBits values" + }, + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-pPoolSizes-parameter", + "text": " pPoolSizes must be a valid pointer to an array of poolSizeCount valid VkDescriptorPoolSize structures" + }, + { + "vuid": "VUID-VkDescriptorPoolCreateInfo-poolSizeCount-arraylength", + "text": " poolSizeCount must be greater than 0" + } + ] + }, + "VkDescriptorPoolSize": { + "core": [ + { + "vuid": "VUID-VkDescriptorPoolSize-descriptorCount-00302", + "text": " descriptorCount must be greater than 0" + }, + { + "vuid": "VUID-VkDescriptorPoolSize-type-parameter", + "text": " type must be a valid VkDescriptorType value" + } + ] + }, + "vkDestroyDescriptorPool": { + "core": [ + { + "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-00303", + "text": " All submitted commands that refer to descriptorPool (via any allocated descriptor sets) must have completed execution" + }, + { + "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-00304", + "text": " If VkAllocationCallbacks were provided when descriptorPool was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-00305", + "text": " If no VkAllocationCallbacks were provided when descriptorPool was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyDescriptorPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-parameter", + "text": " If descriptorPool is not VK_NULL_HANDLE, descriptorPool must be a valid VkDescriptorPool handle" + }, + { + "vuid": "VUID-vkDestroyDescriptorPool-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyDescriptorPool-descriptorPool-parent", + "text": " If descriptorPool is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkAllocateDescriptorSets": { + "core": [ + { + "vuid": "VUID-vkAllocateDescriptorSets-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkAllocateDescriptorSets-pAllocateInfo-parameter", + "text": " pAllocateInfo must be a valid pointer to a valid VkDescriptorSetAllocateInfo structure" + }, + { + "vuid": "VUID-vkAllocateDescriptorSets-pDescriptorSets-parameter", + "text": " pDescriptorSets must be a valid pointer to an array of pAllocateInfo::descriptorSetCount VkDescriptorSet handles" + } + ] + }, + "VkDescriptorSetAllocateInfo": { + "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-00306", + "text": " descriptorSetCount must not be greater than the number of sets that are currently available for allocation in descriptorPool" + }, + { + "vuid": 
"VUID-VkDescriptorSetAllocateInfo-descriptorPool-00307", + "text": " descriptorPool must have enough free descriptor capacity remaining to allocate the descriptor sets of the specified layouts" + } + ], + "(VK_KHR_push_descriptor)": [ + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-00308", + "text": " Each element of pSetLayouts must not have been created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR set" + } + ], + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-03044", + "text": " If any element of pSetLayouts was created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT bit set, descriptorPool must have been created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set" + } + ], + "core": [ + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO" + }, + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDescriptorSetVariableDescriptorCountAllocateInfoEXT" + }, + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-descriptorPool-parameter", + "text": " descriptorPool must be a valid VkDescriptorPool handle" + }, + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-pSetLayouts-parameter", + "text": " pSetLayouts must be a valid pointer to an array of descriptorSetCount valid VkDescriptorSetLayout handles" + }, + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-descriptorSetCount-arraylength", + "text": " descriptorSetCount must be greater than 0" + }, + { + "vuid": "VUID-VkDescriptorSetAllocateInfo-commonparent", + "text": " Both of descriptorPool, and the elements of pSetLayouts must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkDescriptorSetVariableDescriptorCountAllocateInfoEXT": { + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-descriptorSetCount-03045", + "text": " If descriptorSetCount is not zero, descriptorSetCount must equal VkDescriptorSetAllocateInfo::descriptorSetCount" + }, + { + "vuid": "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pSetLayouts-03046", + "text": " If VkDescriptorSetAllocateInfo::pSetLayouts[i] has a variable descriptor count binding, then pDescriptorCounts[i] must be less than or equal to the descriptor count specified for that binding when the descriptor set layout was created." 
+ }, + { + "vuid": "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT" + }, + { + "vuid": "VUID-VkDescriptorSetVariableDescriptorCountAllocateInfoEXT-pDescriptorCounts-parameter", + "text": " If descriptorSetCount is not 0, pDescriptorCounts must be a valid pointer to an array of descriptorSetCount uint32_t values" + } + ] + }, + "vkFreeDescriptorSets": { + "core": [ + { + "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-00309", + "text": " All submitted commands that refer to any element of pDescriptorSets must have completed execution" + }, + { + "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-00310", + "text": " pDescriptorSets must be a valid pointer to an array of descriptorSetCount VkDescriptorSet handles, each element of which must either be a valid handle or VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-00311", + "text": " Each valid handle in pDescriptorSets must have been allocated from descriptorPool" + }, + { + "vuid": "VUID-vkFreeDescriptorSets-descriptorPool-00312", + "text": " descriptorPool must have been created with the VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT flag" + }, + { + "vuid": "VUID-vkFreeDescriptorSets-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkFreeDescriptorSets-descriptorPool-parameter", + "text": " descriptorPool must be a valid VkDescriptorPool handle" + }, + { + "vuid": "VUID-vkFreeDescriptorSets-descriptorSetCount-arraylength", + "text": " descriptorSetCount must be greater than 0" + }, + { + "vuid": "VUID-vkFreeDescriptorSets-descriptorPool-parent", + "text": " descriptorPool must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkFreeDescriptorSets-pDescriptorSets-parent", + "text": " Each element of pDescriptorSets that is a valid handle must have been created, allocated, or retrieved from descriptorPool" + } + ] + }, + "vkResetDescriptorPool": { + "core": [ + { + "vuid": "VUID-vkResetDescriptorPool-descriptorPool-00313", + "text": " All uses of descriptorPool (via any allocated descriptor sets) must have completed execution" + }, + { + "vuid": "VUID-vkResetDescriptorPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkResetDescriptorPool-descriptorPool-parameter", + "text": " descriptorPool must be a valid VkDescriptorPool handle" + }, + { + "vuid": "VUID-vkResetDescriptorPool-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-vkResetDescriptorPool-descriptorPool-parent", + "text": " descriptorPool must have been created, allocated, or retrieved from device" + } + ] + }, + "vkUpdateDescriptorSets": { + "!(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-vkUpdateDescriptorSets-dstSet-00314", + "text": " The dstSet member of each element of pDescriptorWrites or pDescriptorCopies must not be used by any command that was recorded to a command buffer which is in the &amp;lt;&amp;lt;commandbuffers-lifecycle, pending state&amp;gt;&amp;gt;." 
+ } + ], + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-vkUpdateDescriptorSets-None-03047", + "text": " Descriptor bindings updated by this command which were created without the VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT or VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT bits set must not be used by any command that was recorded to a command buffer which is in the &amp;lt;&amp;lt;commandbuffers-lifecycle,pending state&amp;gt;&amp;gt;." + } + ], + "core": [ + { + "vuid": "VUID-vkUpdateDescriptorSets-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkUpdateDescriptorSets-pDescriptorWrites-parameter", + "text": " If descriptorWriteCount is not 0, pDescriptorWrites must be a valid pointer to an array of descriptorWriteCount valid VkWriteDescriptorSet structures" + }, + { + "vuid": "VUID-vkUpdateDescriptorSets-pDescriptorCopies-parameter", + "text": " If descriptorCopyCount is not 0, pDescriptorCopies must be a valid pointer to an array of descriptorCopyCount valid VkCopyDescriptorSet structures" + } + ] + }, + "VkWriteDescriptorSet": { + "core": [ + { + "vuid": "VUID-VkWriteDescriptorSet-dstBinding-00315", + "text": " dstBinding must be less than or equal to the maximum value of binding of all VkDescriptorSetLayoutBinding structures specified when dstSet’s descriptor set layout was created" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-dstBinding-00316", + "text": " dstBinding must be a binding with a non-zero descriptorCount" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-00317", + "text": " All consecutive bindings updated via a single VkWriteDescriptorSet structure, except those with a descriptorCount of zero, must have identical descriptorType and stageFlags." + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-00318", + "text": " All consecutive bindings updated via a single VkWriteDescriptorSet structure, except those with a descriptorCount of zero, must all either use immutable samplers or must all not use immutable samplers." 
+ }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00319", + "text": " descriptorType must match the type of dstBinding within dstSet" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-dstSet-00320", + "text": " dstSet must be a valid VkDescriptorSet handle" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-dstArrayElement-00321", + "text": " The sum of dstArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding specified by dstBinding, and all applicable consecutive bindings, as described by &amp;lt;&amp;lt;descriptorsets-updates-consecutive&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00322", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, pImageInfo must be a valid pointer to an array of descriptorCount valid VkDescriptorImageInfo structures" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00323", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, pTexelBufferView must be a valid pointer to an array of descriptorCount valid VkBufferView handles" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00324", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, pBufferInfo must be a valid pointer to an array of descriptorCount valid VkDescriptorBufferInfo structures" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00325", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLER or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, and dstSet was not allocated with a layout that included immutable samplers for dstBinding with descriptorType, the sampler member of each element of pImageInfo must be a valid VkSampler object" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00326", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView and imageLayout members of each element of pImageInfo must be a valid VkImageView and VkImageLayout, respectively" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01402", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, for each descriptor that will be accessed via load or store operations the imageLayout member for corresponding elements of pImageInfo must be VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00327", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER or VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, the offset member of each element of pBufferInfo must be a multiple of VkPhysicalDeviceLimits::minUniformBufferOffsetAlignment" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00328", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, the offset member of each element of pBufferInfo must be a multiple of VkPhysicalDeviceLimits::minStorageBufferOffsetAlignment" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00329", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, and the buffer member of any element of pBufferInfo is the handle of a non-sparse buffer, then that buffer must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00330", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER or VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, the buffer member of each element of pBufferInfo must have been created with VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT set" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00331", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, the buffer member of each element of pBufferInfo must have been created with VK_BUFFER_USAGE_STORAGE_BUFFER_BIT set" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00332", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER or VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, the range member of each element of pBufferInfo, or the effective range if range is VK_WHOLE_SIZE, must be less than or equal to VkPhysicalDeviceLimits::maxUniformBufferRange" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00333", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, the range member of each element of pBufferInfo, or the effective range if range is VK_WHOLE_SIZE, must be less than or equal to VkPhysicalDeviceLimits::maxStorageBufferRange" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00334", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, the VkBuffer that each element of pTexelBufferView was created from must have been created with VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT set" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00335", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, the VkBuffer that each element of pTexelBufferView was created from must have been created with VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT set" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00336", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE or VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView member of each element of pImageInfo must have been created with the identity swizzle" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00337", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the imageView member of each element of pImageInfo must have been created with VK_IMAGE_USAGE_SAMPLED_BIT set" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-01403", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, the imageLayout member of each element of pImageInfo must be VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00338", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView member of each element of pImageInfo must have been created with VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT set" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00339", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, the imageView member of each element of pImageInfo must have been created with 
VK_IMAGE_USAGE_STORAGE_BIT set" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-parameter", + "text": " descriptorType must be a valid VkDescriptorType value" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-arraylength", + "text": " descriptorCount must be greater than 0" + }, + { + "vuid": "VUID-VkWriteDescriptorSet-commonparent", + "text": " Both of dstSet, and the elements of pTexelBufferView that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkWriteDescriptorSet-descriptorCount-03048", + "text": " All consecutive bindings updated via a single VkWriteDescriptorSet structure, except those with a descriptorCount of zero, must have identical VkDescriptorBindingFlagBitsEXT." + } + ] + }, + "VkDescriptorBufferInfo": { + "core": [ + { + "vuid": "VUID-VkDescriptorBufferInfo-offset-00340", + "text": " offset must be less than the size of buffer" + }, + { + "vuid": "VUID-VkDescriptorBufferInfo-range-00341", + "text": " If range is not equal to VK_WHOLE_SIZE, range must be greater than 0" + }, + { + "vuid": "VUID-VkDescriptorBufferInfo-range-00342", + "text": " If range is not equal to VK_WHOLE_SIZE, range must be less than or equal to the size of buffer minus offset" + }, + { + "vuid": "VUID-VkDescriptorBufferInfo-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + } + ] + }, + "VkDescriptorImageInfo": { + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkDescriptorImageInfo-imageView-00343", + "text": " imageView must not be 2D or 2D array image view created from a 3D image" + } + ], + "core": [ + { + "vuid": "VUID-VkDescriptorImageInfo-imageLayout-00344", + "text": " imageLayout must match the actual VkImageLayout of each subresource accessible from imageView at the time this descriptor is accessed" + }, + { + "vuid": "VUID-VkDescriptorImageInfo-commonparent", + "text": " Both of imageView, and sampler that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkDescriptorImageInfo-sampler-01563", + "text": " If sampler is used and enables &amp;lt;&amp;lt;samplers-YCbCr-conversion,sampler Y’CBCR conversion&amp;gt;&amp;gt;:" + }, + { + "vuid": "VUID-VkDescriptorImageInfo-sampler-01564", + "text": " If sampler is used and does not enable &amp;lt;&amp;lt;samplers-YCbCr-conversion, sampler Y’CBCR conversion&amp;gt;&amp;gt; and the VkFormat of the image is a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar format&amp;gt;&amp;gt;, the image must have been created with VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, and the aspectMask of the imageView must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT or (for three-plane formats only) VK_IMAGE_ASPECT_PLANE_2_BIT" + } + ] + }, + "VkCopyDescriptorSet": { + "core": [ + { + "vuid": "VUID-VkCopyDescriptorSet-srcBinding-00345", + "text": " srcBinding must be a valid binding within srcSet" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-srcArrayElement-00346", + "text": " The sum of srcArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding specified by 
srcBinding, and all applicable consecutive bindings, as described by &amp;lt;&amp;lt;descriptorsets-updates-consecutive&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-dstBinding-00347", + "text": " dstBinding must be a valid binding within dstSet" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-dstArrayElement-00348", + "text": " The sum of dstArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding specified by dstBinding, and all applicable consecutive bindings, as described by &amp;lt;&amp;lt;descriptorsets-updates-consecutive&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-00349", + "text": " If srcSet is equal to dstSet, then the source and destination ranges of descriptors must not overlap, where the ranges may include array elements from consecutive bindings as described by &amp;lt;&amp;lt;descriptorsets-updates-consecutive&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-parameter", + "text": " srcSet must be a valid VkDescriptorSet handle" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-dstSet-parameter", + "text": " dstSet must be a valid VkDescriptorSet handle" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-commonparent", + "text": " Both of dstSet, and srcSet must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-01918", + "text": " If srcSet’s layout was created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set, then dstSet’s layout must also have been created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-01919", + "text": " If srcSet’s layout was created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set, then dstSet’s layout must also have been created without the VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT flag set" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-01920", + "text": " If the descriptor pool from which srcSet was allocated was created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set, then the descriptor pool from which dstSet was allocated must also have been created with the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set" + }, + { + "vuid": "VUID-VkCopyDescriptorSet-srcSet-01921", + "text": " If the descriptor pool from which srcSet was allocated was created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set, then the descriptor pool from which dstSet was allocated must also have been created without the VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT flag set" + } + ] + }, + "vkCreateDescriptorUpdateTemplate": { + "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [ + { + "vuid": "VUID-vkCreateDescriptorUpdateTemplate-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateDescriptorUpdateTemplate-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDescriptorUpdateTemplateCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateDescriptorUpdateTemplate-pAllocator-parameter", + "text": " If pAllocator is not NULL, 
pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDescriptorUpdateTemplate-pDescriptorUpdateTemplate-parameter", + "text": " pDescriptorUpdateTemplate must be a valid pointer to a VkDescriptorUpdateTemplate handle" + } + ] + }, + "VkDescriptorUpdateTemplateCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [ + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00350", + "text": " If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, descriptorSetLayout must be a valid VkDescriptorSetLayout handle" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-pDescriptorUpdateEntries-parameter", + "text": " pDescriptorUpdateEntries must be a valid pointer to an array of descriptorUpdateEntryCount valid VkDescriptorUpdateTemplateEntry structures" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-parameter", + "text": " templateType must be a valid VkDescriptorUpdateTemplateType value" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorSetLayout-parameter", + "text": " If descriptorSetLayout is not VK_NULL_HANDLE, descriptorSetLayout must be a valid VkDescriptorSetLayout handle" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-descriptorUpdateEntryCount-arraylength", + "text": " descriptorUpdateEntryCount must be greater than 0" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-commonparent", + "text": " Both of descriptorSetLayout, and pipelineLayout that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)+(VK_KHR_push_descriptor)": [ + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00351", + "text": " If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00352", + "text": " If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, pipelineLayout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateCreateInfo-templateType-00353", + "text": " If templateType is VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, set must be the unique set number in the pipeline layout that uses a descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR" + } + ] + }, + "VkDescriptorUpdateTemplateEntry": { + "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [ + { + "vuid": "VUID-VkDescriptorUpdateTemplateEntry-dstBinding-00354", + "text": " dstBinding must be a valid binding in the descriptor set layout implicitly specified when using a descriptor update template to update descriptors." 
+ }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateEntry-dstArrayElement-00355", + "text": " dstArrayElement and descriptorCount must be less than or equal to the number of array elements in the descriptor set binding implicitly specified when using a descriptor update template to update descriptors, and all applicable consecutive bindings, as described by &amp;lt;&amp;lt;descriptorsets-updates-consecutive&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkDescriptorUpdateTemplateEntry-descriptorType-parameter", + "text": " descriptorType must be a valid VkDescriptorType value" + } + ] + }, + "vkDestroyDescriptorUpdateTemplate": { + "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [ + { + "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00356", + "text": " If VkAllocationCallbacks were provided when descriptorSetLayout was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorSetLayout-00357", + "text": " If no VkAllocationCallbacks were provided when descriptorSetLayout was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parameter", + "text": " If descriptorUpdateTemplate is not VK_NULL_HANDLE, descriptorUpdateTemplate must be a valid VkDescriptorUpdateTemplate handle" + }, + { + "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyDescriptorUpdateTemplate-descriptorUpdateTemplate-parent", + "text": " If descriptorUpdateTemplate is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkUpdateDescriptorSetWithTemplate": { + "(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [ + { + "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-pData-01685", + "text": " pData must be a valid pointer to a memory that contains one or more valid instances of VkDescriptorImageInfo, VkDescriptorBufferInfo, or VkBufferView in a layout defined by descriptorUpdateTemplate when it was created with vkCreateDescriptorUpdateTemplate" + }, + { + "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-descriptorSet-parameter", + "text": " descriptorSet must be a valid VkDescriptorSet handle" + }, + { + "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parameter", + "text": " descriptorUpdateTemplate must be a valid VkDescriptorUpdateTemplate handle" + }, + { + "vuid": "VUID-vkUpdateDescriptorSetWithTemplate-descriptorUpdateTemplate-parent", + "text": " descriptorUpdateTemplate must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCmdBindDescriptorSets": { + "core": [ + { + "vuid": "VUID-vkCmdBindDescriptorSets-pDescriptorSets-00358", + "text": " Each element of pDescriptorSets must have been allocated with a VkDescriptorSetLayout that matches (is the same as, or identically defined as) the VkDescriptorSetLayout at set n in layout, where n is the sum of firstSet and the index into pDescriptorSets" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-dynamicOffsetCount-00359", + "text": " dynamicOffsetCount must be equal to the total number of dynamic 
descriptors in pDescriptorSets" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-firstSet-00360", + "text": " The sum of firstSet and descriptorSetCount must be less than or equal to VkPipelineLayoutCreateInfo::setLayoutCount provided when layout was created" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-pipelineBindPoint-00361", + "text": " pipelineBindPoint must be supported by the commandBuffer’s parent VkCommandPool’s queue family" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-00362", + "text": " Each element of pDynamicOffsets must satisfy the required alignment for the corresponding descriptor binding’s descriptor type" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-pDescriptorSets-parameter", + "text": " pDescriptorSets must be a valid pointer to an array of descriptorSetCount valid VkDescriptorSet handles" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-pDynamicOffsets-parameter", + "text": " If dynamicOffsetCount is not 0, pDynamicOffsets must be a valid pointer to an array of dynamicOffsetCount uint32_t values" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-descriptorSetCount-arraylength", + "text": " descriptorSetCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdBindDescriptorSets-commonparent", + "text": " Each of commandBuffer, layout, and the elements of pDescriptorSets must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdPushDescriptorSetKHR": { + "(VK_KHR_push_descriptor)": [ + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-00363", + "text": " pipelineBindPoint must be supported by the commandBuffer’s parent VkCommandPool’s queue family" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-set-00364", + "text": " set must be less than VkPipelineLayoutCreateInfo::setLayoutCount provided when layout was created" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-set-00365", + "text": " set must be the unique set number in the pipeline layout that uses a descriptor set layout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-pDescriptorWrites-parameter", + "text": " pDescriptorWrites must be a valid pointer to an array of descriptorWriteCount valid VkWriteDescriptorSet structures" + }, + { + "vuid": 
"VUID-vkCmdPushDescriptorSetKHR-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-descriptorWriteCount-arraylength", + "text": " descriptorWriteCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetKHR-commonparent", + "text": " Both of commandBuffer, and layout must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdPushDescriptorSetWithTemplateKHR": { + "(VK_KHR_push_descriptor)+(VK_VERSION_1_1,VK_KHR_descriptor_update_template)": [ + { + "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-00366", + "text": " The pipelineBindPoint specified during the creation of the descriptor update template must be supported by the commandBuffer’s parent VkCommandPool’s queue family" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-pData-01686", + "text": " pData must be a valid pointer to a memory that contains one or more valid instances of VkDescriptorImageInfo, VkDescriptorBufferInfo, or VkBufferView in a layout defined by descriptorUpdateTemplate when it was created with vkCreateDescriptorUpdateTemplateKHR" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-descriptorUpdateTemplate-parameter", + "text": " descriptorUpdateTemplate must be a valid VkDescriptorUpdateTemplate handle" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdPushDescriptorSetWithTemplateKHR-commonparent", + "text": " Each of commandBuffer, descriptorUpdateTemplate, and layout must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdPushConstants": { + "core": [ + { + "vuid": "VUID-vkCmdPushConstants-offset-01795", + "text": " For each byte in the range specified by offset and size and for each shader stage in stageFlags, there must be a push constant range in layout that includes that byte and that stage" + }, + { + "vuid": "VUID-vkCmdPushConstants-offset-01796", + "text": " For each byte in the range specified by offset and size and for each push constant range that overlaps that byte, stageFlags must include all stages in that push constant range’s VkPushConstantRange::stageFlags" + }, + { + "vuid": "VUID-vkCmdPushConstants-offset-00368", + "text": " offset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdPushConstants-size-00369", + "text": " size must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdPushConstants-offset-00370", + "text": " offset must be less than VkPhysicalDeviceLimits::maxPushConstantsSize" + }, + { + "vuid": "VUID-vkCmdPushConstants-size-00371", + "text": " size must 
be less than or equal to VkPhysicalDeviceLimits::maxPushConstantsSize minus offset" + }, + { + "vuid": "VUID-vkCmdPushConstants-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdPushConstants-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-vkCmdPushConstants-stageFlags-parameter", + "text": " stageFlags must be a valid combination of VkShaderStageFlagBits values" + }, + { + "vuid": "VUID-vkCmdPushConstants-stageFlags-requiredbitmask", + "text": " stageFlags must not be 0" + }, + { + "vuid": "VUID-vkCmdPushConstants-pValues-parameter", + "text": " pValues must be a valid pointer to an array of size bytes" + }, + { + "vuid": "VUID-vkCmdPushConstants-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdPushConstants-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdPushConstants-size-arraylength", + "text": " size must be greater than 0" + }, + { + "vuid": "VUID-vkCmdPushConstants-commonparent", + "text": " Both of commandBuffer, and layout must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCreateQueryPool": { + "core": [ + { + "vuid": "VUID-vkCreateQueryPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateQueryPool-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkQueryPoolCreateInfo structure" + }, + { + "vuid": "VUID-vkCreateQueryPool-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateQueryPool-pQueryPool-parameter", + "text": " pQueryPool must be a valid pointer to a VkQueryPool handle" + } + ] + }, + "VkQueryPoolCreateInfo": { + "core": [ + { + "vuid": "VUID-VkQueryPoolCreateInfo-queryType-00791", + "text": " If the &amp;lt;&amp;lt;features-features-pipelineStatisticsQuery,pipeline statistics queries&amp;gt;&amp;gt; feature is not enabled, queryType must not be VK_QUERY_TYPE_PIPELINE_STATISTICS" + }, + { + "vuid": "VUID-VkQueryPoolCreateInfo-queryType-00792", + "text": " If queryType is VK_QUERY_TYPE_PIPELINE_STATISTICS, pipelineStatistics must be a valid combination of VkQueryPipelineStatisticFlagBits values" + }, + { + "vuid": "VUID-VkQueryPoolCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO" + }, + { + "vuid": "VUID-VkQueryPoolCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkQueryPoolCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkQueryPoolCreateInfo-queryType-parameter", + "text": " queryType must be a valid VkQueryType value" + } + ] + }, + "vkDestroyQueryPool": { + "core": [ + { + "vuid": "VUID-vkDestroyQueryPool-queryPool-00793", + "text": " All submitted commands that refer to queryPool must have completed execution" + }, + { + "vuid": "VUID-vkDestroyQueryPool-queryPool-00794", + "text": " If VkAllocationCallbacks were provided when queryPool was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyQueryPool-queryPool-00795", + "text": " If no VkAllocationCallbacks were provided when queryPool was created, pAllocator 
must be NULL" + }, + { + "vuid": "VUID-vkDestroyQueryPool-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyQueryPool-queryPool-parameter", + "text": " If queryPool is not VK_NULL_HANDLE, queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkDestroyQueryPool-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyQueryPool-queryPool-parent", + "text": " If queryPool is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCmdResetQueryPool": { + "core": [ + { + "vuid": "VUID-vkCmdResetQueryPool-firstQuery-00796", + "text": " firstQuery must be less than the number of queries in queryPool" + }, + { + "vuid": "VUID-vkCmdResetQueryPool-firstQuery-00797", + "text": " The sum of firstQuery and queryCount must be less than or equal to the number of queries in queryPool" + }, + { + "vuid": "VUID-vkCmdResetQueryPool-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdResetQueryPool-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdResetQueryPool-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdResetQueryPool-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdResetQueryPool-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdResetQueryPool-commonparent", + "text": " Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdBeginQuery": { + "core": [ + { + "vuid": "VUID-vkCmdBeginQuery-queryPool-01922", + "text": " queryPool must have been created with a queryType that differs from that of any queries that are &amp;lt;&amp;lt;queries-operation-active,active&amp;gt;&amp;gt; within commandBuffer" + }, + { + "vuid": "VUID-vkCmdBeginQuery-None-00807", + "text": " All queries used by the command must be unavailable" + }, + { + "vuid": "VUID-vkCmdBeginQuery-queryType-00800", + "text": " If the &amp;lt;&amp;lt;features-features-occlusionQueryPrecise,precise occlusion queries&amp;gt;&amp;gt; feature is not enabled, or the queryType used to create queryPool was not VK_QUERY_TYPE_OCCLUSION, flags must not contain VK_QUERY_CONTROL_PRECISE_BIT" + }, + { + "vuid": "VUID-vkCmdBeginQuery-query-00802", + "text": " query must be less than the number of queries in queryPool" + }, + { + "vuid": "VUID-vkCmdBeginQuery-queryType-00803", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_OCCLUSION, the VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBeginQuery-queryType-00804", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the pipelineStatistics indicate graphics operations, the VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBeginQuery-queryType-00805", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PIPELINE_STATISTICS and any of the 
pipelineStatistics indicate compute operations, the VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdBeginQuery-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBeginQuery-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdBeginQuery-flags-parameter", + "text": " flags must be a valid combination of VkQueryControlFlagBits values" + }, + { + "vuid": "VUID-vkCmdBeginQuery-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBeginQuery-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdBeginQuery-commonparent", + "text": " Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdBeginQuery-commandBuffer-01885", + "text": " commandBuffer must not be a protected command buffer" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdBeginQuery-query-00808", + "text": " If vkCmdBeginQuery is called within a render pass instance, the sum of query and the number of bits set in the current subpass’s view mask must be less than or equal to the number of queries in queryPool" + } + ] + }, + "vkCmdEndQuery": { + "core": [ + { + "vuid": "VUID-vkCmdEndQuery-None-01923", + "text": " All queries used by the command must be &amp;lt;&amp;lt;queries-operation-active,active&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdEndQuery-query-00810", + "text": " query must be less than the number of queries in queryPool" + }, + { + "vuid": "VUID-vkCmdEndQuery-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdEndQuery-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdEndQuery-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdEndQuery-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdEndQuery-commonparent", + "text": " Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdEndQuery-commandBuffer-01886", + "text": " commandBuffer must not be a protected command buffer" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdEndQuery-query-00812", + "text": " If vkCmdEndQuery is called within a render pass instance, the sum of query and the number of bits set in the current subpass’s view mask must be less than or equal to the number of queries in queryPool" + } + ] + }, + "vkGetQueryPoolResults": { + "core": [ + { + "vuid": "VUID-vkGetQueryPoolResults-firstQuery-00813", + "text": " firstQuery must be less than the number of queries in queryPool" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-flags-00814", + "text": " If VK_QUERY_RESULT_64_BIT is not set in flags then pData and stride must be multiples of 4" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-flags-00815", + 
"text": " If VK_QUERY_RESULT_64_BIT is set in flags then pData and stride must be multiples of 8" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-firstQuery-00816", + "text": " The sum of firstQuery and queryCount must be less than or equal to the number of queries in queryPool" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-dataSize-00817", + "text": " dataSize must be large enough to contain the result of each query, as described &amp;lt;&amp;lt;queries-operation-memorylayout,here&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-queryType-00818", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_TIMESTAMP, flags must not contain VK_QUERY_RESULT_PARTIAL_BIT" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-flags-parameter", + "text": " flags must be a valid combination of VkQueryResultFlagBits values" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": "VUID-vkGetQueryPoolResults-queryPool-parent", + "text": " queryPool must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCmdCopyQueryPoolResults": { + "core": [ + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-dstOffset-00819", + "text": " dstOffset must be less than the size of dstBuffer" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-firstQuery-00820", + "text": " firstQuery must be less than the number of queries in queryPool" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-firstQuery-00821", + "text": " The sum of firstQuery and queryCount must be less than or equal to the number of queries in queryPool" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-flags-00822", + "text": " If VK_QUERY_RESULT_64_BIT is not set in flags then dstOffset and stride must be multiples of 4" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-flags-00823", + "text": " If VK_QUERY_RESULT_64_BIT is set in flags then dstOffset and stride must be multiples of 8" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00824", + "text": " dstBuffer must have enough storage, from dstOffset, to contain the result of each query, as described &amp;lt;&amp;lt;queries-operation-memorylayout,here&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00825", + "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-dstBuffer-00826", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-00827", + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_TIMESTAMP, flags must not contain VK_QUERY_RESULT_PARTIAL_BIT" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-dstBuffer-parameter", + "text": " dstBuffer must be a valid 
VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-flags-parameter", + "text": " flags must be a valid combination of VkQueryResultFlagBits values" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdCopyQueryPoolResults-commonparent", + "text": " Each of commandBuffer, dstBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdWriteTimestamp": { + "core": [ + { + "vuid": "VUID-vkCmdWriteTimestamp-queryPool-01416", + "text": " queryPool must have been created with a queryType of VK_QUERY_TYPE_TIMESTAMP" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-queryPool-00828", + "text": " The query identified by queryPool and query must be unavailable" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-timestampValidBits-00829", + "text": " The command pool’s queue family must support a non-zero timestampValidBits" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-pipelineStage-parameter", + "text": " pipelineStage must be a valid VkPipelineStageFlagBits value" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-queryPool-parameter", + "text": " queryPool must be a valid VkQueryPool handle" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-commonparent", + "text": " Both of commandBuffer, and queryPool must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdWriteTimestamp-None-00830", + "text": " All queries used by the command must be unavailable" + }, + { + "vuid": "VUID-vkCmdWriteTimestamp-query-00831", + "text": " If vkCmdWriteTimestamp is called within a render pass instance, the sum of query and the number of bits set in the current subpass’s view mask must be less than or equal to the number of queries in queryPool" + } + ] + }, + "vkCmdClearColorImage": { + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-vkCmdClearColorImage-image-00001", + "text": " image must use a format that supports VK_FORMAT_FEATURE_TRANSFER_DST_BIT, which is indicated by VkFormatProperties::linearTilingFeatures (for linearly tiled images) or VkFormatProperties::optimalTilingFeatures (for optimally tiled images) - as returned by vkGetPhysicalDeviceFormatProperties" + } + ], + "core": [ + { + "vuid": "VUID-vkCmdClearColorImage-image-00002", + "text": " image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdClearColorImage-image-00003", + "text": " If image is non-sparse then it must be bound completely and 
contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdClearColorImage-imageLayout-00004", + "text": " imageLayout must specify the layout of the image subresource ranges of image specified in pRanges at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdClearColorImage-baseMipLevel-01470", + "text": " The VkImageSubresourceRange::baseMipLevel members of the elements of the pRanges array must each be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkCmdClearColorImage-pRanges-01692", + "text": " For each VkImageSubresourceRange element of pRanges, if the levelCount member is not VK_REMAINING_MIP_LEVELS, then baseMipLevel + levelCount must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkCmdClearColorImage-baseArrayLayer-01472", + "text": " The VkImageSubresourceRange::baseArrayLayer members of the elements of the pRanges array must each be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkCmdClearColorImage-pRanges-01693", + "text": " For each VkImageSubresourceRange element of pRanges, if the layerCount member is not VK_REMAINING_ARRAY_LAYERS, then baseArrayLayer + layerCount must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkCmdClearColorImage-image-00007", + "text": " image must not have a compressed or depth/stencil format" + }, + { + "vuid": "VUID-vkCmdClearColorImage-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdClearColorImage-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkCmdClearColorImage-imageLayout-parameter", + "text": " imageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-vkCmdClearColorImage-pColor-parameter", + "text": " pColor must be a valid pointer to a valid VkClearColorValue union" + }, + { + "vuid": "VUID-vkCmdClearColorImage-pRanges-parameter", + "text": " pRanges must be a valid pointer to an array of rangeCount valid VkImageSubresourceRange structures" + }, + { + "vuid": "VUID-vkCmdClearColorImage-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdClearColorImage-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdClearColorImage-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdClearColorImage-rangeCount-arraylength", + "text": " rangeCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdClearColorImage-commonparent", + "text": " Both of commandBuffer, and image must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkCmdClearColorImage-image-01545", + "text": " image must not use a format listed in &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion&amp;gt;&amp;gt;" + } + ], + "!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdClearColorImage-imageLayout-00005", + "text": " imageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + 
"(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdClearColorImage-imageLayout-01394", + "text": " imageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdClearColorImage-commandBuffer-01805", + "text": " If commandBuffer is an unprotected command buffer, then image must not be a protected image" + }, + { + "vuid": "VUID-vkCmdClearColorImage-commandBuffer-01806", + "text": " If commandBuffer is a protected command buffer, then image must not be an unprotected image" + } + ] + }, + "vkCmdClearDepthStencilImage": { + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-vkCmdClearDepthStencilImage-image-00008", + "text": " image must use a format that supports VK_FORMAT_FEATURE_TRANSFER_DST_BIT, which is indicated by VkFormatProperties::linearTilingFeatures (for linearly tiled images) or VkFormatProperties::optimalTilingFeatures (for optimally tiled images) - as returned by vkGetPhysicalDeviceFormatProperties" + } + ], + "core": [ + { + "vuid": "VUID-vkCmdClearDepthStencilImage-image-00009", + "text": " image must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-image-00010", + "text": " If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-imageLayout-00011", + "text": " imageLayout must specify the layout of the image subresource ranges of image specified in pRanges at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-imageLayout-00012", + "text": " imageLayout must be either of VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-baseMipLevel-01474", + "text": " The VkImageSubresourceRange::baseMipLevel members of the elements of the pRanges array must each be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-01694", + "text": " For each VkImageSubresourceRange element of pRanges, if the levelCount member is not VK_REMAINING_MIP_LEVELS, then baseMipLevel + levelCount must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-baseArrayLayer-01476", + "text": " The VkImageSubresourceRange::baseArrayLayer members of the elements of the pRanges array must each be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-01695", + "text": " For each VkImageSubresourceRange element of pRanges, if the layerCount member is not VK_REMAINING_ARRAY_LAYERS, then baseArrayLayer + layerCount must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-image-00014", + "text": " image must have a depth/stencil format" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-imageLayout-parameter", + "text": " imageLayout must be a valid VkImageLayout value" + }, + { + "vuid": 
"VUID-vkCmdClearDepthStencilImage-pDepthStencil-parameter", + "text": " pDepthStencil must be a valid pointer to a valid VkClearDepthStencilValue structure" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-pRanges-parameter", + "text": " pRanges must be a valid pointer to an array of rangeCount valid VkImageSubresourceRange structures" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-rangeCount-arraylength", + "text": " rangeCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-commonparent", + "text": " Both of commandBuffer, and image must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-01807", + "text": " If commandBuffer is an unprotected command buffer, then image must not be a protected image" + }, + { + "vuid": "VUID-vkCmdClearDepthStencilImage-commandBuffer-01808", + "text": " If commandBuffer is a protected command buffer, then image must not be an unprotected image" + } + ] + }, + "vkCmdClearAttachments": { + "core": [ + { + "vuid": "VUID-vkCmdClearAttachments-aspectMask-00015", + "text": " If the aspectMask member of any element of pAttachments contains VK_IMAGE_ASPECT_COLOR_BIT, the colorAttachment member of that element must refer to a valid color attachment in the current subpass" + }, + { + "vuid": "VUID-vkCmdClearAttachments-pRects-00016", + "text": " The rectangular region specified by each element of pRects must be contained within the render area of the current render pass instance" + }, + { + "vuid": "VUID-vkCmdClearAttachments-pRects-00017", + "text": " The layers specified by each element of pRects must be contained within every attachment that pAttachments refers to" + }, + { + "vuid": "VUID-vkCmdClearAttachments-layerCount-01934", + "text": " The layerCount member of each element of pRects must not be 0" + }, + { + "vuid": "VUID-vkCmdClearAttachments-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdClearAttachments-pAttachments-parameter", + "text": " pAttachments must be a valid pointer to an array of attachmentCount valid VkClearAttachment structures" + }, + { + "vuid": "VUID-vkCmdClearAttachments-pRects-parameter", + "text": " pRects must be a valid pointer to an array of rectCount VkClearRect structures" + }, + { + "vuid": "VUID-vkCmdClearAttachments-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdClearAttachments-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdClearAttachments-renderpass", + "text": " This command must only be called inside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdClearAttachments-attachmentCount-arraylength", + "text": " attachmentCount must be greater than 0" + }, + { + 
"vuid": "VUID-vkCmdClearAttachments-rectCount-arraylength", + "text": " rectCount must be greater than 0" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdClearAttachments-baseArrayLayer-00018", + "text": " If the render pass instance this is recorded in uses multiview, then baseArrayLayer must be zero and layerCount must be one." + } + ] + }, + "VkClearAttachment": { + "core": [ + { + "vuid": "VUID-VkClearAttachment-aspectMask-00019", + "text": " If aspectMask includes VK_IMAGE_ASPECT_COLOR_BIT, it must not include VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-VkClearAttachment-aspectMask-00020", + "text": " aspectMask must not include VK_IMAGE_ASPECT_METADATA_BIT" + }, + { + "vuid": "VUID-VkClearAttachment-clearValue-00021", + "text": " clearValue must be a valid VkClearValue union" + }, + { + "vuid": "VUID-VkClearAttachment-aspectMask-parameter", + "text": " aspectMask must be a valid combination of VkImageAspectFlagBits values" + }, + { + "vuid": "VUID-VkClearAttachment-aspectMask-requiredbitmask", + "text": " aspectMask must not be 0" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkClearAttachment-commandBuffer-01809", + "text": " If commandBuffer is an unprotected command buffer, then the attachment to be cleared must not be a protected image." + }, + { + "vuid": "VUID-VkClearAttachment-commandBuffer-01810", + "text": " If commandBuffer is a protected command buffer, then the attachment to be cleared must not be an unprotected image." + } + ] + }, + "VkClearDepthStencilValue": { + "(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-VkClearDepthStencilValue-depth-00022", + "text": " Unless the VK_EXT_depth_range_unrestricted extension is enabled depth must be between 0.0 and 1.0, inclusive" + } + ], + "!(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-VkClearDepthStencilValue-depth-00022", + "text": " depth must be between 0.0 and 1.0, inclusive" + } + ] + }, + "VkClearValue": { + "core": [ + { + "vuid": "VUID-VkClearValue-depthStencil-00023", + "text": " depthStencil must be a valid VkClearDepthStencilValue structure" + } + ] + }, + "vkCmdFillBuffer": { + "core": [ + { + "vuid": "VUID-vkCmdFillBuffer-dstOffset-00024", + "text": " dstOffset must be less than the size of dstBuffer" + }, + { + "vuid": "VUID-vkCmdFillBuffer-dstOffset-00025", + "text": " dstOffset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdFillBuffer-size-00026", + "text": " If size is not equal to VK_WHOLE_SIZE, size must be greater than 0" + }, + { + "vuid": "VUID-vkCmdFillBuffer-size-00027", + "text": " If size is not equal to VK_WHOLE_SIZE, size must be less than or equal to the size of dstBuffer minus dstOffset" + }, + { + "vuid": "VUID-vkCmdFillBuffer-size-00028", + "text": " If size is not equal to VK_WHOLE_SIZE, size must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdFillBuffer-dstBuffer-00029", + "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdFillBuffer-dstBuffer-00031", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdFillBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdFillBuffer-dstBuffer-parameter", + "text": " dstBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdFillBuffer-commandBuffer-recording", + "text": " commandBuffer must be 
in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdFillBuffer-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics or compute operations" + }, + { + "vuid": "VUID-vkCmdFillBuffer-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdFillBuffer-commonparent", + "text": " Both of commandBuffer, and dstBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-vkCmdFillBuffer-commandBuffer-00030", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics or compute operations" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdFillBuffer-commandBuffer-01811", + "text": " If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdFillBuffer-commandBuffer-01812", + "text": " If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer" + } + ] + }, + "vkCmdUpdateBuffer": { + "core": [ + { + "vuid": "VUID-vkCmdUpdateBuffer-dstOffset-00032", + "text": " dstOffset must be less than the size of dstBuffer" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-dataSize-00033", + "text": " dataSize must be less than or equal to the size of dstBuffer minus dstOffset" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-dstBuffer-00034", + "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-dstBuffer-00035", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-dstOffset-00036", + "text": " dstOffset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-dataSize-00037", + "text": " dataSize must be less than or equal to 65536" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-dataSize-00038", + "text": " dataSize must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-dstBuffer-parameter", + "text": " dstBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-pData-parameter", + "text": " pData must be a valid pointer to an array of dataSize bytes" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-dataSize-arraylength", + "text": " dataSize must be greater than 0" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-commonparent", + "text": " Both of commandBuffer, and dstBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-01813", + "text": " If commandBuffer is an unprotected command buffer, then dstBuffer must not be a 
protected buffer" + }, + { + "vuid": "VUID-vkCmdUpdateBuffer-commandBuffer-01814", + "text": " If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer" + } + ] + }, + "vkCmdCopyBuffer": { + "core": [ + { + "vuid": "VUID-vkCmdCopyBuffer-size-00112", + "text": " The size member of each element of pRegions must be greater than 0" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-srcOffset-00113", + "text": " The srcOffset member of each element of pRegions must be less than the size of srcBuffer" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-dstOffset-00114", + "text": " The dstOffset member of each element of pRegions must be less than the size of dstBuffer" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-size-00115", + "text": " The size member of each element of pRegions must be less than or equal to the size of srcBuffer minus srcOffset" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-size-00116", + "text": " The size member of each element of pRegions must be less than or equal to the size of dstBuffer minus dstOffset" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-pRegions-00117", + "text": " The union of the source regions, and the union of the destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-srcBuffer-00118", + "text": " srcBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-srcBuffer-00119", + "text": " If srcBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-dstBuffer-00120", + "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-dstBuffer-00121", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-srcBuffer-parameter", + "text": " srcBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-dstBuffer-parameter", + "text": " dstBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount VkBufferCopy structures" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-commonparent", + "text": " Each of commandBuffer, dstBuffer, and srcBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-01822", + "text": " If commandBuffer is an unprotected command buffer, then srcBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-01823", + "text": " If 
commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdCopyBuffer-commandBuffer-01824", + "text": " If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer" + } + ] + }, + "vkCmdCopyImage": { + "core": [ + { + "vuid": "VUID-vkCmdCopyImage-pRegions-00122", + "text": " The source region specified by each element of pRegions must be a region that is contained within srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-pRegions-00123", + "text": " The destination region specified by each element of pRegions must be a region that is contained within dstImage" + }, + { + "vuid": "VUID-vkCmdCopyImage-pRegions-00124", + "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-00126", + "text": " srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImageLayout-00128", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-00131", + "text": " dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImageLayout-00133", + "text": " dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-00136", + "text": " The sample count of srcImage and dstImage must match" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcSubresource-01696", + "text": " The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstSubresource-01697", + "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcSubresource-01698", + "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstSubresource-01699", + "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcOffset-01783", + "text": " The srcOffset and extent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstOffset-01784", + "text": " The dstOffset and extent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" + }, + { + "vuid": "VUID-vkCmdCopyImage-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-parameter", + "text": " srcImage must be a valid VkImage 
handle" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImageLayout-parameter", + "text": " srcImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-parameter", + "text": " dstImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImageLayout-parameter", + "text": " dstImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-vkCmdCopyImage-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkImageCopy structures" + }, + { + "vuid": "VUID-vkCmdCopyImage-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdCopyImage-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyImage-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdCopyImage-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdCopyImage-commonparent", + "text": " Each of commandBuffer, dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-vkCmdCopyImage-srcImage-00125", + "text": " srcImage must use a format that supports VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, which is indicated by VkFormatProperties::linearTilingFeatures (for linearly tiled images) or VkFormatProperties::optimalTilingFeatures (for optimally tiled images) - as returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-00130", + "text": " dstImage must use a format that supports VK_FORMAT_FEATURE_TRANSFER_DST_BIT, which is indicated by VkFormatProperties::linearTilingFeatures (for linearly tiled images) or VkFormatProperties::optimalTilingFeatures (for optimally tiled images) - as returned by vkGetPhysicalDeviceFormatProperties" + } + ], + "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkCmdCopyImage-srcImage-00127", + "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-00132", + "text": " If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-00135", + "text": " The VkFormat of each of srcImage and dstImage must be compatible, as defined &amp;lt;&amp;lt;copies-images-format-compatibility, below&amp;gt;&amp;gt;" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01546", + "text": " If srcImage is non-sparse then the image or disjoint plane to be copied must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImage-01547", + "text": " If dstImage is non-sparse then the image or disjoint plane that is the destination of the copy must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-01548", + "text": " If the VkFormat of each of srcImage and dstImage is not a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar format&amp;gt;&amp;gt;, the VkFormat of each of srcImage 
and dstImage must be compatible, as defined &amp;lt;&amp;lt;copies-images-format-compatibility, below&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdCopyImage-None-01549", + "text": " In a copy to or from a plane of a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar image&amp;gt;&amp;gt;, the VkFormat of the image and plane must be compatible according to &amp;lt;&amp;lt;features-formats-compatible-planes,the description of compatible planes&amp;gt;&amp;gt; for the plane being copied" + }, + { + "vuid": "VUID-vkCmdCopyImage-aspectMask-01550", + "text": " When a copy is performed to or from an image with a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar format&amp;gt;&amp;gt;, the aspectMask of the srcSubresource and/or dstSubresource that refers to the multi-planar image must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for a VkFormat with three planes)" + } + ], + "!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdCopyImage-srcImageLayout-00129", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImageLayout-00134", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdCopyImage-srcImageLayout-01917", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + }, + { + "vuid": "VUID-vkCmdCopyImage-dstImageLayout-01395", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdCopyImage-commandBuffer-01825", + "text": " If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdCopyImage-commandBuffer-01826", + "text": " If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdCopyImage-commandBuffer-01827", + "text": " If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image" + } + ] + }, + "VkImageCopy": { + "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageCopy-aspectMask-00137", + "text": " The aspectMask member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageCopy-srcOffset-00157", + "text": " If the calling command’s srcImage is a compressed image, all members of srcOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkImageCopy-extent-00158", + "text": " If the calling command’s srcImage is a compressed image, extent.width must be a multiple of the compressed texel block width or (extent.width + srcOffset.x) must equal the source image subresource width" + }, + { + "vuid": "VUID-VkImageCopy-extent-00159", + "text": " If the calling command’s srcImage is a compressed image, extent.height must be a multiple of the compressed texel block height or (extent.height + srcOffset.y) must equal the source image subresource height" + }, + { + "vuid": "VUID-VkImageCopy-extent-00160", + "text": " If the calling command’s srcImage is a compressed image, extent.depth must be a multiple of the compressed texel 
block depth or (extent.depth + srcOffset.z) must equal the source image subresource depth" + }, + { + "vuid": "VUID-VkImageCopy-dstOffset-00162", + "text": " If the calling command’s dstImage is a compressed format image, all members of dstOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkImageCopy-extent-00163", + "text": " If the calling command’s dstImage is a compressed format image, extent.width must be a multiple of the compressed texel block width or (extent.width + dstOffset.x) must equal the destination image subresource width" + }, + { + "vuid": "VUID-VkImageCopy-extent-00164", + "text": " If the calling command’s dstImage is a compressed format image, extent.height must be a multiple of the compressed texel block height or (extent.height + dstOffset.y) must equal the destination image subresource height" + }, + { + "vuid": "VUID-VkImageCopy-extent-00165", + "text": " If the calling command’s dstImage is a compressed format image, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + dstOffset.z) must equal the destination image subresource depth" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkImageCopy-srcImage-01551", + "text": " If neither the calling command’s srcImage nor the calling command’s dstImage has a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion, multi-planar image format&amp;gt;&amp;gt; then the aspectMask member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01552", + "text": " If the calling command’s srcImage has a VkFormat with &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,two planes&amp;gt;&amp;gt; then the srcSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01553", + "text": " If the calling command’s srcImage has a VkFormat with &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,three planes&amp;gt;&amp;gt; then the srcSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01554", + "text": " If the calling command’s dstImage has a VkFormat with &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,two planes&amp;gt;&amp;gt; then the dstSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01555", + "text": " If the calling command’s dstImage has a VkFormat with &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,three planes&amp;gt;&amp;gt; then the dstSubresource aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01556", + "text": " If the calling command’s srcImage has a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar image format&amp;gt;&amp;gt; and the dstImage does not have a multi-planar image format, the dstSubresource aspectMask must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01557", + "text": " If the calling command’s dstImage has a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar image format&amp;gt;&amp;gt; and the srcImage does not have a multi-planar image format, the srcSubresource aspectMask 
must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01727", + "text": " If the calling command’s srcImage is a compressed image, or a single-plane, “_422” image format, all members of srcOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01728", + "text": " If the calling command’s srcImage is a compressed image, or a single-plane, “_422” image format, extent.width must be a multiple of the compressed texel block width or (extent.width + srcOffset.x) must equal the source image subresource width" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01729", + "text": " If the calling command’s srcImage is a compressed image, or a single-plane, “_422” image format, extent.height must be a multiple of the compressed texel block height or (extent.height + srcOffset.y) must equal the source image subresource height" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01730", + "text": " If the calling command’s srcImage is a compressed image, or a single-plane, “_422” image format, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + srcOffset.z) must equal the source image subresource depth" + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01731", + "text": " If the calling command’s dstImage is a compressed format image, or a single-plane, “_422” image format, all members of dstOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01732", + "text": " If the calling command’s dstImage is a compressed format image, or a single-plane, “_422” image format, extent.width must be a multiple of the compressed texel block width or (extent.width + dstOffset.x) must equal the destination image subresource width" + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01733", + "text": " If the calling command’s dstImage is a compressed format image, or a single-plane, “_422” image format, extent.height must be a multiple of the compressed texel block height or (extent.height + dstOffset.y) must equal the destination image subresource height" + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01734", + "text": " If the calling command’s dstImage is a compressed format image, or a single-plane, “_422” image format, extent.depth must be a multiple of the compressed texel block depth or (extent.depth + dstOffset.z) must equal the destination image subresource depth" + } + ], + "!(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkImageCopy-layerCount-00138", + "text": " The layerCount member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-00139", + "text": " If either of the calling command’s srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01789", + "text": " If the calling command’s srcImage or dstImage is of type VK_IMAGE_TYPE_2D, then extent.depth must be 1." 
+ } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-VkImageCopy-extent-00140", + "text": " The number of slices of the extent (for 3D) or layers of the srcSubresource (for non-3D) must match the number of slices of the extent (for 3D) or layers of the dstSubresource (for non-3D)" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-00141", + "text": " If either of the calling command’s srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of the corresponding subresource must be 0 and 1, respectively" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01790", + "text": " If both srcImage and dstImage are of type VK_IMAGE_TYPE_2D then then extent.depth must be 1." + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01791", + "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_2D, and the dstImage is of type VK_IMAGE_TYPE_3D, then extent.depth must equal to the layerCount member of srcSubresource." + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01792", + "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_2D, and the srcImage is of type VK_IMAGE_TYPE_3D, then extent.depth must equal to the layerCount member of dstSubresource." + } + ], + "core": [ + { + "vuid": "VUID-VkImageCopy-aspectMask-00142", + "text": " The aspectMask member of srcSubresource must specify aspects present in the calling command’s srcImage" + }, + { + "vuid": "VUID-VkImageCopy-aspectMask-00143", + "text": " The aspectMask member of dstSubresource must specify aspects present in the calling command’s dstImage" + }, + { + "vuid": "VUID-VkImageCopy-srcOffset-00144", + "text": " srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the source image subresource width" + }, + { + "vuid": "VUID-VkImageCopy-srcOffset-00145", + "text": " srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the source image subresource height" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-00146", + "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.y must be 0 and extent.height must be 1." + }, + { + "vuid": "VUID-VkImageCopy-srcOffset-00147", + "text": " srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the source image subresource depth" + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01785", + "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.z must be 0 and extent.depth must be 1." + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01786", + "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.z must be 0 and extent.depth must be 1." + }, + { + "vuid": "VUID-VkImageCopy-srcImage-01787", + "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_2D, then srcOffset.z must be 0." + }, + { + "vuid": "VUID-VkImageCopy-dstImage-01788", + "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_2D, then dstOffset.z must be 0." 
+ }, + { + "vuid": "VUID-VkImageCopy-dstOffset-00150", + "text": " dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the destination image subresource width" + }, + { + "vuid": "VUID-VkImageCopy-dstOffset-00151", + "text": " dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the destination image subresource height" + }, + { + "vuid": "VUID-VkImageCopy-dstImage-00152", + "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.y must be 0 and extent.height must be 1." + }, + { + "vuid": "VUID-VkImageCopy-dstOffset-00153", + "text": " dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the destination image subresource depth" + }, + { + "vuid": "VUID-VkImageCopy-srcSubresource-parameter", + "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" + }, + { + "vuid": "VUID-VkImageCopy-dstSubresource-parameter", + "text": " dstSubresource must be a valid VkImageSubresourceLayers structure" + } + ] + }, + "VkImageSubresourceLayers": { + "core": [ + { + "vuid": "VUID-VkImageSubresourceLayers-aspectMask-00167", + "text": " If aspectMask contains VK_IMAGE_ASPECT_COLOR_BIT, it must not contain either of VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-VkImageSubresourceLayers-aspectMask-00168", + "text": " aspectMask must not contain VK_IMAGE_ASPECT_METADATA_BIT" + }, + { + "vuid": "VUID-VkImageSubresourceLayers-layerCount-01700", + "text": " layerCount must be greater than 0" + }, + { + "vuid": "VUID-VkImageSubresourceLayers-aspectMask-parameter", + "text": " aspectMask must be a valid combination of VkImageAspectFlagBits values" + }, + { + "vuid": "VUID-VkImageSubresourceLayers-aspectMask-requiredbitmask", + "text": " aspectMask must not be 0" + } + ] + }, + "vkCmdCopyBufferToImage": { + "core": [ + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00171", + "text": " srcBuffer must be large enough to contain all buffer locations that are accessed according to &amp;lt;&amp;lt;copies-buffers-images-addressing,Buffer and Image Addressing&amp;gt;&amp;gt;, for each element of pRegions" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00172", + "text": " The image region specified by each element of pRegions must be a region that is contained within dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00173", + "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-srcBuffer-00174", + "text": " srcBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-srcBuffer-00176", + "text": " If srcBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-00177", + "text": " dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-00178", + "text": " If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-00179", + "text": " dstImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": 
"VUID-vkCmdCopyBufferToImage-dstImageLayout-00180", + "text": " dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-01701", + "text": " The imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-01702", + "text": " The imageSubresource.baseArrayLayer + imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-01793", + "text": " The imageOffset and and imageExtent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-srcBuffer-parameter", + "text": " srcBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-parameter", + "text": " dstImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImageLayout-parameter", + "text": " dstImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkBufferImageCopy structures" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-commonparent", + "text": " Each of commandBuffer, dstImage, and srcBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-00175", + "text": " dstImage must use a format that supports VK_FORMAT_FEATURE_TRANSFER_DST_BIT, which is indicated by VkFormatProperties::linearTilingFeatures (for linearly tiled images) or VkFormatProperties::optimalTilingFeatures (for optimally tiled images) - as returned by vkGetPhysicalDeviceFormatProperties" + } + ], + "!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImageLayout-00181", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImageLayout-01396", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": 
"VUID-vkCmdCopyBufferToImage-commandBuffer-01828", + "text": " If commandBuffer is an unprotected command buffer, then srcBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-01829", + "text": " If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-01830", + "text": " If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image" + } + ] + }, + "vkCmdCopyImageToBuffer": { + "core": [ + { + "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-00182", + "text": " The image region specified by each element of pRegions must be a region that is contained within srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-00183", + "text": " dstBuffer must be large enough to contain all buffer locations that are accessed according to &amp;lt;&amp;lt;copies-buffers-images-addressing,Buffer and Image Addressing&amp;gt;&amp;gt;, for each element of pRegions" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-00184", + "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00186", + "text": " srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00187", + "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00188", + "text": " srcImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00189", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-dstBuffer-00191", + "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-dstBuffer-00192", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-01703", + "text": " The imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-01704", + "text": " The imageSubresource.baseArrayLayer + imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-01794", + "text": " The imageOffset and and imageExtent members of each element of pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-parameter", + "text": " srcImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-parameter", + "text": " srcImageLayout must be a valid VkImageLayout 
value" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-dstBuffer-parameter", + "text": " dstBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkBufferImageCopy structures" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-commonparent", + "text": " Each of commandBuffer, dstBuffer, and srcImage must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance1)": [ + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-00185", + "text": " srcImage must use a format that supports VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, which is indicated by VkFormatProperties::linearTilingFeatures (for linearly tiled images) or VkFormatProperties::optimalTilingFeatures (for optimally tiled images) - as returned by vkGetPhysicalDeviceFormatProperties" + } + ], + "!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-00190", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-01831", + "text": " If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-01832", + "text": " If commandBuffer is an unprotected command buffer, then dstBuffer must not be a protected buffer" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-01833", + "text": " If commandBuffer is a protected command buffer, then dstBuffer must not be an unprotected buffer" + } + ] + }, + "VkBufferImageCopy": { + "!(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkBufferImageCopy-bufferOffset-00193", + "text": " If the calling command’s VkImage parameter’s format is not a depth/stencil format, then bufferOffset must be a multiple of the format’s element size" + }, + { + "vuid": "VUID-VkBufferImageCopy-bufferRowLength-00203", + "text": " If the calling command’s VkImage parameter is a compressed image, bufferRowLength must be a multiple of the compressed texel block width" + }, + { + "vuid": "VUID-VkBufferImageCopy-bufferImageHeight-00204", + "text": " If the calling command’s VkImage parameter is a compressed image, bufferImageHeight must be a multiple of the compressed texel block height" + }, + { + "vuid": "VUID-VkBufferImageCopy-imageOffset-00205", + "text": " If the calling command’s VkImage parameter is a compressed image, all members of imageOffset must be a multiple of 
the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkBufferImageCopy-bufferOffset-00206", + "text": " If the calling command’s VkImage parameter is a compressed image, bufferOffset must be a multiple of the compressed texel block size in bytes" + }, + { + "vuid": "VUID-VkBufferImageCopy-imageExtent-00207", + "text": " If the calling command’s VkImage parameter is a compressed image, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the image subresource width" + }, + { + "vuid": "VUID-VkBufferImageCopy-imageExtent-00208", + "text": " If the calling command’s VkImage parameter is a compressed image, imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the image subresource height" + }, + { + "vuid": "VUID-VkBufferImageCopy-imageExtent-00209", + "text": " If the calling command’s VkImage parameter is a compressed image, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the image subresource depth" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkBufferImageCopy-bufferOffset-01558", + "text": " If the calling command’s VkImage parameter’s format is not a depth/stencil format or a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar format&amp;gt;&amp;gt;, then bufferOffset must be a multiple of the format’s element size" + }, + { + "vuid": "VUID-VkBufferImageCopy-bufferOffset-01559", + "text": " If the calling command’s VkImage parameter’s format is a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar format&amp;gt;&amp;gt;, then bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in &amp;lt;&amp;lt;features-formats-compatible-planes&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkBufferImageCopy-None-01735", + "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, bufferRowLength must be a multiple of the compressed texel block width" + }, + { + "vuid": "VUID-VkBufferImageCopy-None-01736", + "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, bufferImageHeight must be a multiple of the compressed texel block height" + }, + { + "vuid": "VUID-VkBufferImageCopy-None-01737", + "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, all members of imageOffset must be a multiple of the corresponding dimensions of the compressed texel block" + }, + { + "vuid": "VUID-VkBufferImageCopy-None-01738", + "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, bufferOffset must be a multiple of the compressed texel block size in bytes" + }, + { + "vuid": "VUID-VkBufferImageCopy-None-01739", + "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, imageExtent.width must be a multiple of the compressed texel block width or (imageExtent.width + imageOffset.x) must equal the image subresource width" + }, + { + "vuid": "VUID-VkBufferImageCopy-None-01740", + "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, 
imageExtent.height must be a multiple of the compressed texel block height or (imageExtent.height + imageOffset.y) must equal the image subresource height" + }, + { + "vuid": "VUID-VkBufferImageCopy-None-01741", + "text": " If the calling command’s VkImage parameter is a compressed image, or a single-plane, “_422” image format, imageExtent.depth must be a multiple of the compressed texel block depth or (imageExtent.depth + imageOffset.z) must equal the image subresource depth" + }, + { + "vuid": "VUID-VkBufferImageCopy-aspectMask-01560", + "text": " If the calling command’s VkImage parameter’s format is a &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion,multi-planar format&amp;gt;&amp;gt;, then the aspectMask member of imageSubresource must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT (with VK_IMAGE_ASPECT_PLANE_2_BIT valid only for image formats with three planes)" + } + ], + "core": [ + { + "vuid": "VUID-VkBufferImageCopy-bufferOffset-00194", + "text": " bufferOffset must be a multiple of 4" + }, + { + "vuid": "VUID-VkBufferImageCopy-bufferRowLength-00195", + "text": " bufferRowLength must be 0, or greater than or equal to the width member of imageExtent" + }, + { + "vuid": "VUID-VkBufferImageCopy-bufferImageHeight-00196", + "text": " bufferImageHeight must be 0, or greater than or equal to the height member of imageExtent" + }, + { + "vuid": "VUID-VkBufferImageCopy-imageOffset-00197", + "text": " imageOffset.x and (imageExtent.width + imageOffset.x) must both be greater than or equal to 0 and less than or equal to the image subresource width" + }, + { + "vuid": "VUID-VkBufferImageCopy-imageOffset-00198", + "text": " imageOffset.y and (imageExtent.height + imageOffset.y) must both be greater than or equal to 0 and less than or equal to the image subresource height" + }, + { + "vuid": "VUID-VkBufferImageCopy-srcImage-00199", + "text": " If the calling command’s srcImage (vkCmdCopyImageToBuffer) or dstImage (vkCmdCopyBufferToImage) is of type VK_IMAGE_TYPE_1D, then imageOffset.y must be 0 and imageExtent.height must be 1." 
+ }, + { + "vuid": "VUID-VkBufferImageCopy-imageOffset-00200", + "text": " imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the image subresource depth" + }, + { + "vuid": "VUID-VkBufferImageCopy-srcImage-00201", + "text": " If the calling command’s srcImage (vkCmdCopyImageToBuffer) or dstImage (vkCmdCopyBufferToImage) is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then imageOffset.z must be 0 and imageExtent.depth must be 1" + }, + { + "vuid": "VUID-VkBufferImageCopy-aspectMask-00211", + "text": " The aspectMask member of imageSubresource must specify aspects present in the calling command’s VkImage parameter" + }, + { + "vuid": "VUID-VkBufferImageCopy-aspectMask-00212", + "text": " The aspectMask member of imageSubresource must only have a single bit set" + }, + { + "vuid": "VUID-VkBufferImageCopy-baseArrayLayer-00213", + "text": " If the calling command’s VkImage parameter is of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of imageSubresource must be 0 and 1, respectively" + }, + { + "vuid": "VUID-VkBufferImageCopy-None-00214", + "text": " When copying to the depth aspect of an image subresource, the data in the source buffer must be in the range [0,1]" + }, + { + "vuid": "VUID-VkBufferImageCopy-imageSubresource-parameter", + "text": " imageSubresource must be a valid VkImageSubresourceLayers structure" + } + ] + }, + "vkCmdBlitImage": { + "core": [ + { + "vuid": "VUID-vkCmdBlitImage-pRegions-00215", + "text": " The source region specified by each element of pRegions must be a region that is contained within srcImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-pRegions-00216", + "text": " The destination region specified by each element of pRegions must be a region that is contained within dstImage" + }, + { + "vuid": "VUID-vkCmdBlitImage-pRegions-00217", + "text": " The union of all destination regions, specified by the elements of pRegions, must not overlap in memory with any texel that may be sampled during the blit operation" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00218", + "text": " srcImage must use a format that supports VK_FORMAT_FEATURE_BLIT_SRC_BIT, which is indicated by VkFormatProperties::linearTilingFeatures (for linearly tiled images) or VkFormatProperties::optimalTilingFeatures (for optimally tiled images) - as returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00219", + "text": " srcImage must have been created with VK_IMAGE_USAGE_TRANSFER_SRC_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00220", + "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImageLayout-00221", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImage-00223", + "text": " dstImage must use a format that supports VK_FORMAT_FEATURE_BLIT_DST_BIT, which is indicated by VkFormatProperties::linearTilingFeatures (for linearly tiled images) or VkFormatProperties::optimalTilingFeatures (for optimally tiled images) - as returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImage-00224", + "text": " dstImage must have been created with VK_IMAGE_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImage-00225", + 
"text": " If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImageLayout-00226", + "text": " dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00228", + "text": " The sample count of srcImage and dstImage must both be equal to VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00229", + "text": " If either of srcImage or dstImage was created with a signed integer VkFormat, the other must also have been created with a signed integer VkFormat" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00230", + "text": " If either of srcImage or dstImage was created with an unsigned integer VkFormat, the other must also have been created with an unsigned integer VkFormat" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00231", + "text": " If either of srcImage or dstImage was created with a depth/stencil format, the other must have exactly the same format" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00232", + "text": " If srcImage was created with a depth/stencil format, filter must be VK_FILTER_NEAREST" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-00233", + "text": " srcImage must have been created with a samples value of VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImage-00234", + "text": " dstImage must have been created with a samples value of VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-vkCmdBlitImage-filter-00235", + "text": " If filter is VK_FILTER_LINEAR, srcImage must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcSubresource-01705", + "text": " The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstSubresource-01706", + "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcSubresource-01707", + "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstSubresource-01708", + "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-vkCmdBlitImage-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImage-parameter", + "text": " srcImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkCmdBlitImage-srcImageLayout-parameter", + "text": " srcImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImage-parameter", + "text": " dstImage must be a valid VkImage handle" + }, + { + "vuid": 
"VUID-vkCmdBlitImage-dstImageLayout-parameter", + "text": " dstImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-vkCmdBlitImage-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkImageBlit structures" + }, + { + "vuid": "VUID-vkCmdBlitImage-filter-parameter", + "text": " filter must be a valid VkFilter value" + }, + { + "vuid": "VUID-vkCmdBlitImage-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBlitImage-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBlitImage-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdBlitImage-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdBlitImage-commonparent", + "text": " Each of commandBuffer, dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-vkCmdBlitImage-srcImage-01561", + "text": " srcImage must not use a format listed in &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImage-01562", + "text": " dstImage must not use a format listed in &amp;lt;&amp;lt;features-formats-requiring-sampler-ycbcr-conversion&amp;gt;&amp;gt;" + } + ], + "!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdBlitImage-srcImageLayout-00222", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImageLayout-00227", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdBlitImage-srcImageLayout-01398", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-vkCmdBlitImage-dstImageLayout-01399", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_IMG_filter_cubic)": [ + { + "vuid": "VUID-vkCmdBlitImage-filter-00236", + "text": " If filter is VK_FILTER_CUBIC_IMG, srcImage must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdBlitImage-filter-00237", + "text": " If filter is VK_FILTER_CUBIC_IMG, srcImage must have a VkImageType of VK_IMAGE_TYPE_3D" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdBlitImage-commandBuffer-01834", + "text": " If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdBlitImage-commandBuffer-01835", + "text": " If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdBlitImage-commandBuffer-01836", + "text": " If commandBuffer is a protected command buffer, then dstImage 
must not be an unprotected image" + } + ] + }, + "VkImageBlit": { + "core": [ + { + "vuid": "VUID-VkImageBlit-aspectMask-00238", + "text": " The aspectMask member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageBlit-layerCount-00239", + "text": " The layerCount member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageBlit-srcImage-00240", + "text": " If either of the calling command’s srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively" + }, + { + "vuid": "VUID-VkImageBlit-aspectMask-00241", + "text": " The aspectMask member of srcSubresource must specify aspects present in the calling command’s srcImage" + }, + { + "vuid": "VUID-VkImageBlit-aspectMask-00242", + "text": " The aspectMask member of dstSubresource must specify aspects present in the calling command’s dstImage" + }, + { + "vuid": "VUID-VkImageBlit-srcOffset-00243", + "text": " srcOffset[0].x and srcOffset[1].x must both be greater than or equal to 0 and less than or equal to the source image subresource width" + }, + { + "vuid": "VUID-VkImageBlit-srcOffset-00244", + "text": " srcOffset[0].y and srcOffset[1].y must both be greater than or equal to 0 and less than or equal to the source image subresource height" + }, + { + "vuid": "VUID-VkImageBlit-srcImage-00245", + "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset[0].y must be 0 and srcOffset[1].y must be 1." + }, + { + "vuid": "VUID-VkImageBlit-srcOffset-00246", + "text": " srcOffset[0].z and srcOffset[1].z must both be greater than or equal to 0 and less than or equal to the source image subresource depth" + }, + { + "vuid": "VUID-VkImageBlit-srcImage-00247", + "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then srcOffset[0].z must be 0 and srcOffset[1].z must be 1." + }, + { + "vuid": "VUID-VkImageBlit-dstOffset-00248", + "text": " dstOffset[0].x and dstOffset[1].x must both be greater than or equal to 0 and less than or equal to the destination image subresource width" + }, + { + "vuid": "VUID-VkImageBlit-dstOffset-00249", + "text": " dstOffset[0].y and dstOffset[1].y must both be greater than or equal to 0 and less than or equal to the destination image subresource height" + }, + { + "vuid": "VUID-VkImageBlit-dstImage-00250", + "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset[0].y must be 0 and dstOffset[1].y must be 1." + }, + { + "vuid": "VUID-VkImageBlit-dstOffset-00251", + "text": " dstOffset[0].z and dstOffset[1].z must both be greater than or equal to 0 and less than or equal to the destination image subresource depth" + }, + { + "vuid": "VUID-VkImageBlit-dstImage-00252", + "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then dstOffset[0].z must be 0 and dstOffset[1].z must be 1." 
+ }, + { + "vuid": "VUID-VkImageBlit-srcSubresource-parameter", + "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" + }, + { + "vuid": "VUID-VkImageBlit-dstSubresource-parameter", + "text": " dstSubresource must be a valid VkImageSubresourceLayers structure" + } + ] + }, + "vkCmdResolveImage": { + "core": [ + { + "vuid": "VUID-vkCmdResolveImage-pRegions-00253", + "text": " The source region specified by each element of pRegions must be a region that is contained within srcImage" + }, + { + "vuid": "VUID-vkCmdResolveImage-pRegions-00254", + "text": " The destination region specified by each element of pRegions must be a region that is contained within dstImage" + }, + { + "vuid": "VUID-vkCmdResolveImage-pRegions-00255", + "text": " The union of all source regions, and the union of all destination regions, specified by the elements of pRegions, must not overlap in memory" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImage-00256", + "text": " If srcImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImage-00257", + "text": " srcImage must have a sample count equal to any valid sample count value other than VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImage-00258", + "text": " If dstImage is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImage-00259", + "text": " dstImage must have a sample count equal to VK_SAMPLE_COUNT_1_BIT" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImageLayout-00260", + "text": " srcImageLayout must specify the layout of the image subresources of srcImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImageLayout-00262", + "text": " dstImageLayout must specify the layout of the image subresources of dstImage specified in pRegions at the time this command is executed on a VkDevice" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImage-00264", + "text": " If dstImage was created with tiling equal to VK_IMAGE_TILING_LINEAR, dstImage must have been created with a format that supports being a color attachment, as specified by the VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT flag in VkFormatProperties::linearTilingFeatures returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImage-00265", + "text": " If dstImage was created with tiling equal to VK_IMAGE_TILING_OPTIMAL, dstImage must have been created with a format that supports being a color attachment, as specified by the VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT flag in VkFormatProperties::optimalTilingFeatures returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImage-01386", + "text": " srcImage and dstImage must have been created with the same image format" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcSubresource-01709", + "text": " The srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstSubresource-01710", + "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcSubresource-01711", + "text": " The srcSubresource.baseArrayLayer + 
srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstSubresource-01712", + "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + }, + { + "vuid": "VUID-vkCmdResolveImage-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImage-parameter", + "text": " srcImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkCmdResolveImage-srcImageLayout-parameter", + "text": " srcImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImage-parameter", + "text": " dstImage must be a valid VkImage handle" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImageLayout-parameter", + "text": " dstImageLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-vkCmdResolveImage-pRegions-parameter", + "text": " pRegions must be a valid pointer to an array of regionCount valid VkImageResolve structures" + }, + { + "vuid": "VUID-vkCmdResolveImage-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdResolveImage-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdResolveImage-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdResolveImage-regionCount-arraylength", + "text": " regionCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdResolveImage-commonparent", + "text": " Each of commandBuffer, dstImage, and srcImage must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdResolveImage-srcImageLayout-00261", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImageLayout-00263", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkCmdResolveImage-srcImageLayout-01400", + "text": " srcImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + }, + { + "vuid": "VUID-vkCmdResolveImage-dstImageLayout-01401", + "text": " dstImageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdResolveImage-commandBuffer-01837", + "text": " If commandBuffer is an unprotected command buffer, then srcImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdResolveImage-commandBuffer-01838", + "text": " If commandBuffer is an unprotected command buffer, then dstImage must not be a protected image" + }, + { + "vuid": "VUID-vkCmdResolveImage-commandBuffer-01839", + "text": " If commandBuffer is a protected command buffer, then dstImage must not be an unprotected image" + } + ] + }, + "VkImageResolve": { + "core": [ + { + "vuid": "VUID-VkImageResolve-aspectMask-00266", + "text": " The aspectMask member of 
srcSubresource and dstSubresource must only contain VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkImageResolve-layerCount-00267", + "text": " The layerCount member of srcSubresource and dstSubresource must match" + }, + { + "vuid": "VUID-VkImageResolve-srcImage-00268", + "text": " If either of the calling command’s srcImage or dstImage parameters are of VkImageType VK_IMAGE_TYPE_3D, the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively" + }, + { + "vuid": "VUID-VkImageResolve-srcOffset-00269", + "text": " srcOffset.x and (extent.width + srcOffset.x) must both be greater than or equal to 0 and less than or equal to the source image subresource width" + }, + { + "vuid": "VUID-VkImageResolve-srcOffset-00270", + "text": " srcOffset.y and (extent.height + srcOffset.y) must both be greater than or equal to 0 and less than or equal to the source image subresource height" + }, + { + "vuid": "VUID-VkImageResolve-srcImage-00271", + "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D, then srcOffset.y must be 0 and extent.height must be 1." + }, + { + "vuid": "VUID-VkImageResolve-srcOffset-00272", + "text": " srcOffset.z and (extent.depth + srcOffset.z) must both be greater than or equal to 0 and less than or equal to the source image subresource depth" + }, + { + "vuid": "VUID-VkImageResolve-srcImage-00273", + "text": " If the calling command’s srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then srcOffset.z must be 0 and extent.depth must be 1." + }, + { + "vuid": "VUID-VkImageResolve-dstOffset-00274", + "text": " dstOffset.x and (extent.width + dstOffset.x) must both be greater than or equal to 0 and less than or equal to the destination image subresource width" + }, + { + "vuid": "VUID-VkImageResolve-dstOffset-00275", + "text": " dstOffset.y and (extent.height + dstOffset.y) must both be greater than or equal to 0 and less than or equal to the destination image subresource height" + }, + { + "vuid": "VUID-VkImageResolve-dstImage-00276", + "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D, then dstOffset.y must be 0 and extent.height must be 1." + }, + { + "vuid": "VUID-VkImageResolve-dstOffset-00277", + "text": " dstOffset.z and (extent.depth + dstOffset.z) must both be greater than or equal to 0 and less than or equal to the destination image subresource depth" + }, + { + "vuid": "VUID-VkImageResolve-dstImage-00278", + "text": " If the calling command’s dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then dstOffset.z must be 0 and extent.depth must be 1." + }, + { + "vuid": "VUID-VkImageResolve-srcSubresource-parameter", + "text": " srcSubresource must be a valid VkImageSubresourceLayers structure" + }, + { + "vuid": "VUID-VkImageResolve-dstSubresource-parameter", + "text": " dstSubresource must be a valid VkImageSubresourceLayers structure" + } + ] + }, + "vkCmdWriteBufferMarkerAMD": { + "(VK_AMD_buffer_marker)": [ + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01798", + "text": " dstOffset must be less than or equal to the size of dstBuffer minus 4." 
+ }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-01799", + "text": " dstBuffer must have been created with VK_BUFFER_USAGE_TRANSFER_DST_BIT usage flag" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-01800", + "text": " If dstBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstOffset-01801", + "text": " dstOffset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-pipelineStage-parameter", + "text": " pipelineStage must be a valid VkPipelineStageFlagBits value" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-dstBuffer-parameter", + "text": " dstBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdWriteBufferMarkerAMD-commonparent", + "text": " Both of commandBuffer, and dstBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkPipelineInputAssemblyStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428", + "text": " If topology is VK_PRIMITIVE_TOPOLOGY_POINT_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY or VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, primitiveRestartEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429", + "text": " If the &amp;lt;&amp;lt;features-features-geometryShader,geometry shaders&amp;gt;&amp;gt; feature is not enabled, topology must not be any of VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY or VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY" + }, + { + "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00430", + "text": " If the &amp;lt;&amp;lt;features-features-tessellationShader,tessellation shaders&amp;gt;&amp;gt; feature is not enabled, topology must not be VK_PRIMITIVE_TOPOLOGY_PATCH_LIST" + }, + { + "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-parameter", + "text": " topology must be a valid VkPrimitiveTopology value" + } + ] + }, + "vkCmdBindIndexBuffer": { + "core": [ + { + "vuid": "VUID-vkCmdBindIndexBuffer-offset-00431", + "text": " offset must be less than the size of buffer" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-offset-00432", + "text": " The sum of offset and the address of the range of VkDeviceMemory object that is backing buffer, must be a multiple of the type indicated by 
indexType" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-buffer-00433", + "text": " buffer must have been created with the VK_BUFFER_USAGE_INDEX_BUFFER_BIT flag" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-buffer-00434", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer-commonparent", + "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdDraw": { + "core": [ + { + "vuid": "VUID-vkCmdDraw-renderPass-00435", + "text": " The current render pass must be &amp;lt;&amp;lt;renderpass-compatibility,compatible&amp;gt;&amp;gt; with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." + }, + { + "vuid": "VUID-vkCmdDraw-subpass-00436", + "text": " The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." 
+ }, + { + "vuid": "VUID-vkCmdDraw-None-00437", + "text": " For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDraw-None-00438", + "text": " For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDraw-None-00439", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline" + }, + { + "vuid": "VUID-vkCmdDraw-None-00440", + "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have valid buffers bound" + }, + { + "vuid": "VUID-vkCmdDraw-None-00441", + "text": " For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in &amp;lt;&amp;lt;fxvertex-input&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDraw-None-00442", + "text": " A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS" + }, + { + "vuid": "VUID-vkCmdDraw-None-00443", + "text": " If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer" + }, + { + "vuid": "VUID-vkCmdDraw-None-00444", + "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" + }, + { + "vuid": "VUID-vkCmdDraw-None-00445", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDraw-None-00446", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDraw-None-00447", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDraw-None-00448", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object 
bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDraw-None-00449", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDraw-linearTilingFeatures-00450", + "text": " Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDraw-None-01499", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command." + }, + { + "vuid": "VUID-vkCmdDraw-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDraw-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDraw-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdDraw-renderpass", + "text": " This command must only be called inside of a render pass instance" + } + ], + "(VK_IMG_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDraw-linearTilingFeatures-00451", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDraw-None-00452", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDraw-maxMultiviewInstanceIndex-00453", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex." + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDraw-commandBuffer-01850", + "text": " If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer." 
+ }, + { + "vuid": "VUID-vkCmdDraw-commandBuffer-01851", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer." + }, + { + "vuid": "VUID-vkCmdDraw-commandBuffer-01852", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer." + } + ], + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDraw-sampleLocationsEnable-01512", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ] + }, + "vkCmdDrawIndexed": { + "core": [ + { + "vuid": "VUID-vkCmdDrawIndexed-renderPass-00454", + "text": " The current render pass must be &amp;lt;&amp;lt;renderpass-compatibility,compatible&amp;gt;&amp;gt; with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-subpass-00455", + "text": " The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00456", + "text": " For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00457", + "text": " For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00458", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00459", + "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have valid buffers bound" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00460", + "text": " For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in &amp;lt;&amp;lt;fxvertex-input&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00461", + "text": " A valid graphics pipeline must be bound to the current command buffer with 
VK_PIPELINE_BIND_POINT_GRAPHICS" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00462", + "text": " If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-indexSize-00463", + "text": " (indexSize * (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00464", + "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00465", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00466", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00467", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00468", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00469", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-linearTilingFeatures-00470", + "text": " Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-01500", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-renderpass", + "text": " This command must only be called inside of a render pass instance" + } + ], + "(VK_IMG_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-linearTilingFeatures-00471", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-00472", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-maxMultiviewInstanceIndex-00473", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex." + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-01853", + "text": " If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-01854", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-01855", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer." 
+ } + ], + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawIndexed-sampleLocationsEnable-01513", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ] + }, + "vkCmdDrawIndirect": { + "core": [ + { + "vuid": "VUID-vkCmdDrawIndirect-buffer-00474", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-buffer-01660", + "text": " buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-offset-00475", + "text": " offset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-drawCount-00476", + "text": " If drawCount is greater than 1, stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand)" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-drawCount-00477", + "text": " If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-firstInstance-00478", + "text": " If the &amp;lt;&amp;lt;features-features-drawIndirectFirstInstance,drawIndirectFirstInstance&amp;gt;&amp;gt; feature is not enabled, all the firstInstance members of the VkDrawIndirectCommand structures accessed by this command must be 0" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-renderPass-00479", + "text": " The current render pass must be &amp;lt;&amp;lt;renderpass-compatibility,compatible&amp;gt;&amp;gt; with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-subpass-00480", + "text": " The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00481", + "text": " For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00482", + "text": " For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00483", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00484", + "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have valid buffers bound" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00485", + "text": " A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00486", + "text": " If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-drawCount-00487", + "text": " If drawCount is equal to 1, (offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-drawCount-00488", + "text": " If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-drawCount-00489", + "text": " drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00490", + "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00491", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00492", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00493", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized 
coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00494", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00495", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-linearTilingFeatures-00496", + "text": " Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-01501", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-renderpass", + "text": " This command must only be called inside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-commonparent", + "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_IMG_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-linearTilingFeatures-00497", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-00498", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-maxMultiviewInstanceIndex-00499", + "text": " If the draw is recorded in a render pass 
instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex." + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-01856", + "text": " If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-01857", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndirect-commandBuffer-01858", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer." + } + ], + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawIndirect-sampleLocationsEnable-01514", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ] + }, + "VkDrawIndirectCommand": { + "core": [ + { + "vuid": "VUID-VkDrawIndirectCommand-None-00500", + "text": " For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in &amp;lt;&amp;lt;fxvertex-input&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkDrawIndirectCommand-firstInstance-00501", + "text": " If the &amp;lt;&amp;lt;features-features-drawIndirectFirstInstance,drawIndirectFirstInstance&amp;gt;&amp;gt; feature is not enabled, firstInstance must be 0" + } + ] + }, + "vkCmdDrawIndirectCountAMD": { + "(VK_AMD_draw_indirect_count)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-buffer-01661", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-buffer-01662", + "text": " buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-countBuffer-01663", + "text": " If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-countBuffer-01664", + "text": " countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-offset-00502", + "text": " offset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-countBufferOffset-00503", + "text": " countBufferOffset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-stride-00504", + "text": " stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand)" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-maxDrawCount-00505", + "text": " If maxDrawCount is greater than or equal to 1, 
(stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-firstInstance-00506", + "text": " If the &amp;lt;&amp;lt;features-features-drawIndirectFirstInstance,drawIndirectFirstInstance&amp;gt;&amp;gt; feature is not enabled, all the firstInstance members of the VkDrawIndirectCommand structures accessed by this command must be 0" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-renderPass-00507", + "text": " The current render pass must be &amp;lt;&amp;lt;renderpass-compatibility,compatible&amp;gt;&amp;gt; with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-subpass-00508", + "text": " The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00509", + "text": " For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00510", + "text": " For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00511", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00512", + "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have valid buffers bound" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00513", + "text": " A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00514", + "text": " If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-countBuffer-00515", + "text": " If the count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-countBuffer-00516", + "text": " If the count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-countBuffer-00517", + "text": " The count stored in countBuffer must be less than or equal to 
VkPhysicalDeviceLimits::maxDrawIndirectCount" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00518", + "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00519", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00520", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00521", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00522", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-00523", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-linearTilingFeatures-00524", + "text": " Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-None-01502", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-countBuffer-parameter", + "text": " countBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-renderpass", + "text": " This command must only be called inside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-commonparent", + "text": " Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_AMD_draw_indirect_count)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-maxMultiviewInstanceIndex-00525", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex." + } + ], + "(VK_AMD_draw_indirect_count)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-commandBuffer-01859", + "text": " If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-commandBuffer-01860", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-commandBuffer-01861", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer." 
+ } + ], + "(VK_AMD_draw_indirect_count)+(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawIndirectCountAMD-sampleLocationsEnable-01515", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ] + }, + "vkCmdDrawIndexedIndirect": { + "core": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-buffer-00526", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-buffer-01665", + "text": " buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-offset-00527", + "text": " offset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-00528", + "text": " If drawCount is greater than 1, stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndexedIndirectCommand)" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-00529", + "text": " If the multi-draw indirect feature is not enabled, drawCount must be 0 or 1" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-firstInstance-00530", + "text": " If the &amp;lt;&amp;lt;features-features-drawIndirectFirstInstance,drawIndirectFirstInstance&amp;gt;&amp;gt; feature is not enabled, all the firstInstance members of the VkDrawIndexedIndirectCommand structures accessed by this command must be 0" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-renderPass-00531", + "text": " The current render pass must be &amp;lt;&amp;lt;renderpass-compatibility,compatible&amp;gt;&amp;gt; with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-subpass-00532", + "text": " The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00533", + "text": " For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00534", + "text": " For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00535", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00536", + "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have valid buffers bound" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00537", + "text": " A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00538", + "text": " If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-00539", + "text": " If drawCount is equal to 1, (offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-00540", + "text": " If drawCount is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-drawCount-00541", + "text": " drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00542", + "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00543", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00544", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00545", + "text": " If any VkSampler object that is 
accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00546", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00547", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-linearTilingFeatures-00548", + "text": " Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-01503", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-renderpass", + "text": " This command must only be called inside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-commonparent", + "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_IMG_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-linearTilingFeatures-00549", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-00550", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-maxMultiviewInstanceIndex-00551", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex." + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-01862", + "text": " If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-01863", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-commandBuffer-01864", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer." 
+ } + ], + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-sampleLocationsEnable-01516", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ] + }, + "VkDrawIndexedIndirectCommand": { + "core": [ + { + "vuid": "VUID-VkDrawIndexedIndirectCommand-None-00552", + "text": " For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in &amp;lt;&amp;lt;fxvertex-input&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkDrawIndexedIndirectCommand-indexSize-00553", + "text": " (indexSize * (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer" + }, + { + "vuid": "VUID-VkDrawIndexedIndirectCommand-firstInstance-00554", + "text": " If the &amp;lt;&amp;lt;features-features-drawIndirectFirstInstance,drawIndirectFirstInstance&amp;gt;&amp;gt; feature is not enabled, firstInstance must be 0" + } + ] + }, + "vkCmdDrawIndexedIndirectCountAMD": { + "(VK_AMD_draw_indirect_count)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-01666", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-01667", + "text": " buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-01668", + "text": " If countBuffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-01669", + "text": " countBuffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-offset-00555", + "text": " offset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-countBufferOffset-00556", + "text": " countBufferOffset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-stride-00557", + "text": " stride must be a multiple of 4 and must be greater than or equal to sizeof(VkDrawIndirectCommand)" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-maxDrawCount-00558", + "text": " If maxDrawCount is greater than or equal to 1, (stride {times} (maxDrawCount - 1) + offset + sizeof(VkDrawIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-firstInstance-00559", + "text": " If the drawIndirectFirstInstance feature is not enabled, all the firstInstance members of the VkDrawIndexedIndirectCommand structures accessed by this command must be 0" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-renderPass-00560", + "text": " The current render pass must be &amp;lt;&amp;lt;renderpass-compatibility,compatible&amp;gt;&amp;gt; with the renderPass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." 
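The VkDrawIndexedIndirectCommand rules above are easiest to read next to code that writes such a record and consumes it. A minimal sketch, assuming a recording command buffer `cmd` inside a render pass with pipeline, index and vertex buffers already bound, and an `indirectBuffer` created with VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT whose memory is mapped at `mapped` (requires <string.h>):

    /* firstInstance stays 0 because the drawIndirectFirstInstance feature is
     * assumed to be disabled here. */
    VkDrawIndexedIndirectCommand draw = {
        .indexCount    = 36,
        .instanceCount = 1,
        .firstIndex    = 0,
        .vertexOffset  = 0,
        .firstInstance = 0,
    };
    memcpy(mapped, &draw, sizeof(draw));

    /* A drawCount of 1 at offset 0 keeps the indirect read inside the buffer;
     * indexCount and firstIndex must also respect the size of the bound index
     * buffer per the VUIDs above. */
    vkCmdDrawIndexedIndirect(cmd, indirectBuffer, /* offset */ 0,
                             /* drawCount */ 1,
                             /* stride */ sizeof(VkDrawIndexedIndirectCommand));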
+ }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-subpass-00561", + "text": " The subpass index of the current render pass must be equal to the subpass member of the VkGraphicsPipelineCreateInfo structure specified when creating the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00562", + "text": " For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00563", + "text": " For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS, a push constant value must have been set for VK_PIPELINE_BIND_POINT_GRAPHICS, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00564", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00565", + "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have valid buffers bound" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00566", + "text": " A valid graphics pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_GRAPHICS" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00567", + "text": " If the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS requires any dynamic state, that state must have been set on the current command buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-00568", + "text": " If count stored in countBuffer is equal to 1, (offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-00569", + "text": " If count stored in countBuffer is greater than 1, (stride {times} (drawCount - 1) + offset + sizeof(VkDrawIndexedIndirectCommand)) must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-drawCount-00570", + "text": " drawCount must be less than or equal to VkPhysicalDeviceLimits::maxDrawIndirectCount" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00571", + "text": " Every input attachment used by the current subpass must be bound to the pipeline via a descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00572", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": 
"VUID-vkCmdDrawIndexedIndirectCountAMD-None-00573", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00574", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_GRAPHICS uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00575", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-00576", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-linearTilingFeatures-00577", + "text": " Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-None-01504", + "text": " Image subresources used as attachments in the current render pass must not be accessed in any way other than as an attachment by this command." 
+ }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-countBuffer-parameter", + "text": " countBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-renderpass", + "text": " This command must only be called inside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-commonparent", + "text": " Each of buffer, commandBuffer, and countBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_AMD_draw_indirect_count)+(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-maxMultiviewInstanceIndex-00578", + "text": " If the draw is recorded in a render pass instance with multiview enabled, the maximum instance index must be less than or equal to VkPhysicalDeviceMultiviewProperties::maxMultiviewInstanceIndex." + } + ], + "(VK_AMD_draw_indirect_count)+(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-01865", + "text": " If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-01866", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer." + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-commandBuffer-01867", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage other than the framebuffer-space pipeline stages in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_GRAPHICS reads from or writes to any image or buffer, the image or buffer must not be a protected image or protected buffer." 
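Because vkCmdDrawIndexedIndirectCountAMD comes from VK_AMD_draw_indirect_count, its entry point is fetched at runtime; otherwise the call mirrors vkCmdDrawIndexedIndirect with a second, GPU-written count buffer. A sketch under the same assumptions as the earlier example (`cmd` recording inside a render pass, `argsBuffer` and `countBuffer` created with VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT):

    PFN_vkCmdDrawIndexedIndirectCountAMD pfnCmdDrawIndexedIndirectCountAMD =
        (PFN_vkCmdDrawIndexedIndirectCountAMD)
            vkGetDeviceProcAddr(device, "vkCmdDrawIndexedIndirectCountAMD");

    /* offset and countBufferOffset are multiples of 4, stride is at least the
     * size of one indirect record, and the count read from countBuffer is
     * capped by maxDrawCount. */
    pfnCmdDrawIndexedIndirectCountAMD(cmd,
                                      argsBuffer,  /* offset            */ 0,
                                      countBuffer, /* countBufferOffset */ 0,
                                      /* maxDrawCount */ 64,
                                      /* stride */ sizeof(VkDrawIndexedIndirectCommand));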
+ } + ], + "(VK_AMD_draw_indirect_count)+(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCountAMD-sampleLocationsEnable-01517", + "text": " If the bound graphics pipeline was created with VkPipelineSampleLocationsStateCreateInfoEXT::sampleLocationsEnable set to VK_TRUE and the current subpass has a depth/stencil attachment, then that attachment must have been created with the VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT bit set" + } + ] + }, + "VkPipelineVertexInputStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-vertexBindingDescriptionCount-00613", + "text": " vertexBindingDescriptionCount must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-vertexAttributeDescriptionCount-00614", + "text": " vertexAttributeDescriptionCount must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributes" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-binding-00615", + "text": " For every binding specified by each element of pVertexAttributeDescriptions, a VkVertexInputBindingDescription must exist in pVertexBindingDescriptions with the same value of binding" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-00616", + "text": " All elements of pVertexBindingDescriptions must describe distinct binding numbers" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-00617", + "text": " All elements of pVertexAttributeDescriptions must describe distinct attribute locations" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineVertexInputDivisorStateCreateInfoEXT" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pVertexBindingDescriptions-parameter", + "text": " If vertexBindingDescriptionCount is not 0, pVertexBindingDescriptions must be a valid pointer to an array of vertexBindingDescriptionCount valid VkVertexInputBindingDescription structures" + }, + { + "vuid": "VUID-VkPipelineVertexInputStateCreateInfo-pVertexAttributeDescriptions-parameter", + "text": " If vertexAttributeDescriptionCount is not 0, pVertexAttributeDescriptions must be a valid pointer to an array of vertexAttributeDescriptionCount valid VkVertexInputAttributeDescription structures" + } + ] + }, + "VkVertexInputBindingDescription": { + "core": [ + { + "vuid": "VUID-VkVertexInputBindingDescription-binding-00618", + "text": " binding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings" + }, + { + "vuid": "VUID-VkVertexInputBindingDescription-stride-00619", + "text": " stride must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindingStride" + }, + { + "vuid": "VUID-VkVertexInputBindingDescription-inputRate-parameter", + "text": " inputRate must be a valid VkVertexInputRate value" + } + ] + }, + "VkVertexInputAttributeDescription": { + "core": [ + { + "vuid": "VUID-VkVertexInputAttributeDescription-location-00620", + "text": " location must be less than VkPhysicalDeviceLimits::maxVertexInputAttributes" + }, + { + "vuid": 
"VUID-VkVertexInputAttributeDescription-binding-00621", + "text": " binding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings" + }, + { + "vuid": "VUID-VkVertexInputAttributeDescription-offset-00622", + "text": " offset must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputAttributeOffset" + }, + { + "vuid": "VUID-VkVertexInputAttributeDescription-format-00623", + "text": " format must be allowed as a vertex buffer format, as specified by the VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT flag in VkFormatProperties::bufferFeatures returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-VkVertexInputAttributeDescription-format-parameter", + "text": " format must be a valid VkFormat value" + } + ] + }, + "vkCmdBindVertexBuffers": { + "core": [ + { + "vuid": "VUID-vkCmdBindVertexBuffers-firstBinding-00624", + "text": " firstBinding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-firstBinding-00625", + "text": " The sum of firstBinding and bindingCount must be less than or equal to VkPhysicalDeviceLimits::maxVertexInputBindings" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-pOffsets-00626", + "text": " All elements of pOffsets must be less than the size of the corresponding element in pBuffers" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-pBuffers-00627", + "text": " All elements of pBuffers must have been created with the VK_BUFFER_USAGE_VERTEX_BUFFER_BIT flag" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-pBuffers-00628", + "text": " Each element of pBuffers that is non-sparse must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-pBuffers-parameter", + "text": " pBuffers must be a valid pointer to an array of bindingCount valid VkBuffer handles" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-pOffsets-parameter", + "text": " pOffsets must be a valid pointer to an array of bindingCount VkDeviceSize values" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-bindingCount-arraylength", + "text": " bindingCount must be greater than 0" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers-commonparent", + "text": " Both of commandBuffer, and the elements of pBuffers must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkPipelineVertexInputDivisorStateCreateInfoEXT": { + "(VK_EXT_vertex_attribute_divisor)": [ + { + "vuid": "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-pVertexBindingDivisors-parameter", + "text": " pVertexBindingDivisors must be a valid pointer to an array of vertexBindingDivisorCount VkVertexInputBindingDivisorDescriptionEXT structures" + }, + { + "vuid": "VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-vertexBindingDivisorCount-arraylength", + "text": " vertexBindingDivisorCount must be greater 
than 0" + } + ] + }, + "VkVertexInputBindingDivisorDescriptionEXT": { + "(VK_EXT_vertex_attribute_divisor)": [ + { + "vuid": "VUID-VkVertexInputBindingDivisorDescriptionEXT-binding-01869", + "text": " binding must be less than VkPhysicalDeviceLimits::maxVertexInputBindings" + }, + { + "vuid": "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870", + "text": " divisor must be a value between 0 and VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT::maxVertexAttribDivisor, inclusive." + }, + { + "vuid": "VUID-VkVertexInputBindingDivisorDescriptionEXT-inputRate-01871", + "text": " VkVertexInputBindingDescription::inputRate must be of type VK_VERTEX_INPUT_RATE_INSTANCE for this binding." + } + ] + }, + "VkPipelineTessellationStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineTessellationStateCreateInfo-patchControlPoints-01214", + "text": " patchControlPoints must be greater than zero and less than or equal to VkPhysicalDeviceLimits::maxTessellationPatchSize" + }, + { + "vuid": "VUID-VkPipelineTessellationStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineTessellationStateCreateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineTessellationDomainOriginStateCreateInfo" + }, + { + "vuid": "VUID-VkPipelineTessellationStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "VkPipelineTessellationDomainOriginStateCreateInfo": { + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + "vuid": "VUID-VkPipelineTessellationDomainOriginStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineTessellationDomainOriginStateCreateInfo-domainOrigin-parameter", + "text": " domainOrigin must be a valid VkTessellationDomainOrigin value" + } + ] + }, + "VkPipelineViewportSwizzleStateCreateInfoNV": { + "(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-01215", + "text": " viewportCount must match the viewportCount set in VkPipelineViewportStateCreateInfo" + }, + { + "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineViewportSwizzleStateCreateInfoNV-viewportCount-arraylength", + "text": " viewportCount must be greater than 0" + } + ] + }, + "VkViewportSwizzleNV": { + "(VK_NV_viewport_swizzle)": [ + { + "vuid": "VUID-VkViewportSwizzleNV-x-parameter", + "text": " x must be a valid VkViewportCoordinateSwizzleNV value" + }, + { + "vuid": "VUID-VkViewportSwizzleNV-y-parameter", + "text": " y must be a valid VkViewportCoordinateSwizzleNV value" + }, + { + "vuid": "VUID-VkViewportSwizzleNV-z-parameter", + "text": " z must be a valid VkViewportCoordinateSwizzleNV value" + }, + { + "vuid": "VUID-VkViewportSwizzleNV-w-parameter", + "text": " w must be a valid VkViewportCoordinateSwizzleNV value" + } + ] + }, + "VkPipelineViewportWScalingStateCreateInfoNV": { + "(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-VkPipelineViewportWScalingStateCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV" + }, + { + "vuid": 
"VUID-VkPipelineViewportWScalingStateCreateInfoNV-viewportCount-arraylength", + "text": " viewportCount must be greater than 0" + } + ] + }, + "vkCmdSetViewportWScalingNV": { + "(VK_NV_clip_space_w_scaling)": [ + { + "vuid": "VUID-vkCmdSetViewportWScalingNV-None-01322", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetViewportWScalingNV-firstViewport-01323", + "text": " firstViewport must be less than VkPhysicalDeviceLimits::maxViewports" + }, + { + "vuid": "VUID-vkCmdSetViewportWScalingNV-firstViewport-01324", + "text": " The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive" + }, + { + "vuid": "VUID-vkCmdSetViewportWScalingNV-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetViewportWScalingNV-pViewportWScalings-parameter", + "text": " pViewportWScalings must be a valid pointer to an array of viewportCount VkViewportWScalingNV structures" + }, + { + "vuid": "VUID-vkCmdSetViewportWScalingNV-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetViewportWScalingNV-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdSetViewportWScalingNV-viewportCount-arraylength", + "text": " viewportCount must be greater than 0" + } + ] + }, + "VkPipelineViewportStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01216", + "text": " If the multiple viewports feature is not enabled, viewportCount must be 1" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01217", + "text": " If the multiple viewports feature is not enabled, scissorCount must be 1" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-01218", + "text": " viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01219", + "text": " scissorCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-01220", + "text": " scissorCount and viewportCount must be identical" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineViewportSwizzleStateCreateInfoNV or VkPipelineViewportWScalingStateCreateInfoNV" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-viewportCount-arraylength", + "text": " viewportCount must be greater than 0" + }, + { + "vuid": "VUID-VkPipelineViewportStateCreateInfo-scissorCount-arraylength", + "text": " scissorCount must be greater than 0" + } + ], + "(VK_NV_clip_space_w_scaling)": [ + { + "vuid": 
"VUID-VkPipelineViewportStateCreateInfo-viewportWScalingEnable-01726", + "text": " If the viewportWScalingEnable member of a VkPipelineViewportWScalingStateCreateInfoNV structure chained to the pNext chain is VK_TRUE, the viewportCount member of the VkPipelineViewportWScalingStateCreateInfoNV structure must be equal to viewportCount" + } + ] + }, + "vkCmdSetViewport": { + "core": [ + { + "vuid": "VUID-vkCmdSetViewport-None-01221", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_VIEWPORT dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetViewport-firstViewport-01222", + "text": " firstViewport must be less than VkPhysicalDeviceLimits::maxViewports" + }, + { + "vuid": "VUID-vkCmdSetViewport-firstViewport-01223", + "text": " The sum of firstViewport and viewportCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive" + }, + { + "vuid": "VUID-vkCmdSetViewport-firstViewport-01224", + "text": " If the multiple viewports feature is not enabled, firstViewport must be 0" + }, + { + "vuid": "VUID-vkCmdSetViewport-viewportCount-01225", + "text": " If the multiple viewports feature is not enabled, viewportCount must be 1" + }, + { + "vuid": "VUID-vkCmdSetViewport-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetViewport-pViewports-parameter", + "text": " pViewports must be a valid pointer to an array of viewportCount VkViewport structures" + }, + { + "vuid": "VUID-vkCmdSetViewport-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetViewport-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdSetViewport-viewportCount-arraylength", + "text": " viewportCount must be greater than 0" + } + ] + }, + "VkViewport": { + "core": [ + { + "vuid": "VUID-VkViewport-width-01770", + "text": " width must be greater than 0.0" + }, + { + "vuid": "VUID-VkViewport-width-01771", + "text": " width must be less than or equal to VkPhysicalDeviceLimits::maxViewportDimensions[0]" + }, + { + "vuid": "VUID-VkViewport-height-01773", + "text": " The absolute value of height must be less than or equal to VkPhysicalDeviceLimits::maxViewportDimensions[1]" + }, + { + "vuid": "VUID-VkViewport-x-01774", + "text": " x must be greater than or equal to viewportBoundsRange[0]" + }, + { + "vuid": "VUID-VkViewport-x-01232", + "text": " (x + width) must be less than or equal to viewportBoundsRange[1]" + }, + { + "vuid": "VUID-VkViewport-y-01775", + "text": " y must be greater than or equal to viewportBoundsRange[0]" + }, + { + "vuid": "VUID-VkViewport-y-01233", + "text": " (y + height) must be less than or equal to viewportBoundsRange[1]" + } + ], + "!(VK_VERSION_1_1,VK_KHR_maintenance1,VK_AMD_negative_viewport_height)": [ + { + "vuid": "VUID-VkViewport-height-01772", + "text": " height must be greater than 0.0" + } + ], + "(VK_VERSION_1_1,VK_KHR_maintenance1,VK_AMD_negative_viewport_height)": [ + { + "vuid": "VUID-VkViewport-y-01776", + "text": " y must be less than or equal to viewportBoundsRange[1]" + }, + { + "vuid": "VUID-VkViewport-y-01777", + "text": " (y + height) must be greater than or equal to viewportBoundsRange[0]" + } + ], + "(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-VkViewport-minDepth-01234", + "text": " Unless VK_EXT_depth_range_unrestricted 
extension is enabled minDepth must be between 0.0 and 1.0, inclusive" + }, + { + "vuid": "VUID-VkViewport-maxDepth-01235", + "text": " Unless VK_EXT_depth_range_unrestricted extension is enabled maxDepth must be between 0.0 and 1.0, inclusive" + } + ], + "!(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-VkViewport-minDepth-01234", + "text": " minDepth must be between 0.0 and 1.0, inclusive" + }, + { + "vuid": "VUID-VkViewport-maxDepth-01235", + "text": " maxDepth must be between 0.0 and 1.0, inclusive" + } + ] + }, + "VkPipelineRasterizationStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-depthClampEnable-00782", + "text": " If the &amp;lt;&amp;lt;features-features-depthClamp,depth clamping&amp;gt;&amp;gt; feature is not enabled, depthClampEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineRasterizationConservativeStateCreateInfoEXT or VkPipelineRasterizationStateRasterizationOrderAMD" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-parameter", + "text": " polygonMode must be a valid VkPolygonMode value" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-cullMode-parameter", + "text": " cullMode must be a valid combination of VkCullModeFlagBits values" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-frontFace-parameter", + "text": " frontFace must be a valid VkFrontFace value" + } + ], + "!(VK_NV_fill_rectangle)": [ + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01413", + "text": " If the &amp;lt;&amp;lt;features-features-fillModeNonSolid,non-solid fill modes&amp;gt;&amp;gt; feature is not enabled, polygonMode must be VK_POLYGON_MODE_FILL" + } + ], + "(VK_NV_fill_rectangle)": [ + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01507", + "text": " If the &amp;lt;&amp;lt;features-features-fillModeNonSolid,non-solid fill modes&amp;gt;&amp;gt; feature is not enabled, polygonMode must be VK_POLYGON_MODE_FILL or VK_POLYGON_MODE_FILL_RECTANGLE_NV" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateCreateInfo-polygonMode-01414", + "text": " If the VK_NV_fill_rectangle extension is not enabled, polygonMode must not be VK_POLYGON_MODE_FILL_RECTANGLE_NV" + } + ] + }, + "VkPipelineMultisampleStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-sampleShadingEnable-00784", + "text": " If the &amp;lt;&amp;lt;features-features-sampleRateShading,sample rate shading&amp;gt;&amp;gt; feature is not enabled, sampleShadingEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-alphaToOneEnable-00785", + "text": " If the &amp;lt;&amp;lt;features-features-alphaToOne,alpha to one&amp;gt;&amp;gt; feature is not enabled, alphaToOneEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-minSampleShading-00786", + "text": " minSampleShading must be in the 
range [0,1]" + }, + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCoverageModulationStateCreateInfoNV, VkPipelineCoverageToColorStateCreateInfoNV, or VkPipelineSampleLocationsStateCreateInfoEXT" + }, + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-parameter", + "text": " rasterizationSamples must be a valid VkSampleCountFlagBits value" + }, + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-pSampleMask-parameter", + "text": " If pSampleMask is not NULL, pSampleMask must be a valid pointer to an array of \\(\\lceil{\\mathit{rasterizationSamples} \\over 32}\\rceil\\) VkSampleMask values" + } + ], + "(VK_NV_framebuffer_mixed_samples)": [ + { + "vuid": "VUID-VkPipelineMultisampleStateCreateInfo-rasterizationSamples-01415", + "text": " If the subpass has any color attachments and rasterizationSamples is greater than the number of color samples, then sampleShadingEnable must be VK_FALSE" + } + ] + }, + "VkPipelineRasterizationStateRasterizationOrderAMD": { + "(VK_AMD_rasterization_order)": [ + { + "vuid": "VUID-VkPipelineRasterizationStateRasterizationOrderAMD-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD" + }, + { + "vuid": "VUID-VkPipelineRasterizationStateRasterizationOrderAMD-rasterizationOrder-parameter", + "text": " rasterizationOrder must be a valid VkRasterizationOrderAMD value" + } + ] + }, + "VkPipelineSampleLocationsStateCreateInfoEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkPipelineSampleLocationsStateCreateInfoEXT-sampleLocationsInfo-parameter", + "text": " sampleLocationsInfo must be a valid VkSampleLocationsInfoEXT structure" + } + ] + }, + "VkSampleLocationsInfoEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-01526", + "text": " sampleLocationsPerPixel must be a bit value that is set in VkPhysicalDeviceSampleLocationsPropertiesEXT::sampleLocationSampleCounts" + }, + { + "vuid": "VUID-VkSampleLocationsInfoEXT-sampleLocationsCount-01527", + "text": " sampleLocationsCount must equal sampleLocationsPerPixel {times} sampleLocationGridSize.width {times} sampleLocationGridSize.height" + }, + { + "vuid": "VUID-VkSampleLocationsInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT" + }, + { + "vuid": "VUID-VkSampleLocationsInfoEXT-sampleLocationsPerPixel-parameter", + "text": " sampleLocationsPerPixel must be a valid VkSampleCountFlagBits value" + }, + { + "vuid": "VUID-VkSampleLocationsInfoEXT-pSampleLocations-parameter", + "text": " pSampleLocations must be a valid pointer to an array of sampleLocationsCount VkSampleLocationEXT structures" + }, + { + "vuid": "VUID-VkSampleLocationsInfoEXT-sampleLocationsCount-arraylength", 
+ "text": " sampleLocationsCount must be greater than 0" + } + ] + }, + "vkCmdSetSampleLocationsEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkCmdSetSampleLocationsEXT-None-01528", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetSampleLocationsEXT-sampleLocationsPerPixel-01529", + "text": " The sampleLocationsPerPixel member of pSampleLocationsInfo must equal the rasterizationSamples member of the VkPipelineMultisampleStateCreateInfo structure the bound graphics pipeline has been created with" + }, + { + "vuid": "VUID-vkCmdSetSampleLocationsEXT-variableSampleLocations-01530", + "text": " If VkPhysicalDeviceSampleLocationsPropertiesEXT::variableSampleLocations is VK_FALSE then the current render pass must have been begun by specifying a VkRenderPassSampleLocationsBeginInfoEXT structure whose pPostSubpassSampleLocations member contains an element with a subpassIndex matching the current subpass index and the sampleLocationsInfo member of that element must match the sample locations state pointed to by pSampleLocationsInfo" + }, + { + "vuid": "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetSampleLocationsEXT-pSampleLocationsInfo-parameter", + "text": " pSampleLocationsInfo must be a valid pointer to a valid VkSampleLocationsInfoEXT structure" + }, + { + "vuid": "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetSampleLocationsEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, + "vkCmdSetLineWidth": { + "core": [ + { + "vuid": "VUID-vkCmdSetLineWidth-None-00787", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_LINE_WIDTH dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetLineWidth-lineWidth-00788", + "text": " If the wide lines feature is not enabled, lineWidth must be 1.0" + }, + { + "vuid": "VUID-vkCmdSetLineWidth-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetLineWidth-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetLineWidth-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, + "vkCmdSetDepthBias": { + "core": [ + { + "vuid": "VUID-vkCmdSetDepthBias-None-00789", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetDepthBias-depthBiasClamp-00790", + "text": " If the depth bias clamping feature is not enabled, depthBiasClamp must be 0.0" + }, + { + "vuid": "VUID-vkCmdSetDepthBias-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetDepthBias-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetDepthBias-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was 
allocated from must support graphics operations" + } + ] + }, + "VkPipelineRasterizationConservativeStateCreateInfoEXT": { + "(VK_EXT_conservative_rasterization)": [ + { + "vuid": "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-extraPrimitiveOverestimationSize-01769", + "text": " extraPrimitiveOverestimationSize must be in the range of 0.0 to VkPhysicalDeviceConservativeRasterizationPropertiesEXT::maxExtraPrimitiveOverestimationSize inclusive" + }, + { + "vuid": "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineRasterizationConservativeStateCreateInfoEXT-conservativeRasterizationMode-parameter", + "text": " conservativeRasterizationMode must be a valid VkConservativeRasterizationModeEXT value" + } + ] + }, + "VkPipelineDiscardRectangleStateCreateInfoEXT": { + "(VK_EXT_discard_rectangles)": [ + { + "vuid": "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleCount-00582", + "text": " discardRectangleCount must be between 0 and VkPhysicalDeviceDiscardRectanglePropertiesEXT::maxDiscardRectangles, inclusive" + }, + { + "vuid": "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineDiscardRectangleStateCreateInfoEXT-discardRectangleMode-parameter", + "text": " discardRectangleMode must be a valid VkDiscardRectangleModeEXT value" + } + ] + }, + "vkCmdSetDiscardRectangleEXT": { + "(VK_EXT_discard_rectangles)": [ + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-None-00583", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-firstDiscardRectangle-00585", + "text": " The sum of firstDiscardRectangle and discardRectangleCount must be less than or equal to VkPhysicalDeviceDiscardRectanglePropertiesEXT::maxDiscardRectangles" + }, + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-x-00587", + "text": " The x and y member of offset in each VkRect2D element of pDiscardRectangles must be greater than or equal to 0" + }, + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-offset-00588", + "text": " Evaluation of (offset.x + extent.width) in each VkRect2D element of pDiscardRectangles must not cause a signed integer addition overflow" + }, + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-offset-00589", + "text": " Evaluation of (offset.y + extent.height) in each VkRect2D element of pDiscardRectangles must not cause a signed integer addition overflow" + }, + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-pDiscardRectangles-parameter", + "text": " pDiscardRectangles must be a valid pointer to an array of discardRectangleCount VkRect2D structures" + }, + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": 
"VUID-vkCmdSetDiscardRectangleEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdSetDiscardRectangleEXT-discardRectangleCount-arraylength", + "text": " discardRectangleCount must be greater than 0" + } + ] + }, + "vkCmdSetScissor": { + "core": [ + { + "vuid": "VUID-vkCmdSetScissor-None-00590", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_SCISSOR dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetScissor-firstScissor-00591", + "text": " firstScissor must be less than VkPhysicalDeviceLimits::maxViewports" + }, + { + "vuid": "VUID-vkCmdSetScissor-firstScissor-00592", + "text": " The sum of firstScissor and scissorCount must be between 1 and VkPhysicalDeviceLimits::maxViewports, inclusive" + }, + { + "vuid": "VUID-vkCmdSetScissor-firstScissor-00593", + "text": " If the multiple viewports feature is not enabled, firstScissor must be 0" + }, + { + "vuid": "VUID-vkCmdSetScissor-scissorCount-00594", + "text": " If the multiple viewports feature is not enabled, scissorCount must be 1" + }, + { + "vuid": "VUID-vkCmdSetScissor-x-00595", + "text": " The x and y members of offset must be greater than or equal to 0" + }, + { + "vuid": "VUID-vkCmdSetScissor-offset-00596", + "text": " Evaluation of (offset.x + extent.width) must not cause a signed integer addition overflow" + }, + { + "vuid": "VUID-vkCmdSetScissor-offset-00597", + "text": " Evaluation of (offset.y + extent.height) must not cause a signed integer addition overflow" + }, + { + "vuid": "VUID-vkCmdSetScissor-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetScissor-pScissors-parameter", + "text": " pScissors must be a valid pointer to an array of scissorCount VkRect2D structures" + }, + { + "vuid": "VUID-vkCmdSetScissor-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetScissor-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdSetScissor-scissorCount-arraylength", + "text": " scissorCount must be greater than 0" + } + ] + }, + "VkPipelineDepthStencilStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-depthBoundsTestEnable-00598", + "text": " If the &amp;lt;&amp;lt;features-features-depthBounds,depth bounds testing&amp;gt;&amp;gt; feature is not enabled, depthBoundsTestEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-depthCompareOp-parameter", + "text": " depthCompareOp must be a valid VkCompareOp value" + }, + { + "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-front-parameter", + "text": " front must be a valid VkStencilOpState structure" + }, + { + "vuid": "VUID-VkPipelineDepthStencilStateCreateInfo-back-parameter", + "text": " back must be a valid VkStencilOpState structure" + } + ] + }, + "vkCmdSetDepthBounds": { + "core": [ + { 
+ "vuid": "VUID-vkCmdSetDepthBounds-None-00599", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_DEPTH_BOUNDS dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetDepthBounds-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetDepthBounds-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetDepthBounds-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ], + "(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-vkCmdSetDepthBounds-minDepthBounds-00600", + "text": " Unless the VK_EXT_depth_range_unrestricted extension is enabled minDepthBounds must be between 0.0 and 1.0, inclusive" + }, + { + "vuid": "VUID-vkCmdSetDepthBounds-maxDepthBounds-00601", + "text": " Unless the VK_EXT_depth_range_unrestricted extension is enabled maxDepthBounds must be between 0.0 and 1.0, inclusive" + } + ], + "!(VK_EXT_depth_range_unrestricted)": [ + { + "vuid": "VUID-vkCmdSetDepthBounds-minDepthBounds-00600", + "text": " minDepthBounds must be between 0.0 and 1.0, inclusive" + }, + { + "vuid": "VUID-vkCmdSetDepthBounds-maxDepthBounds-00601", + "text": " maxDepthBounds must be between 0.0 and 1.0, inclusive" + } + ] + }, + "VkStencilOpState": { + "core": [ + { + "vuid": "VUID-VkStencilOpState-failOp-parameter", + "text": " failOp must be a valid VkStencilOp value" + }, + { + "vuid": "VUID-VkStencilOpState-passOp-parameter", + "text": " passOp must be a valid VkStencilOp value" + }, + { + "vuid": "VUID-VkStencilOpState-depthFailOp-parameter", + "text": " depthFailOp must be a valid VkStencilOp value" + }, + { + "vuid": "VUID-VkStencilOpState-compareOp-parameter", + "text": " compareOp must be a valid VkCompareOp value" + } + ] + }, + "vkCmdSetStencilCompareMask": { + "core": [ + { + "vuid": "VUID-vkCmdSetStencilCompareMask-None-00602", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetStencilCompareMask-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetStencilCompareMask-faceMask-parameter", + "text": " faceMask must be a valid combination of VkStencilFaceFlagBits values" + }, + { + "vuid": "VUID-vkCmdSetStencilCompareMask-faceMask-requiredbitmask", + "text": " faceMask must not be 0" + }, + { + "vuid": "VUID-vkCmdSetStencilCompareMask-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetStencilCompareMask-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, + "vkCmdSetStencilWriteMask": { + "core": [ + { + "vuid": "VUID-vkCmdSetStencilWriteMask-None-00603", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_STENCIL_WRITE_MASK dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetStencilWriteMask-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetStencilWriteMask-faceMask-parameter", + "text": " faceMask must be a valid combination of VkStencilFaceFlagBits values" + }, + { + "vuid": 
"VUID-vkCmdSetStencilWriteMask-faceMask-requiredbitmask", + "text": " faceMask must not be 0" + }, + { + "vuid": "VUID-vkCmdSetStencilWriteMask-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetStencilWriteMask-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, + "vkCmdSetStencilReference": { + "core": [ + { + "vuid": "VUID-vkCmdSetStencilReference-None-00604", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_STENCIL_REFERENCE dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetStencilReference-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetStencilReference-faceMask-parameter", + "text": " faceMask must be a valid combination of VkStencilFaceFlagBits values" + }, + { + "vuid": "VUID-vkCmdSetStencilReference-faceMask-requiredbitmask", + "text": " faceMask must not be 0" + }, + { + "vuid": "VUID-vkCmdSetStencilReference-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetStencilReference-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, + "VkPipelineCoverageToColorStateCreateInfoNV": { + "(VK_NV_fragment_coverage_to_color)": [ + { + "vuid": "VUID-VkPipelineCoverageToColorStateCreateInfoNV-coverageToColorEnable-01404", + "text": " If coverageToColorEnable is VK_TRUE, then the render pass subpass indicated by VkGraphicsPipelineCreateInfo::renderPass and VkGraphicsPipelineCreateInfo::subpass must have a color attachment at the location selected by coverageToColorLocation, with a VkFormat of VK_FORMAT_R8_UINT, VK_FORMAT_R8_SINT, VK_FORMAT_R16_UINT, VK_FORMAT_R16_SINT, VK_FORMAT_R32_UINT, or VK_FORMAT_R32_SINT" + }, + { + "vuid": "VUID-VkPipelineCoverageToColorStateCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkPipelineCoverageToColorStateCreateInfoNV-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "VkPipelineCoverageModulationStateCreateInfoNV": { + "(VK_NV_framebuffer_mixed_samples)": [ + { + "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableEnable-01405", + "text": " If coverageModulationTableEnable is VK_TRUE, coverageModulationTableCount must be equal to the number of rasterization samples divided by the number of color samples in the subpass." 
+ }, + { + "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV" + }, + { + "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationMode-parameter", + "text": " coverageModulationMode must be a valid VkCoverageModulationModeNV value" + }, + { + "vuid": "VUID-VkPipelineCoverageModulationStateCreateInfoNV-coverageModulationTableCount-arraylength", + "text": " coverageModulationTableCount must be greater than 0" + } + ] + }, + "VkPipelineColorBlendStateCreateInfo": { + "core": [ + { + "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-00605", + "text": " If the &amp;lt;&amp;lt;features-features-independentBlend,independent blending&amp;gt;&amp;gt; feature is not enabled, all elements of pAttachments must be identical" + }, + { + "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00606", + "text": " If the &amp;lt;&amp;lt;features-features-logicOp,logic operations&amp;gt;&amp;gt; feature is not enabled, logicOpEnable must be VK_FALSE" + }, + { + "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-logicOpEnable-00607", + "text": " If logicOpEnable is VK_TRUE, logicOp must be a valid VkLogicOp value" + }, + { + "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO" + }, + { + "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineColorBlendAdvancedStateCreateInfoEXT" + }, + { + "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkPipelineColorBlendStateCreateInfo-pAttachments-parameter", + "text": " If attachmentCount is not 0, pAttachments must be a valid pointer to an array of attachmentCount valid VkPipelineColorBlendAttachmentState structures" + } + ] + }, + "VkPipelineColorBlendAttachmentState": { + "core": [ + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-00608", + "text": " If the &amp;lt;&amp;lt;features-features-dualSrcBlend,dual source blending&amp;gt;&amp;gt; feature is not enabled, srcColorBlendFactor must not be VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, or VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-00609", + "text": " If the &amp;lt;&amp;lt;features-features-dualSrcBlend,dual source blending&amp;gt;&amp;gt; feature is not enabled, dstColorBlendFactor must not be VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, or VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-00610", + "text": " If the &amp;lt;&amp;lt;features-features-dualSrcBlend,dual source blending&amp;gt;&amp;gt; feature is not enabled, srcAlphaBlendFactor must not be VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, or VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-00611", + "text": " If the &amp;lt;&amp;lt;features-features-dualSrcBlend,dual source blending&amp;gt;&amp;gt; feature is not enabled, dstAlphaBlendFactor must 
not be VK_BLEND_FACTOR_SRC1_COLOR, VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, VK_BLEND_FACTOR_SRC1_ALPHA, or VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-srcColorBlendFactor-parameter", + "text": " srcColorBlendFactor must be a valid VkBlendFactor value" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-dstColorBlendFactor-parameter", + "text": " dstColorBlendFactor must be a valid VkBlendFactor value" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-parameter", + "text": " colorBlendOp must be a valid VkBlendOp value" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-srcAlphaBlendFactor-parameter", + "text": " srcAlphaBlendFactor must be a valid VkBlendFactor value" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-dstAlphaBlendFactor-parameter", + "text": " dstAlphaBlendFactor must be a valid VkBlendFactor value" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-alphaBlendOp-parameter", + "text": " alphaBlendOp must be a valid VkBlendOp value" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorWriteMask-parameter", + "text": " colorWriteMask must be a valid combination of VkColorComponentFlagBits values" + } + ], + "(VK_EXT_blend_operation_advanced)": [ + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01406", + "text": " If either of colorBlendOp or alphaBlendOp is an &amp;lt;&amp;lt;framebuffer-blend-advanced,advanced blend operation&amp;gt;&amp;gt;, then colorBlendOp must equal alphaBlendOp" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-advancedBlendIndependentBlend-01407", + "text": " If VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendIndependentBlend is VK_FALSE and colorBlendOp is an &amp;lt;&amp;lt;framebuffer-blend-advanced,advanced blend operation&amp;gt;&amp;gt;, then colorBlendOp must be the same for all attachments." + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-advancedBlendIndependentBlend-01408", + "text": " If VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendIndependentBlend is VK_FALSE and alphaBlendOp is an &amp;lt;&amp;lt;framebuffer-blend-advanced,advanced blend operation&amp;gt;&amp;gt;, then alphaBlendOp must be the same for all attachments." 
+ }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-advancedBlendAllOperations-01409", + "text": " If VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendAllOperations is VK_FALSE, then colorBlendOp must not be VK_BLEND_OP_ZERO_EXT, VK_BLEND_OP_SRC_EXT, VK_BLEND_OP_DST_EXT, VK_BLEND_OP_SRC_OVER_EXT, VK_BLEND_OP_DST_OVER_EXT, VK_BLEND_OP_SRC_IN_EXT, VK_BLEND_OP_DST_IN_EXT, VK_BLEND_OP_SRC_OUT_EXT, VK_BLEND_OP_DST_OUT_EXT, VK_BLEND_OP_SRC_ATOP_EXT, VK_BLEND_OP_DST_ATOP_EXT, VK_BLEND_OP_XOR_EXT, VK_BLEND_OP_INVERT_EXT, VK_BLEND_OP_INVERT_RGB_EXT, VK_BLEND_OP_LINEARDODGE_EXT, VK_BLEND_OP_LINEARBURN_EXT, VK_BLEND_OP_VIVIDLIGHT_EXT, VK_BLEND_OP_LINEARLIGHT_EXT, VK_BLEND_OP_PINLIGHT_EXT, VK_BLEND_OP_HARDMIX_EXT, VK_BLEND_OP_PLUS_EXT, VK_BLEND_OP_PLUS_CLAMPED_EXT, VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, VK_BLEND_OP_PLUS_DARKER_EXT, VK_BLEND_OP_MINUS_EXT, VK_BLEND_OP_MINUS_CLAMPED_EXT, VK_BLEND_OP_CONTRAST_EXT, VK_BLEND_OP_INVERT_OVG_EXT, VK_BLEND_OP_RED_EXT, VK_BLEND_OP_GREEN_EXT, or VK_BLEND_OP_BLUE_EXT" + }, + { + "vuid": "VUID-VkPipelineColorBlendAttachmentState-colorBlendOp-01410", + "text": " If colorBlendOp or alphaBlendOp is an &amp;lt;&amp;lt;framebuffer-blend-advanced,advanced blend operation&amp;gt;&amp;gt;, then VkSubpassDescription::colorAttachmentCount of the subpass this pipeline is compiled against must be less than or equal to VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT::advancedBlendMaxColorAttachments" + } + ] + }, + "vkCmdSetBlendConstants": { + "core": [ + { + "vuid": "VUID-vkCmdSetBlendConstants-None-00612", + "text": " The bound graphics pipeline must have been created with the VK_DYNAMIC_STATE_BLEND_CONSTANTS dynamic state enabled" + }, + { + "vuid": "VUID-vkCmdSetBlendConstants-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdSetBlendConstants-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdSetBlendConstants-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + } + ] + }, + "VkPipelineColorBlendAdvancedStateCreateInfoEXT": { + "(VK_EXT_blend_operation_advanced)": [ + { + "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-srcPremultiplied-01424", + "text": " If the &amp;lt;&amp;lt;features-limits-advancedBlendNonPremultipliedSrcColor,non-premultiplied source color&amp;gt;&amp;gt; property is not supported, srcPremultiplied must be VK_TRUE" + }, + { + "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-dstPremultiplied-01425", + "text": " If the &amp;lt;&amp;lt;features-limits-advancedBlendNonPremultipliedDstColor,non-premultiplied destination color&amp;gt;&amp;gt; property is not supported, dstPremultiplied must be VK_TRUE" + }, + { + "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-01426", + "text": " If the &amp;lt;&amp;lt;features-limits-advancedBlendCorrelatedOverlap,correlated overlap&amp;gt;&amp;gt; property is not supported, blendOverlap must be VK_BLEND_OVERLAP_UNCORRELATED_EXT" + }, + { + "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkPipelineColorBlendAdvancedStateCreateInfoEXT-blendOverlap-parameter", + "text": " blendOverlap must be a valid VkBlendOverlapEXT value" + } + ] + }, 
+ "vkCmdDispatch": { + "core": [ + { + "vuid": "VUID-vkCmdDispatch-groupCountX-00386", + "text": " groupCountX must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0]" + }, + { + "vuid": "VUID-vkCmdDispatch-groupCountY-00387", + "text": " groupCountY must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" + }, + { + "vuid": "VUID-vkCmdDispatch-groupCountZ-00388", + "text": " groupCountZ must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2]" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00389", + "text": " For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_COMPUTE, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00390", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00391", + "text": " A valid compute pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_COMPUTE" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00392", + "text": " For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE, a push constant value must have been set for VK_PIPELINE_BIND_POINT_COMPUTE, with a VkPipelineLayout that is compatible for push constants with the one used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00393", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00394", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00395", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00396", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE accesses a uniform buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00397", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer 
access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDispatch-linearTilingFeatures-00398", + "text": " Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDispatch-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDispatch-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDispatch-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdDispatch-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ], + "(VK_IMG_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDispatch-linearTilingFeatures-00399", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDispatch-None-00400", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDispatch-commandBuffer-01844", + "text": " If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer." + }, + { + "vuid": "VUID-vkCmdDispatch-commandBuffer-01845", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_POINT_COMPUTE writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer." + }, + { + "vuid": "VUID-vkCmdDispatch-commandBuffer-01846", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage other than the compute pipeline stage in the VkPipeline object bound to VK_PIPELINE_POINT_COMPUTE reads from any image or buffer, the image or buffer must not be a protected image or protected buffer." 
+ } + ] + }, + "vkCmdDispatchIndirect": { + "core": [ + { + "vuid": "VUID-vkCmdDispatchIndirect-buffer-00401", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00402", + "text": " For each set n that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE, a descriptor set must have been bound to n at VK_PIPELINE_BIND_POINT_COMPUTE, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00403", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the bound VkPipeline object, specified via vkCmdBindPipeline" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00404", + "text": " A valid compute pipeline must be bound to the current command buffer with VK_PIPELINE_BIND_POINT_COMPUTE" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-buffer-00405", + "text": " buffer must have been created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-offset-00406", + "text": " offset must be a multiple of 4" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-offset-00407", + "text": " The sum of offset and the size of VkDispatchIndirectCommand must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00408", + "text": " For each push constant that is statically used by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE, a push constant value must have been set for VK_PIPELINE_BIND_POINT_COMPUTE, with a VkPipelineLayout that is compatible for push constants with the one used to create the current VkPipeline, as described in &amp;lt;&amp;lt;descriptorsets-compatibility&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00409", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00410", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00411", + "text": " If any VkSampler object that is accessed from a shader by the VkPipeline bound to VK_PIPELINE_BIND_POINT_COMPUTE uses unnormalized coordinates, it must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00412", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE accesses a uniform buffer, it must not access values 
outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00413", + "text": " If the &amp;lt;&amp;lt;features-features-robustBufferAccess,robust buffer access&amp;gt;&amp;gt; feature is not enabled, and any shader stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE accesses a storage buffer, it must not access values outside of the range of that buffer specified in the bound descriptor set" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-linearTilingFeatures-00414", + "text": " Any VkImageView being sampled with VK_FILTER_LINEAR as a result of this command must be of a format which supports linear filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-commonparent", + "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ], + "(VK_IMG_filter_cubic)": [ + { + "vuid": "VUID-vkCmdDispatchIndirect-linearTilingFeatures-00415", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must be of a format which supports cubic filtering, as specified by the VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG flag in VkFormatProperties::linearTilingFeatures (for a linear image) or VkFormatProperties::optimalTilingFeatures(for an optimally tiled image) returned by vkGetPhysicalDeviceFormatProperties" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-00416", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_IMG as a result of this command must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + } + ], + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-01847", + "text": " If commandBuffer is an unprotected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_BIND_POINT_COMPUTE reads from or writes to any image or buffer, that image or buffer must not be a protected image or protected buffer." + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-01848", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage in the VkPipeline object bound to VK_PIPELINE_POINT_COMPUTE writes to any image or buffer, that image or buffer must not be an unprotected image or unprotected buffer." 
+ }, + { + "vuid": "VUID-vkCmdDispatchIndirect-commandBuffer-01849", + "text": " If commandBuffer is a protected command buffer, and any pipeline stage other than the compute pipeline stage in the VkPipeline object bound to VK_PIPELINE_POINT_COMPUTE reads from any image or buffer, the image or buffer must not be a protected image or protected buffer." + } + ] + }, + "VkDispatchIndirectCommand": { + "core": [ + { + "vuid": "VUID-VkDispatchIndirectCommand-x-00417", + "text": " x must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0]" + }, + { + "vuid": "VUID-VkDispatchIndirectCommand-y-00418", + "text": " y must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" + }, + { + "vuid": "VUID-VkDispatchIndirectCommand-z-00419", + "text": " z must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2]" + } + ] + }, + "vkCmdDispatchBase": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkCmdDispatchBase-None-00420", + "text": " All valid usage rules from vkCmdDispatch apply" + }, + { + "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00421", + "text": " baseGroupX must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0]" + }, + { + "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00422", + "text": " baseGroupX must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1]" + }, + { + "vuid": "VUID-vkCmdDispatchBase-baseGroupZ-00423", + "text": " baseGroupZ must be less than VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2]" + }, + { + "vuid": "VUID-vkCmdDispatchBase-groupCountX-00424", + "text": " groupCountX must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[0] minus baseGroupX" + }, + { + "vuid": "VUID-vkCmdDispatchBase-groupCountY-00425", + "text": " groupCountY must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[1] minus baseGroupY" + }, + { + "vuid": "VUID-vkCmdDispatchBase-groupCountZ-00426", + "text": " groupCountZ must be less than or equal to VkPhysicalDeviceLimits::maxComputeWorkGroupCount[2] minus baseGroupZ" + }, + { + "vuid": "VUID-vkCmdDispatchBase-baseGroupX-00427", + "text": " If any of baseGroupX, baseGroupY, or baseGroupZ are not zero, then the bound compute pipeline must have been created with the VK_PIPELINE_CREATE_DISPATCH_BASE flag." 
+ }, + { + "vuid": "VUID-vkCmdDispatchBase-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDispatchBase-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDispatchBase-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support compute operations" + }, + { + "vuid": "VUID-vkCmdDispatchBase-renderpass", + "text": " This command must only be called outside of a render pass instance" + } + ] + }, + "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pFeatures-parameter", + "text": " pFeatures must be a valid pointer to a VkDeviceGeneratedCommandsFeaturesNVX structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX-pLimits-parameter", + "text": " pLimits must be a valid pointer to a VkDeviceGeneratedCommandsLimitsNVX structure" + } + ] + }, + "VkDeviceGeneratedCommandsFeaturesNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkDeviceGeneratedCommandsFeaturesNVX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX" + }, + { + "vuid": "VUID-VkDeviceGeneratedCommandsFeaturesNVX-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "VkDeviceGeneratedCommandsLimitsNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkDeviceGeneratedCommandsLimitsNVX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX" + }, + { + "vuid": "VUID-VkDeviceGeneratedCommandsLimitsNVX-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkCreateObjectTableNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkCreateObjectTableNVX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateObjectTableNVX-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkObjectTableCreateInfoNVX structure" + }, + { + "vuid": "VUID-vkCreateObjectTableNVX-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateObjectTableNVX-pObjectTable-parameter", + "text": " pObjectTable must be a valid pointer to a VkObjectTableNVX handle" + } + ] + }, + "VkObjectTableCreateInfoNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-computeBindingPointSupport-01355", + "text": " If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, pObjectEntryUsageFlags must not contain VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX" + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-01356", + "text": " Any value within pObjectEntryCounts must not exceed VkDeviceGeneratedCommandsLimitsNVX::maxObjectEntryCounts" + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-maxUniformBuffersPerDescriptor-01357", + "text": " maxUniformBuffersPerDescriptor must be within the limits supported by the device." 
+ }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-maxStorageBuffersPerDescriptor-01358", + "text": " maxStorageBuffersPerDescriptor must be within the limits supported by the device." + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-maxStorageImagesPerDescriptor-01359", + "text": " maxStorageImagesPerDescriptor must be within the limits supported by the device." + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-maxSampledImagesPerDescriptor-01360", + "text": " maxSampledImagesPerDescriptor must be within the limits supported by the device." + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX" + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryTypes-parameter", + "text": " pObjectEntryTypes must be a valid pointer to an array of objectCount valid VkObjectEntryTypeNVX values" + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryCounts-parameter", + "text": " pObjectEntryCounts must be a valid pointer to an array of objectCount uint32_t values" + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-parameter", + "text": " pObjectEntryUsageFlags must be a valid pointer to an array of objectCount valid combinations of VkObjectEntryUsageFlagBitsNVX values" + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-pObjectEntryUsageFlags-requiredbitmask", + "text": " Each element of pObjectEntryUsageFlags must not be 0" + }, + { + "vuid": "VUID-VkObjectTableCreateInfoNVX-objectCount-arraylength", + "text": " objectCount must be greater than 0" + } + ] + }, + "vkDestroyObjectTableNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01361", + "text": " All submitted commands that refer to objectTable must have completed execution." + }, + { + "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01362", + "text": " If VkAllocationCallbacks were provided when objectTable was created, a compatible set of callbacks must be provided here." + }, + { + "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-01363", + "text": " If no VkAllocationCallbacks were provided when objectTable was created, pAllocator must be NULL." + }, + { + "vuid": "VUID-vkDestroyObjectTableNVX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-parameter", + "text": " objectTable must be a valid VkObjectTableNVX handle" + }, + { + "vuid": "VUID-vkDestroyObjectTableNVX-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyObjectTableNVX-objectTable-parent", + "text": " objectTable must have been created, allocated, or retrieved from device" + } + ] + }, + "vkRegisterObjectsNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkRegisterObjectsNVX-pObjectTableEntry-01364", + "text": " The contents of pObjectTableEntry must yield plausible bindings supported by the device." + }, + { + "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-01365", + "text": " At any pObjectIndices there must not be a registered resource already." 
+ }, + { + "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-01366", + "text": " Any value inside pObjectIndices must be below the appropriate VkObjectTableCreateInfoNVX::pObjectEntryCounts limits provided at objectTable creation time." + }, + { + "vuid": "VUID-vkRegisterObjectsNVX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkRegisterObjectsNVX-objectTable-parameter", + "text": " objectTable must be a valid VkObjectTableNVX handle" + }, + { + "vuid": "VUID-vkRegisterObjectsNVX-ppObjectTableEntries-parameter", + "text": " ppObjectTableEntries must be a valid pointer to an array of objectCount valid VkObjectTableEntryNVX structures" + }, + { + "vuid": "VUID-vkRegisterObjectsNVX-pObjectIndices-parameter", + "text": " pObjectIndices must be a valid pointer to an array of objectCount uint32_t values" + }, + { + "vuid": "VUID-vkRegisterObjectsNVX-objectCount-arraylength", + "text": " objectCount must be greater than 0" + }, + { + "vuid": "VUID-vkRegisterObjectsNVX-objectTable-parent", + "text": " objectTable must have been created, allocated, or retrieved from device" + } + ] + }, + "VkObjectTableEntryNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkObjectTableEntryNVX-computeBindingPointSupport-01367", + "text": " If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, flags must not contain VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX" + }, + { + "vuid": "VUID-VkObjectTableEntryNVX-type-parameter", + "text": " type must be a valid VkObjectEntryTypeNVX value" + }, + { + "vuid": "VUID-VkObjectTableEntryNVX-flags-parameter", + "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + }, + { + "vuid": "VUID-VkObjectTableEntryNVX-flags-requiredbitmask", + "text": " flags must not be 0" + } + ] + }, + "VkObjectTablePipelineEntryNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkObjectTablePipelineEntryNVX-type-01368", + "text": " type must be VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX" + }, + { + "vuid": "VUID-VkObjectTablePipelineEntryNVX-type-parameter", + "text": " type must be a valid VkObjectEntryTypeNVX value" + }, + { + "vuid": "VUID-VkObjectTablePipelineEntryNVX-flags-parameter", + "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + }, + { + "vuid": "VUID-VkObjectTablePipelineEntryNVX-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkObjectTablePipelineEntryNVX-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + } + ] + }, + "VkObjectTableDescriptorSetEntryNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-type-01369", + "text": " type must be VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX" + }, + { + "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-type-parameter", + "text": " type must be a valid VkObjectEntryTypeNVX value" + }, + { + "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-flags-parameter", + "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + }, + { + "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-pipelineLayout-parameter", + "text": " pipelineLayout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-descriptorSet-parameter", + "text": " descriptorSet must be a valid VkDescriptorSet handle" 
+ }, + { + "vuid": "VUID-VkObjectTableDescriptorSetEntryNVX-commonparent", + "text": " Both of descriptorSet, and pipelineLayout must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkObjectTableVertexBufferEntryNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-type-01370", + "text": " type must be VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX" + }, + { + "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-type-parameter", + "text": " type must be a valid VkObjectEntryTypeNVX value" + }, + { + "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-flags-parameter", + "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + }, + { + "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkObjectTableVertexBufferEntryNVX-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + } + ] + }, + "VkObjectTableIndexBufferEntryNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-type-01371", + "text": " type must be VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX" + }, + { + "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-type-parameter", + "text": " type must be a valid VkObjectEntryTypeNVX value" + }, + { + "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-flags-parameter", + "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + }, + { + "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkObjectTableIndexBufferEntryNVX-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" + } + ] + }, + "VkObjectTablePushConstantEntryNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkObjectTablePushConstantEntryNVX-type-01372", + "text": " type must be VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX" + }, + { + "vuid": "VUID-VkObjectTablePushConstantEntryNVX-type-parameter", + "text": " type must be a valid VkObjectEntryTypeNVX value" + }, + { + "vuid": "VUID-VkObjectTablePushConstantEntryNVX-flags-parameter", + "text": " flags must be a valid combination of VkObjectEntryUsageFlagBitsNVX values" + }, + { + "vuid": "VUID-VkObjectTablePushConstantEntryNVX-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkObjectTablePushConstantEntryNVX-pipelineLayout-parameter", + "text": " pipelineLayout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkObjectTablePushConstantEntryNVX-stageFlags-parameter", + "text": " stageFlags must be a valid combination of VkShaderStageFlagBits values" + }, + { + "vuid": "VUID-VkObjectTablePushConstantEntryNVX-stageFlags-requiredbitmask", + "text": " stageFlags must not be 0" + } + ] + }, + "vkUnregisterObjectsNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkUnregisterObjectsNVX-pObjectIndices-01373", + "text": " At any pObjectIndices there must be a registered resource already." + }, + { + "vuid": "VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-01374", + "text": " The pObjectEntryTypes of the resource at pObjectIndices must match." + }, + { + "vuid": "VUID-vkUnregisterObjectsNVX-None-01375", + "text": " All operations on the device using the registered resource must have been completed." 
+ }, + { + "vuid": "VUID-vkUnregisterObjectsNVX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkUnregisterObjectsNVX-objectTable-parameter", + "text": " objectTable must be a valid VkObjectTableNVX handle" + }, + { + "vuid": "VUID-vkUnregisterObjectsNVX-pObjectEntryTypes-parameter", + "text": " pObjectEntryTypes must be a valid pointer to an array of objectCount valid VkObjectEntryTypeNVX values" + }, + { + "vuid": "VUID-vkUnregisterObjectsNVX-pObjectIndices-parameter", + "text": " pObjectIndices must be a valid pointer to an array of objectCount uint32_t values" + }, + { + "vuid": "VUID-vkUnregisterObjectsNVX-objectCount-arraylength", + "text": " objectCount must be greater than 0" + }, + { + "vuid": "VUID-vkUnregisterObjectsNVX-objectTable-parent", + "text": " objectTable must have been created, allocated, or retrieved from device" + } + ] + }, + "VkIndirectCommandsLayoutTokenNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-bindingUnit-01342", + "text": " bindingUnit must stay within device supported limits for the appropriate commands." + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-dynamicCount-01343", + "text": " dynamicCount must stay within device supported limits for the appropriate commands." + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-divisor-01344", + "text": " divisor must be greater than 0 and a power of two." + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutTokenNVX-tokenType-parameter", + "text": " tokenType must be a valid VkIndirectCommandsTokenTypeNVX value" + } + ] + }, + "VkIndirectCommandsTokenNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkIndirectCommandsTokenNVX-buffer-01345", + "text": " The buffer’s usage flag must have the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set." + }, + { + "vuid": "VUID-VkIndirectCommandsTokenNVX-offset-01346", + "text": " The offset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minCommandsTokenBufferOffsetAlignment." 
+ }, + { + "vuid": "VUID-VkIndirectCommandsTokenNVX-tokenType-parameter", + "text": " tokenType must be a valid VkIndirectCommandsTokenTypeNVX value" + }, + { + "vuid": "VUID-VkIndirectCommandsTokenNVX-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + } + ] + }, + "vkCreateIndirectCommandsLayoutNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkIndirectCommandsLayoutCreateInfoNVX structure" + }, + { + "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateIndirectCommandsLayoutNVX-pIndirectCommandsLayout-parameter", + "text": " pIndirectCommandsLayout must be a valid pointer to a VkIndirectCommandsLayoutNVX handle" + } + ] + }, + "VkIndirectCommandsLayoutCreateInfoNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-01347", + "text": " tokenCount must be greater than 0 and below VkDeviceGeneratedCommandsLimitsNVX::maxIndirectCommandsLayoutTokenCount" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-computeBindingPointSupport-01348", + "text": " If the VkDeviceGeneratedCommandsFeaturesNVX::computeBindingPointSupport feature is not enabled, then pipelineBindPoint must not be VK_PIPELINE_BIND_POINT_COMPUTE" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01349", + "text": " If pTokens contains an entry of VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX it must be the first element of the array and there must be only a single element of such token type." + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01350", + "text": " All state binding tokens in pTokens must occur prior work provoking tokens (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX)." + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-01351", + "text": " The content of pTokens must include one single work provoking token that is compatible with the pipelineBindPoint." 
+ }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-parameter", + "text": " flags must be a valid combination of VkIndirectCommandsLayoutUsageFlagBitsNVX values" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-pTokens-parameter", + "text": " pTokens must be a valid pointer to an array of tokenCount valid VkIndirectCommandsLayoutTokenNVX structures" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNVX-tokenCount-arraylength", + "text": " tokenCount must be greater than 0" + } + ] + }, + "vkDestroyIndirectCommandsLayoutNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-01352", + "text": " All submitted commands that refer to indirectCommandsLayout must have completed execution" + }, + { + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01353", + "text": " If VkAllocationCallbacks were provided when objectTable was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-objectTable-01354", + "text": " If no VkAllocationCallbacks were provided when objectTable was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parameter", + "text": " indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle" + }, + { + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyIndirectCommandsLayoutNVX-indirectCommandsLayout-parent", + "text": " indirectCommandsLayout must have been created, allocated, or retrieved from device" + } + ] + }, + "vkCmdReserveSpaceForCommandsNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01329", + "text": " The provided commandBuffer must not have had a prior space reservation since its creation or the last reset." + }, + { + "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-01330", + "text": " The state of the commandBuffer must be legal to execute all commands within the sequence provided by the indirectCommandsLayout member of pProcessCommandsInfo." 
+ }, + { + "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-pReserveSpaceInfo-parameter", + "text": " pReserveSpaceInfo must be a valid pointer to a valid VkCmdReserveSpaceForCommandsInfoNVX structure" + }, + { + "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-renderpass", + "text": " This command must only be called inside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdReserveSpaceForCommandsNVX-bufferlevel", + "text": " commandBuffer must be a secondary VkCommandBuffer" + } + ] + }, + "VkCmdReserveSpaceForCommandsInfoNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX" + }, + { + "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-objectTable-parameter", + "text": " objectTable must be a valid VkObjectTableNVX handle" + }, + { + "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-indirectCommandsLayout-parameter", + "text": " indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle" + }, + { + "vuid": "VUID-VkCmdReserveSpaceForCommandsInfoNVX-commonparent", + "text": " Both of indirectCommandsLayout, and objectTable must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkCmdProcessCommandsNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdProcessCommandsNVX-pProcessCommandsInfo-parameter", + "text": " pProcessCommandsInfo must be a valid pointer to a valid VkCmdProcessCommandsInfoNVX structure" + }, + { + "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdProcessCommandsNVX-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdProcessCommandsNVX-renderpass", + "text": " This command must only be called inside of a render pass instance" + } + ] + }, + "VkCmdProcessCommandsInfoNVX": { + "(VK_NVX_device_generated_commands)": [ + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-objectTable-01331", + "text": " The provided objectTable must include all objects referenced by the generation process." + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-01332", + "text": " indirectCommandsTokenCount must match the indirectCommandsLayout’s tokenCount." 
+ }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-tokenType-01333", + "text": " The tokenType member of each entry in the pIndirectCommandsTokens array must match the values used at creation time of indirectCommandsLayout" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01334", + "text": " If targetCommandBuffer is provided, it must have reserved command space." + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01335", + "text": " If targetCommandBuffer is provided, the objectTable must match the reservation’s objectTable and must have had all referenced objects registered at reservation time." + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01336", + "text": " If targetCommandBuffer is provided, the indirectCommandsLayout must match the reservation’s indirectCommandsLayout." + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-01337", + "text": " If targetCommandBuffer is provided, the maxSequencesCount must not exceed the reservation’s maxSequencesCount." + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01338", + "text": " If sequencesCountBuffer is used, its usage flag must have VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set." + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-01339", + "text": " If sequencesCountBuffer is used, sequencesCountOffset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minSequenceCountBufferOffsetAlignment." + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01340", + "text": " If sequencesIndexBuffer is used, its usage flag must have VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT bit set." + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-01341", + "text": " If sequencesIndexBuffer is used, sequencesIndexOffset must be aligned to VkDeviceGeneratedCommandsLimitsNVX::minSequenceIndexBufferOffsetAlignment." 
+ }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-objectTable-parameter", + "text": " objectTable must be a valid VkObjectTableNVX handle" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsLayout-parameter", + "text": " indirectCommandsLayout must be a valid VkIndirectCommandsLayoutNVX handle" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-pIndirectCommandsTokens-parameter", + "text": " pIndirectCommandsTokens must be a valid pointer to an array of indirectCommandsTokenCount valid VkIndirectCommandsTokenNVX structures" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-targetCommandBuffer-parameter", + "text": " If targetCommandBuffer is not NULL, targetCommandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesCountBuffer-parameter", + "text": " If sequencesCountBuffer is not VK_NULL_HANDLE, sequencesCountBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-sequencesIndexBuffer-parameter", + "text": " If sequencesIndexBuffer is not VK_NULL_HANDLE, sequencesIndexBuffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-indirectCommandsTokenCount-arraylength", + "text": " indirectCommandsTokenCount must be greater than 0" + }, + { + "vuid": "VUID-VkCmdProcessCommandsInfoNVX-commonparent", + "text": " Each of indirectCommandsLayout, objectTable, sequencesCountBuffer, sequencesIndexBuffer, and targetCommandBuffer that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkGetPhysicalDeviceSparseImageFormatProperties": { + "core": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-01094", + "text": " samples must be a bit value that is set in VkImageFormatProperties::sampleCounts returned by vkGetPhysicalDeviceImageFormatProperties with format, type, tiling, and usage equal to those in this command and flags equal to the value that is set in VkImageCreateInfo::flags when the image is created" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-type-parameter", + "text": " type must be a valid VkImageType value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-samples-parameter", + "text": " samples must be a valid VkSampleCountFlagBits value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-parameter", + "text": " usage must be a valid combination of VkImageUsageFlagBits values" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-usage-requiredbitmask", + "text": " usage must not be 0" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-tiling-parameter", + "text": " tiling must be a valid VkImageTiling value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": 
"VUID-vkGetPhysicalDeviceSparseImageFormatProperties-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkSparseImageFormatProperties structures" + } + ] + }, + "vkGetPhysicalDeviceSparseImageFormatProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pFormatInfo-parameter", + "text": " pFormatInfo must be a valid pointer to a valid VkPhysicalDeviceSparseImageFormatInfo2 structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSparseImageFormatProperties2-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkSparseImageFormatProperties2 structures" + } + ] + }, + "VkPhysicalDeviceSparseImageFormatInfo2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-01095", + "text": " samples must be a bit value that is set in VkImageFormatProperties::sampleCounts returned by vkGetPhysicalDeviceImageFormatProperties with format, type, tiling, and usage equal to those in this command and flags equal to the value that is set in VkImageCreateInfo::flags when the image is created" + }, + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2" + }, + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-type-parameter", + "text": " type must be a valid VkImageType value" + }, + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-samples-parameter", + "text": " samples must be a valid VkSampleCountFlagBits value" + }, + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-parameter", + "text": " usage must be a valid combination of VkImageUsageFlagBits values" + }, + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-usage-requiredbitmask", + "text": " usage must not be 0" + }, + { + "vuid": "VUID-VkPhysicalDeviceSparseImageFormatInfo2-tiling-parameter", + "text": " tiling must be a valid VkImageTiling value" + } + ] + }, + "VkSparseImageFormatProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkSparseImageFormatProperties2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2" + }, + { + "vuid": "VUID-VkSparseImageFormatProperties2-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetImageSparseMemoryRequirements": { + "core": [ + { + "vuid": "VUID-vkGetImageSparseMemoryRequirements-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetImageSparseMemoryRequirements-image-parameter", + "text": " image must be a valid VkImage handle" + }, + 
{ + "vuid": "VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirementCount-parameter", + "text": " pSparseMemoryRequirementCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetImageSparseMemoryRequirements-pSparseMemoryRequirements-parameter", + "text": " If the value referenced by pSparseMemoryRequirementCount is not 0, and pSparseMemoryRequirements is not NULL, pSparseMemoryRequirements must be a valid pointer to an array of pSparseMemoryRequirementCount VkSparseImageMemoryRequirements structures" + }, + { + "vuid": "VUID-vkGetImageSparseMemoryRequirements-image-parent", + "text": " image must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetImageSparseMemoryRequirements2": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ + { + "vuid": "VUID-vkGetImageSparseMemoryRequirements2-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetImageSparseMemoryRequirements2-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkImageSparseMemoryRequirementsInfo2 structure" + }, + { + "vuid": "VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirementCount-parameter", + "text": " pSparseMemoryRequirementCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetImageSparseMemoryRequirements2-pSparseMemoryRequirements-parameter", + "text": " If the value referenced by pSparseMemoryRequirementCount is not 0, and pSparseMemoryRequirements is not NULL, pSparseMemoryRequirements must be a valid pointer to an array of pSparseMemoryRequirementCount VkSparseImageMemoryRequirements2 structures" + } + ] + }, + "VkImageSparseMemoryRequirementsInfo2": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ + { + "vuid": "VUID-VkImageSparseMemoryRequirementsInfo2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2" + }, + { + "vuid": "VUID-VkImageSparseMemoryRequirementsInfo2-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkImageSparseMemoryRequirementsInfo2-image-parameter", + "text": " image must be a valid VkImage handle" + } + ] + }, + "VkSparseImageMemoryRequirements2": { + "(VK_VERSION_1_1,VK_KHR_get_memory_requirements2)": [ + { + "vuid": "VUID-VkSparseImageMemoryRequirements2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2" + }, + { + "vuid": "VUID-VkSparseImageMemoryRequirements2-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "VkSparseMemoryBind": { + "core": [ + { + "vuid": "VUID-VkSparseMemoryBind-memory-01096", + "text": " If memory is not VK_NULL_HANDLE, memory and memoryOffset must match the memory requirements of the resource, as described in section &amp;lt;&amp;lt;resources-association&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkSparseMemoryBind-memory-01097", + "text": " If memory is not VK_NULL_HANDLE, memory must not have been created with a memory type that reports VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT bit set" + }, + { + "vuid": "VUID-VkSparseMemoryBind-size-01098", + "text": " size must be greater than 0" + }, + { + "vuid": "VUID-VkSparseMemoryBind-resourceOffset-01099", + "text": " resourceOffset must be less than the size of the resource" + }, + { + "vuid": "VUID-VkSparseMemoryBind-size-01100", + "text": " size must be less than or equal to the size of the resource minus resourceOffset" + }, + { + "vuid": "VUID-VkSparseMemoryBind-memoryOffset-01101", + "text": " memoryOffset must be less than the 
size of memory" + }, + { + "vuid": "VUID-VkSparseMemoryBind-size-01102", + "text": " size must be less than or equal to the size of memory minus memoryOffset" + }, + { + "vuid": "VUID-VkSparseMemoryBind-memory-parameter", + "text": " If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-VkSparseMemoryBind-flags-parameter", + "text": " flags must be a valid combination of VkSparseMemoryBindFlagBits values" + } + ] + }, + "VkSparseBufferMemoryBindInfo": { + "core": [ + { + "vuid": "VUID-VkSparseBufferMemoryBindInfo-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-VkSparseBufferMemoryBindInfo-pBinds-parameter", + "text": " pBinds must be a valid pointer to an array of bindCount valid VkSparseMemoryBind structures" + }, + { + "vuid": "VUID-VkSparseBufferMemoryBindInfo-bindCount-arraylength", + "text": " bindCount must be greater than 0" + } + ] + }, + "VkSparseImageOpaqueMemoryBindInfo": { + "core": [ + { + "vuid": "VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-01103", + "text": " If the flags member of any element of pBinds contains VK_SPARSE_MEMORY_BIND_METADATA_BIT, the binding range defined must be within the mip tail region of the metadata aspect of image" + }, + { + "vuid": "VUID-VkSparseImageOpaqueMemoryBindInfo-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkSparseImageOpaqueMemoryBindInfo-pBinds-parameter", + "text": " pBinds must be a valid pointer to an array of bindCount valid VkSparseMemoryBind structures" + }, + { + "vuid": "VUID-VkSparseImageOpaqueMemoryBindInfo-bindCount-arraylength", + "text": " bindCount must be greater than 0" + } + ] + }, + "VkSparseImageMemoryBindInfo": { + "core": [ + { + "vuid": "VUID-VkSparseImageMemoryBindInfo-subresource-01722", + "text": " The subresource.mipLevel member of each element of pBinds must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkSparseImageMemoryBindInfo-subresource-01723", + "text": " The subresource.arrayLayer member of each element of pBinds must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkSparseImageMemoryBindInfo-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkSparseImageMemoryBindInfo-pBinds-parameter", + "text": " pBinds must be a valid pointer to an array of bindCount valid VkSparseImageMemoryBind structures" + }, + { + "vuid": "VUID-VkSparseImageMemoryBindInfo-bindCount-arraylength", + "text": " bindCount must be greater than 0" + } + ] + }, + "VkSparseImageMemoryBind": { + "core": [ + { + "vuid": "VUID-VkSparseImageMemoryBind-memory-01104", + "text": " If the &amp;lt;&amp;lt;features-features-sparseResidencyAliased,sparse aliased residency&amp;gt;&amp;gt; feature is not enabled, and if any other resources are bound to ranges of memory, the range of memory being bound must not overlap with those bound ranges" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-memory-01105", + "text": " memory and memoryOffset must match the memory requirements of the calling command’s image, as described in section &amp;lt;&amp;lt;resources-association&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-subresource-01106", + "text": " subresource must be a valid image subresource for image (see &amp;lt;&amp;lt;resources-image-views&amp;gt;&amp;gt;)" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-offset-01107", + 
"text": " offset.x must be a multiple of the sparse image block width (VkSparseImageFormatProperties::imageGranularity.width) of the image" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-extent-01108", + "text": " extent.width must either be a multiple of the sparse image block width of the image, or else (extent.width + offset.x) must equal the width of the image subresource" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-offset-01109", + "text": " offset.y must be a multiple of the sparse image block height (VkSparseImageFormatProperties::imageGranularity.height) of the image" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-extent-01110", + "text": " extent.height must either be a multiple of the sparse image block height of the image, or else (extent.height + offset.y) must equal the height of the image subresource" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-offset-01111", + "text": " offset.z must be a multiple of the sparse image block depth (VkSparseImageFormatProperties::imageGranularity.depth) of the image" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-extent-01112", + "text": " extent.depth must either be a multiple of the sparse image block depth of the image, or else (extent.depth + offset.z) must equal the depth of the image subresource" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-subresource-parameter", + "text": " subresource must be a valid VkImageSubresource structure" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-memory-parameter", + "text": " If memory is not VK_NULL_HANDLE, memory must be a valid VkDeviceMemory handle" + }, + { + "vuid": "VUID-VkSparseImageMemoryBind-flags-parameter", + "text": " flags must be a valid combination of VkSparseMemoryBindFlagBits values" + } + ] + }, + "vkQueueBindSparse": { + "core": [ + { + "vuid": "VUID-vkQueueBindSparse-fence-01113", + "text": " If fence is not VK_NULL_HANDLE, fence must be unsignaled" + }, + { + "vuid": "VUID-vkQueueBindSparse-fence-01114", + "text": " If fence is not VK_NULL_HANDLE, fence must not be associated with any other queue command that has not yet completed execution on that queue" + }, + { + "vuid": "VUID-vkQueueBindSparse-pSignalSemaphores-01115", + "text": " Each element of the pSignalSemaphores member of each element of pBindInfo must be unsignaled when the semaphore signal operation it defines is executed on the device" + }, + { + "vuid": "VUID-vkQueueBindSparse-pWaitSemaphores-01116", + "text": " When a semaphore unsignal operation defined by any element of the pWaitSemaphores member of any element of pBindInfo executes on queue, no other queue must be waiting on the same semaphore." + }, + { + "vuid": "VUID-vkQueueBindSparse-pWaitSemaphores-01117", + "text": " All elements of the pWaitSemaphores member of all elements of pBindInfo must be semaphores that are signaled, or have &amp;lt;&amp;lt;synchronization-semaphores-signaling, semaphore signal operations&amp;gt;&amp;gt; previously submitted for execution." 
+ }, + { + "vuid": "VUID-vkQueueBindSparse-queue-parameter", + "text": " queue must be a valid VkQueue handle" + }, + { + "vuid": "VUID-vkQueueBindSparse-pBindInfo-parameter", + "text": " If bindInfoCount is not 0, pBindInfo must be a valid pointer to an array of bindInfoCount valid VkBindSparseInfo structures" + }, + { + "vuid": "VUID-vkQueueBindSparse-fence-parameter", + "text": " If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-vkQueueBindSparse-queuetype", + "text": " The queue must support sparse binding operations" + }, + { + "vuid": "VUID-vkQueueBindSparse-commonparent", + "text": " Both of fence, and queue that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkBindSparseInfo": { + "core": [ + { + "vuid": "VUID-VkBindSparseInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BIND_SPARSE_INFO" + }, + { + "vuid": "VUID-VkBindSparseInfo-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkDeviceGroupBindSparseInfo" + }, + { + "vuid": "VUID-VkBindSparseInfo-pWaitSemaphores-parameter", + "text": " If waitSemaphoreCount is not 0, pWaitSemaphores must be a valid pointer to an array of waitSemaphoreCount valid VkSemaphore handles" + }, + { + "vuid": "VUID-VkBindSparseInfo-pBufferBinds-parameter", + "text": " If bufferBindCount is not 0, pBufferBinds must be a valid pointer to an array of bufferBindCount valid VkSparseBufferMemoryBindInfo structures" + }, + { + "vuid": "VUID-VkBindSparseInfo-pImageOpaqueBinds-parameter", + "text": " If imageOpaqueBindCount is not 0, pImageOpaqueBinds must be a valid pointer to an array of imageOpaqueBindCount valid VkSparseImageOpaqueMemoryBindInfo structures" + }, + { + "vuid": "VUID-VkBindSparseInfo-pImageBinds-parameter", + "text": " If imageBindCount is not 0, pImageBinds must be a valid pointer to an array of imageBindCount valid VkSparseImageMemoryBindInfo structures" + }, + { + "vuid": "VUID-VkBindSparseInfo-pSignalSemaphores-parameter", + "text": " If signalSemaphoreCount is not 0, pSignalSemaphores must be a valid pointer to an array of signalSemaphoreCount valid VkSemaphore handles" + }, + { + "vuid": "VUID-VkBindSparseInfo-commonparent", + "text": " Both of the elements of pSignalSemaphores, and the elements of pWaitSemaphores that are valid handles must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkDeviceGroupBindSparseInfo": { + "(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkDeviceGroupBindSparseInfo-resourceDeviceIndex-01118", + "text": " resourceDeviceIndex and memoryDeviceIndex must both be valid device indices." + }, + { + "vuid": "VUID-VkDeviceGroupBindSparseInfo-memoryDeviceIndex-01119", + "text": " Each memory allocation bound in this batch must have allocated an instance for memoryDeviceIndex." 
+ }, + { + "vuid": "VUID-VkDeviceGroupBindSparseInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO" + } + ] + }, + "vkCreateAndroidSurfaceKHR": { + "(VK_KHR_surface)+(VK_KHR_android_surface)": [ + { + "vuid": "VUID-vkCreateAndroidSurfaceKHR-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateAndroidSurfaceKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkAndroidSurfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateAndroidSurfaceKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateAndroidSurfaceKHR-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkAndroidSurfaceCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_android_surface)": [ + { + "vuid": "VUID-VkAndroidSurfaceCreateInfoKHR-window-01248", + "text": " window must point to a valid Android ANativeWindow." + }, + { + "vuid": "VUID-VkAndroidSurfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkAndroidSurfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAndroidSurfaceCreateInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkCreateMirSurfaceKHR": { + "(VK_KHR_surface)+(VK_KHR_mir_surface)": [ + { + "vuid": "VUID-vkCreateMirSurfaceKHR-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateMirSurfaceKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkMirSurfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateMirSurfaceKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateMirSurfaceKHR-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkMirSurfaceCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_mir_surface)": [ + { + "vuid": "VUID-VkMirSurfaceCreateInfoKHR-connection-01263", + "text": " connection must point to a valid MirConnection." + }, + { + "vuid": "VUID-VkMirSurfaceCreateInfoKHR-surface-01264", + "text": " surface must point to a valid MirSurface." 
+ }, + { + "vuid": "VUID-VkMirSurfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkMirSurfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkMirSurfaceCreateInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkCreateWaylandSurfaceKHR": { + "(VK_KHR_surface)+(VK_KHR_wayland_surface)": [ + { + "vuid": "VUID-vkCreateWaylandSurfaceKHR-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateWaylandSurfaceKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkWaylandSurfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateWaylandSurfaceKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateWaylandSurfaceKHR-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkWaylandSurfaceCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_wayland_surface)": [ + { + "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-display-01304", + "text": " display must point to a valid Wayland wl_display." + }, + { + "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-surface-01305", + "text": " surface must point to a valid Wayland wl_surface." + }, + { + "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkWaylandSurfaceCreateInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkCreateWin32SurfaceKHR": { + "(VK_KHR_surface)+(VK_KHR_win32_surface)": [ + { + "vuid": "VUID-vkCreateWin32SurfaceKHR-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateWin32SurfaceKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkWin32SurfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateWin32SurfaceKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateWin32SurfaceKHR-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkWin32SurfaceCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_win32_surface)": [ + { + "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-hinstance-01307", + "text": " hinstance must be a valid Win32 HINSTANCE." + }, + { + "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-hwnd-01308", + "text": " hwnd must be a valid Win32 HWND." 
+ }, + { + "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkWin32SurfaceCreateInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkCreateXcbSurfaceKHR": { + "(VK_KHR_surface)+(VK_KHR_xcb_surface)": [ + { + "vuid": "VUID-vkCreateXcbSurfaceKHR-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateXcbSurfaceKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkXcbSurfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateXcbSurfaceKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateXcbSurfaceKHR-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkXcbSurfaceCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_xcb_surface)": [ + { + "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-connection-01310", + "text": " connection must point to a valid X11 xcb_connection_t." + }, + { + "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-window-01311", + "text": " window must be a valid X11 xcb_window_t." + }, + { + "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkXcbSurfaceCreateInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkCreateXlibSurfaceKHR": { + "(VK_KHR_surface)+(VK_KHR_xlib_surface)": [ + { + "vuid": "VUID-vkCreateXlibSurfaceKHR-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateXlibSurfaceKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkXlibSurfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateXlibSurfaceKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateXlibSurfaceKHR-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkXlibSurfaceCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_xlib_surface)": [ + { + "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-dpy-01313", + "text": " dpy must point to a valid Xlib Display." + }, + { + "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-window-01314", + "text": " window must be a valid Xlib Window." 
+ }, + { + "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkXlibSurfaceCreateInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkCreateIOSSurfaceMVK": { + "(VK_KHR_surface)+(VK_MVK_ios_surface)": [ + { + "vuid": "VUID-vkCreateIOSSurfaceMVK-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateIOSSurfaceMVK-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkIOSSurfaceCreateInfoMVK structure" + }, + { + "vuid": "VUID-vkCreateIOSSurfaceMVK-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateIOSSurfaceMVK-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkIOSSurfaceCreateInfoMVK": { + "(VK_KHR_surface)+(VK_MVK_ios_surface)": [ + { + "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-pView-01316", + "text": " pView must be a valid UIView and must be backed by a CALayer instance of type CAMetalLayer." + }, + { + "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK" + }, + { + "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkIOSSurfaceCreateInfoMVK-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkCreateMacOSSurfaceMVK": { + "(VK_KHR_surface)+(VK_MVK_macos_surface)": [ + { + "vuid": "VUID-vkCreateMacOSSurfaceMVK-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateMacOSSurfaceMVK-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkMacOSSurfaceCreateInfoMVK structure" + }, + { + "vuid": "VUID-vkCreateMacOSSurfaceMVK-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateMacOSSurfaceMVK-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkMacOSSurfaceCreateInfoMVK": { + "(VK_KHR_surface)+(VK_MVK_macos_surface)": [ + { + "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-pView-01317", + "text": " pView must be a valid NSView and must be backed by a CALayer instance of type CAMetalLayer." 
+ }, + { + "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK" + }, + { + "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkMacOSSurfaceCreateInfoMVK-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkCreateViSurfaceNN": { + "(VK_KHR_surface)+(VK_NN_vi_surface)": [ + { + "vuid": "VUID-vkCreateViSurfaceNN-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateViSurfaceNN-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkViSurfaceCreateInfoNN structure" + }, + { + "vuid": "VUID-vkCreateViSurfaceNN-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateViSurfaceNN-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkViSurfaceCreateInfoNN": { + "(VK_KHR_surface)+(VK_NN_vi_surface)": [ + { + "vuid": "VUID-VkViSurfaceCreateInfoNN-window-01318", + "text": " window must be a valid nn::vi::NativeWindowHandle" + }, + { + "vuid": "VUID-VkViSurfaceCreateInfoNN-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN" + }, + { + "vuid": "VUID-VkViSurfaceCreateInfoNN-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkViSurfaceCreateInfoNN-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkDestroySurfaceKHR": { + "(VK_KHR_surface)": [ + { + "vuid": "VUID-vkDestroySurfaceKHR-surface-01266", + "text": " All VkSwapchainKHR objects created for surface must have been destroyed prior to destroying surface" + }, + { + "vuid": "VUID-vkDestroySurfaceKHR-surface-01267", + "text": " If VkAllocationCallbacks were provided when surface was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroySurfaceKHR-surface-01268", + "text": " If no VkAllocationCallbacks were provided when surface was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroySurfaceKHR-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkDestroySurfaceKHR-surface-parameter", + "text": " If surface is not VK_NULL_HANDLE, surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": "VUID-vkDestroySurfaceKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroySurfaceKHR-surface-parent", + "text": " If surface is a valid handle, it must have been created, allocated, or retrieved from instance" + } + ] + }, + "vkGetPhysicalDeviceDisplayPropertiesKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceDisplayPropertiesKHR-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayPropertiesKHR structures" + } + ] + }, + "vkAcquireXlibDisplayEXT": { + 
"(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)+(VK_EXT_acquire_xlib_display)": [ + { + "vuid": "VUID-vkAcquireXlibDisplayEXT-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkAcquireXlibDisplayEXT-dpy-parameter", + "text": " dpy must be a valid pointer to a Display value" + }, + { + "vuid": "VUID-vkAcquireXlibDisplayEXT-display-parameter", + "text": " display must be a valid VkDisplayKHR handle" + } + ] + }, + "vkGetRandROutputDisplayEXT": { + "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)+(VK_EXT_acquire_xlib_display)": [ + { + "vuid": "VUID-vkGetRandROutputDisplayEXT-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetRandROutputDisplayEXT-dpy-parameter", + "text": " dpy must be a valid pointer to a Display value" + }, + { + "vuid": "VUID-vkGetRandROutputDisplayEXT-pDisplay-parameter", + "text": " pDisplay must be a valid pointer to a VkDisplayKHR handle" + } + ] + }, + "vkReleaseDisplayEXT": { + "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_direct_mode_display)": [ + { + "vuid": "VUID-vkReleaseDisplayEXT-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkReleaseDisplayEXT-display-parameter", + "text": " display must be a valid VkDisplayKHR handle" + } + ] + }, + "vkGetPhysicalDeviceDisplayPlanePropertiesKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceDisplayPlanePropertiesKHR-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayPlanePropertiesKHR structures" + } + ] + }, + "vkGetDisplayPlaneSupportedDisplaysKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-planeIndex-01249", + "text": " planeIndex must be less than the number of display planes supported by the device as determined by calling vkGetPhysicalDeviceDisplayPlanePropertiesKHR" + }, + { + "vuid": "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplayCount-parameter", + "text": " pDisplayCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetDisplayPlaneSupportedDisplaysKHR-pDisplays-parameter", + "text": " If the value referenced by pDisplayCount is not 0, and pDisplays is not NULL, pDisplays must be a valid pointer to an array of pDisplayCount VkDisplayKHR handles" + } + ] + }, + "vkGetDisplayModePropertiesKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": "VUID-vkGetDisplayModePropertiesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetDisplayModePropertiesKHR-display-parameter", + "text": " display must be a valid VkDisplayKHR handle" + }, + { + "vuid": "VUID-vkGetDisplayModePropertiesKHR-pPropertyCount-parameter", + "text": " pPropertyCount must be a 
valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetDisplayModePropertiesKHR-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkDisplayModePropertiesKHR structures" + } + ] + }, + "vkCreateDisplayModeKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": "VUID-vkCreateDisplayModeKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkCreateDisplayModeKHR-display-parameter", + "text": " display must be a valid VkDisplayKHR handle" + }, + { + "vuid": "VUID-vkCreateDisplayModeKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDisplayModeCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateDisplayModeKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDisplayModeKHR-pMode-parameter", + "text": " pMode must be a valid pointer to a VkDisplayModeKHR handle" + } + ] + }, + "VkDisplayModeCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": "VUID-VkDisplayModeCreateInfoKHR-width-01250", + "text": " The width and height members of the visibleRegion member of parameters must be greater than 0" + }, + { + "vuid": "VUID-VkDisplayModeCreateInfoKHR-refreshRate-01251", + "text": " The refreshRate member of parameters must be greater than 0" + }, + { + "vuid": "VUID-VkDisplayModeCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkDisplayModeCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDisplayModeCreateInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + } + ] + }, + "vkGetDisplayPlaneCapabilitiesKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": "VUID-vkGetDisplayPlaneCapabilitiesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetDisplayPlaneCapabilitiesKHR-mode-parameter", + "text": " mode must be a valid VkDisplayModeKHR handle" + }, + { + "vuid": "VUID-vkGetDisplayPlaneCapabilitiesKHR-pCapabilities-parameter", + "text": " pCapabilities must be a valid pointer to a VkDisplayPlaneCapabilitiesKHR structure" + } + ] + }, + "vkDisplayPowerControlEXT": { + "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_display_control)": [ + { + "vuid": "VUID-vkDisplayPowerControlEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDisplayPowerControlEXT-display-parameter", + "text": " display must be a valid VkDisplayKHR handle" + }, + { + "vuid": "VUID-vkDisplayPowerControlEXT-pDisplayPowerInfo-parameter", + "text": " pDisplayPowerInfo must be a valid pointer to a valid VkDisplayPowerInfoEXT structure" + } + ] + }, + "VkDisplayPowerInfoEXT": { + "(VK_KHR_surface)+(VK_KHR_display)+(VK_EXT_display_control)": [ + { + "vuid": "VUID-VkDisplayPowerInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT" + }, + { + "vuid": "VUID-VkDisplayPowerInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDisplayPowerInfoEXT-powerState-parameter", + "text": " powerState must be a valid VkDisplayPowerStateEXT value" + } + ] + }, + "vkCreateDisplayPlaneSurfaceKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": 
"VUID-vkCreateDisplayPlaneSurfaceKHR-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateDisplayPlaneSurfaceKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDisplaySurfaceCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateDisplayPlaneSurfaceKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDisplayPlaneSurfaceKHR-pSurface-parameter", + "text": " pSurface must be a valid pointer to a VkSurfaceKHR handle" + } + ] + }, + "VkDisplaySurfaceCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_display)": [ + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-planeIndex-01252", + "text": " planeIndex must be less than the number of display planes supported by the device as determined by calling vkGetPhysicalDeviceDisplayPlanePropertiesKHR" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-planeReorderPossible-01253", + "text": " If the planeReorderPossible member of the VkDisplayPropertiesKHR structure returned by vkGetPhysicalDeviceDisplayPropertiesKHR for the display corresponding to displayMode is VK_TRUE then planeStackIndex must be less than the number of display planes supported by the device as determined by calling vkGetPhysicalDeviceDisplayPlanePropertiesKHR; otherwise planeStackIndex must equal the currentStackIndex member of VkDisplayPlanePropertiesKHR returned by vkGetPhysicalDeviceDisplayPlanePropertiesKHR for the display plane corresponding to displayMode" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01254", + "text": " If alphaMode is VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR then globalAlpha must be between 0 and 1, inclusive" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-01255", + "text": " alphaMode must be 0 or one of the bits present in the supportedAlpha member of VkDisplayPlaneCapabilitiesKHR returned by vkGetDisplayPlaneCapabilitiesKHR for the display plane corresponding to displayMode" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-width-01256", + "text": " The width and height members of imageExtent must be less than the maxImageDimensions2D member of VkPhysicalDeviceLimits" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-displayMode-parameter", + "text": " displayMode must be a valid VkDisplayModeKHR handle" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-transform-parameter", + "text": " transform must be a valid VkSurfaceTransformFlagBitsKHR value" + }, + { + "vuid": "VUID-VkDisplaySurfaceCreateInfoKHR-alphaMode-parameter", + "text": " alphaMode must be a valid VkDisplayPlaneAlphaFlagBitsKHR value" + } + ] + }, + "vkGetPhysicalDeviceSurfaceSupportKHR": { + "(VK_KHR_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-queueFamilyIndex-01269", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid 
VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-pSupported-parameter", + "text": " pSupported must be a valid pointer to a VkBool32 value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceSupportKHR-commonparent", + "text": " Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkGetPhysicalDeviceMirPresentationSupportKHR": { + "(VK_KHR_surface)+(VK_KHR_mir_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceMirPresentationSupportKHR-queueFamilyIndex-01265", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceMirPresentationSupportKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceMirPresentationSupportKHR-connection-parameter", + "text": " connection must be a valid pointer to a MirConnection value" + } + ] + }, + "vkGetPhysicalDeviceWaylandPresentationSupportKHR": { + "(VK_KHR_surface)+(VK_KHR_wayland_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-queueFamilyIndex-01306", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceWaylandPresentationSupportKHR-display-parameter", + "text": " display must be a valid pointer to a wl_display value" + } + ] + }, + "vkGetPhysicalDeviceWin32PresentationSupportKHR": { + "(VK_KHR_surface)+(VK_KHR_win32_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-queueFamilyIndex-01309", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceWin32PresentationSupportKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + } + ] + }, + "vkGetPhysicalDeviceXcbPresentationSupportKHR": { + "(VK_KHR_surface)+(VK_KHR_xcb_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-queueFamilyIndex-01312", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceXcbPresentationSupportKHR-connection-parameter", + "text": " connection must be a valid pointer to a xcb_connection_t value" + } + ] + }, + "vkGetPhysicalDeviceXlibPresentationSupportKHR": { + "(VK_KHR_surface)+(VK_KHR_xlib_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-queueFamilyIndex-01315", + "text": " queueFamilyIndex must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the given physicalDevice" + }, + { + "vuid": 
"VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceXlibPresentationSupportKHR-dpy-parameter", + "text": " dpy must be a valid pointer to a Display value" + } + ] + }, + "vkGetPhysicalDeviceSurfaceCapabilitiesKHR": { + "(VK_KHR_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-pSurfaceCapabilities-parameter", + "text": " pSurfaceCapabilities must be a valid pointer to a VkSurfaceCapabilitiesKHR structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilitiesKHR-commonparent", + "text": " Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkGetPhysicalDeviceSurfaceCapabilities2KHR": { + "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceInfo-parameter", + "text": " pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2KHR-pSurfaceCapabilities-parameter", + "text": " pSurfaceCapabilities must be a valid pointer to a VkSurfaceCapabilities2KHR structure" + } + ] + }, + "VkPhysicalDeviceSurfaceInfo2KHR": { + "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [ + { + "vuid": "VUID-VkPhysicalDeviceSurfaceInfo2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR" + }, + { + "vuid": "VUID-VkPhysicalDeviceSurfaceInfo2KHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPhysicalDeviceSurfaceInfo2KHR-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + } + ] + }, + "VkSurfaceCapabilities2KHR": { + "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [ + { + "vuid": "VUID-VkSurfaceCapabilities2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR" + }, + { + "vuid": "VUID-VkSurfaceCapabilities2KHR-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkSharedPresentSurfaceCapabilitiesKHR" + } + ] + }, + "VkSharedPresentSurfaceCapabilitiesKHR": { + "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkSharedPresentSurfaceCapabilitiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR" + } + ] + }, + "vkGetPhysicalDeviceSurfaceCapabilities2EXT": { + "(VK_KHR_surface)+(VK_EXT_display_surface_counter)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-pSurfaceCapabilities-parameter", + "text": " pSurfaceCapabilities must be a valid pointer to a 
VkSurfaceCapabilities2EXT structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceCapabilities2EXT-commonparent", + "text": " Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "VkSurfaceCapabilities2EXT": { + "(VK_KHR_surface)+(VK_EXT_display_surface_counter)": [ + { + "vuid": "VUID-VkSurfaceCapabilities2EXT-supportedSurfaceCounters-01246", + "text": " supportedSurfaceCounters must not include VK_SURFACE_COUNTER_VBLANK_EXT unless the surface queried is a &amp;lt;&amp;lt;wsi-display-surfaces,display surface&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkSurfaceCapabilities2EXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT" + }, + { + "vuid": "VUID-VkSurfaceCapabilities2EXT-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetPhysicalDeviceSurfaceFormatsKHR": { + "(VK_KHR_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormatCount-parameter", + "text": " pSurfaceFormatCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-pSurfaceFormats-parameter", + "text": " If the value referenced by pSurfaceFormatCount is not 0, and pSurfaceFormats is not NULL, pSurfaceFormats must be a valid pointer to an array of pSurfaceFormatCount VkSurfaceFormatKHR structures" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormatsKHR-commonparent", + "text": " Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkGetPhysicalDeviceSurfaceFormats2KHR": { + "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceInfo-parameter", + "text": " pSurfaceInfo must be a valid pointer to a valid VkPhysicalDeviceSurfaceInfo2KHR structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormatCount-parameter", + "text": " pSurfaceFormatCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfaceFormats2KHR-pSurfaceFormats-parameter", + "text": " If the value referenced by pSurfaceFormatCount is not 0, and pSurfaceFormats is not NULL, pSurfaceFormats must be a valid pointer to an array of pSurfaceFormatCount VkSurfaceFormat2KHR structures" + } + ] + }, + "VkSurfaceFormat2KHR": { + "(VK_KHR_surface)+(VK_KHR_get_surface_capabilities2)": [ + { + "vuid": "VUID-VkSurfaceFormat2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR" + }, + { + "vuid": "VUID-VkSurfaceFormat2KHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetPhysicalDeviceSurfacePresentModesKHR": { + "(VK_KHR_surface)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": 
"VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModeCount-parameter", + "text": " pPresentModeCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-pPresentModes-parameter", + "text": " If the value referenced by pPresentModeCount is not 0, and pPresentModes is not NULL, pPresentModes must be a valid pointer to an array of pPresentModeCount VkPresentModeKHR values" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceSurfacePresentModesKHR-commonparent", + "text": " Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkGetDeviceGroupPresentCapabilitiesKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkGetDeviceGroupPresentCapabilitiesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceGroupPresentCapabilitiesKHR-pDeviceGroupPresentCapabilities-parameter", + "text": " pDeviceGroupPresentCapabilities must be a valid pointer to a VkDeviceGroupPresentCapabilitiesKHR structure" + } + ] + }, + "VkDeviceGroupPresentCapabilitiesKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkDeviceGroupPresentCapabilitiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR" + }, + { + "vuid": "VUID-VkDeviceGroupPresentCapabilitiesKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetDeviceGroupSurfacePresentModesKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkGetDeviceGroupSurfacePresentModesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceGroupSurfacePresentModesKHR-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": "VUID-vkGetDeviceGroupSurfacePresentModesKHR-pModes-parameter", + "text": " pModes must be a valid pointer to a VkDeviceGroupPresentModeFlagsKHR value" + }, + { + "vuid": "VUID-vkGetDeviceGroupSurfacePresentModesKHR-commonparent", + "text": " Both of device, and surface must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkGetPhysicalDevicePresentRectanglesKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRectCount-parameter", + "text": " pRectCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-pRects-parameter", + "text": " If the value referenced by pRectCount is not 0, and pRects is not NULL, pRects must be a valid pointer to an array of pRectCount VkRect2D structures" + }, + { + "vuid": "VUID-vkGetPhysicalDevicePresentRectanglesKHR-commonparent", + "text": " Both of physicalDevice, and surface must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkGetRefreshCycleDurationGOOGLE": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_GOOGLE_display_timing)": [ + { + "vuid": "VUID-vkGetRefreshCycleDurationGOOGLE-device-parameter", + "text": 
" device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetRefreshCycleDurationGOOGLE-swapchain-parameter", + "text": " swapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-vkGetRefreshCycleDurationGOOGLE-pDisplayTimingProperties-parameter", + "text": " pDisplayTimingProperties must be a valid pointer to a VkRefreshCycleDurationGOOGLE structure" + }, + { + "vuid": "VUID-vkGetRefreshCycleDurationGOOGLE-commonparent", + "text": " Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkGetPastPresentationTimingGOOGLE": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_GOOGLE_display_timing)": [ + { + "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-swapchain-parameter", + "text": " swapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimingCount-parameter", + "text": " pPresentationTimingCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-pPresentationTimings-parameter", + "text": " If the value referenced by pPresentationTimingCount is not 0, and pPresentationTimings is not NULL, pPresentationTimings must be a valid pointer to an array of pPresentationTimingCount VkPastPresentationTimingGOOGLE structures" + }, + { + "vuid": "VUID-vkGetPastPresentationTimingGOOGLE-commonparent", + "text": " Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkGetSwapchainStatusKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-vkGetSwapchainStatusKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetSwapchainStatusKHR-swapchain-parameter", + "text": " swapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-vkGetSwapchainStatusKHR-commonparent", + "text": " Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkCreateSwapchainKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-vkCreateSwapchainKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateSwapchainKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkSwapchainCreateInfoKHR structure" + }, + { + "vuid": "VUID-vkCreateSwapchainKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateSwapchainKHR-pSwapchain-parameter", + "text": " pSwapchain must be a valid pointer to a VkSwapchainKHR handle" + } + ] + }, + "VkSwapchainCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-surface-01270", + "text": " surface must be a surface that is supported by the device as determined using vkGetPhysicalDeviceSurfaceSupportKHR" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-minImageCount-01271", + "text": " minImageCount must be greater than or equal to the value returned in the minImageCount member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-minImageCount-01272", + "text": " 
minImageCount must be less than or equal to the value returned in the maxImageCount member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface if the returned maxImageCount is not zero" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageFormat-01273", + "text": " imageFormat and imageColorSpace must match the format and colorSpace members, respectively, of one of the VkSurfaceFormatKHR structures returned by vkGetPhysicalDeviceSurfaceFormatsKHR for the surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageExtent-01274", + "text": " imageExtent must be between minImageExtent and maxImageExtent, inclusive, where minImageExtent and maxImageExtent are members of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageExtent-01689", + "text": " imageExtent members width and height must both be non-zero" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageArrayLayers-01275", + "text": " imageArrayLayers must be greater than 0 and less than or equal to the maxImageArrayLayers member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01277", + "text": " If imageSharingMode is VK_SHARING_MODE_CONCURRENT, pQueueFamilyIndices must be a valid pointer to an array of queueFamilyIndexCount uint32_t values" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01278", + "text": " If imageSharingMode is VK_SHARING_MODE_CONCURRENT, queueFamilyIndexCount must be greater than 1" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-preTransform-01279", + "text": " preTransform must be one of the bits present in the supportedTransforms member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-01280", + "text": " compositeAlpha must be one of the bits present in the supportedCompositeAlpha member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-presentMode-01281", + "text": " presentMode must be one of the VkPresentModeKHR values returned by vkGetPhysicalDeviceSurfacePresentModesKHR for the surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-01933", + "text": " If oldSwapchain is not VK_NULL_HANDLE, oldSwapchain must be a non-retired swapchain associated with native window referred to by surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageFormat-01778", + "text": " imageFormat, imageUsage, imageExtent, and imageArrayLayers must be supported for VK_IMAGE_TYPE_2D VK_IMAGE_TILING_OPTIMAL images as reported by vkGetPhysicalDeviceImageFormatProperties." 
+ }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupSwapchainCreateInfoKHR or VkSwapchainCounterCreateInfoEXT" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkSwapchainCreateFlagBitsKHR values" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-surface-parameter", + "text": " surface must be a valid VkSurfaceKHR handle" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageFormat-parameter", + "text": " imageFormat must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageColorSpace-parameter", + "text": " imageColorSpace must be a valid VkColorSpaceKHR value" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageUsage-parameter", + "text": " imageUsage must be a valid combination of VkImageUsageFlagBits values" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageUsage-requiredbitmask", + "text": " imageUsage must not be 0" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-parameter", + "text": " imageSharingMode must be a valid VkSharingMode value" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-preTransform-parameter", + "text": " preTransform must be a valid VkSurfaceTransformFlagBitsKHR value" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-compositeAlpha-parameter", + "text": " compositeAlpha must be a valid VkCompositeAlphaFlagBitsKHR value" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-presentMode-parameter", + "text": " presentMode must be a valid VkPresentModeKHR value" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parameter", + "text": " If oldSwapchain is not VK_NULL_HANDLE, oldSwapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-oldSwapchain-parent", + "text": " If oldSwapchain is a valid handle, it must have been created, allocated, or retrieved from surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-commonparent", + "text": " Both of oldSwapchain, and surface that are valid handles must have been created, allocated, or retrieved from the same VkInstance" + } + ], + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-minImageCount-01383", + "text": " minImageCount must be 1 if presentMode is either VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR or VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-presentMode-01427", + "text": " If presentMode is VK_PRESENT_MODE_IMMEDIATE_KHR, VK_PRESENT_MODE_MAILBOX_KHR, VK_PRESENT_MODE_FIFO_KHR or VK_PRESENT_MODE_FIFO_RELAXED_KHR, imageUsage must be a subset of the supported usage flags present in the supportedUsageFlags member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for surface" + }, + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageUsage-01384", + "text": " If presentMode is VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR or VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR, imageUsage must be a subset of the supported usage flags present in the 
sharedPresentSupportedUsageFlags member of the VkSharedPresentSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilities2KHR for surface" + } + ], + "(VK_KHR_surface)+(VK_KHR_swapchain)+!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageUsage-01276", + "text": " imageUsage must be a subset of the supported usage flags present in the supportedUsageFlags member of the VkSurfaceCapabilitiesKHR structure returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR for the surface" + } + ], + "(VK_KHR_surface)+(VK_KHR_swapchain)+!(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01393", + "text": " If imageSharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties for the physicalDevice that was used to create device" + } + ], + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-imageSharingMode-01428", + "text": " If imageSharingMode is VK_SHARING_MODE_CONCURRENT, each element of pQueueFamilyIndices must be unique and must be less than pQueueFamilyPropertyCount returned by either vkGetPhysicalDeviceQueueFamilyProperties or vkGetPhysicalDeviceQueueFamilyProperties2 for the physicalDevice that was used to create device" + } + ], + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkSwapchainCreateInfoKHR-physicalDeviceCount-01429", + "text": " If the logical device was created with VkDeviceGroupDeviceCreateInfo::physicalDeviceCount equal to 1, flags must not contain VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR" + } + ] + }, + "VkDeviceGroupSwapchainCreateInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkDeviceGroupSwapchainCreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-parameter", + "text": " modes must be a valid combination of VkDeviceGroupPresentModeFlagBitsKHR values" + }, + { + "vuid": "VUID-VkDeviceGroupSwapchainCreateInfoKHR-modes-requiredbitmask", + "text": " modes must not be 0" + } + ] + }, + "VkSwapchainCounterCreateInfoEXT": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_EXT_display_control)": [ + { + "vuid": "VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-01244", + "text": " The bits in surfaceCounters must be supported by VkSwapchainCreateInfoKHR::surface, as reported by vkGetPhysicalDeviceSurfaceCapabilities2EXT." + }, + { + "vuid": "VUID-VkSwapchainCounterCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkSwapchainCounterCreateInfoEXT-surfaceCounters-parameter", + "text": " surfaceCounters must be a valid combination of VkSurfaceCounterFlagBitsEXT values" + } + ] + }, + "vkGetSwapchainCounterEXT": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_EXT_display_control)": [ + { + "vuid": "VUID-vkGetSwapchainCounterEXT-swapchain-01245", + "text": " One or more present commands on swapchain must have been processed by the presentation engine." 
+ }, + { + "vuid": "VUID-vkGetSwapchainCounterEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetSwapchainCounterEXT-swapchain-parameter", + "text": " swapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-vkGetSwapchainCounterEXT-counter-parameter", + "text": " counter must be a valid VkSurfaceCounterFlagBitsEXT value" + }, + { + "vuid": "VUID-vkGetSwapchainCounterEXT-pCounterValue-parameter", + "text": " pCounterValue must be a valid pointer to a uint64_t value" + }, + { + "vuid": "VUID-vkGetSwapchainCounterEXT-commonparent", + "text": " Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkDestroySwapchainKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-vkDestroySwapchainKHR-swapchain-01282", + "text": " All uses of presentable images acquired from swapchain must have completed execution" + }, + { + "vuid": "VUID-vkDestroySwapchainKHR-swapchain-01283", + "text": " If VkAllocationCallbacks were provided when swapchain was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroySwapchainKHR-swapchain-01284", + "text": " If no VkAllocationCallbacks were provided when swapchain was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroySwapchainKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDestroySwapchainKHR-swapchain-parameter", + "text": " If swapchain is not VK_NULL_HANDLE, swapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-vkDestroySwapchainKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroySwapchainKHR-commonparent", + "text": " Both of device, and swapchain that are valid handles must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkCreateSharedSwapchainsKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_display_swapchain)": [ + { + "vuid": "VUID-vkCreateSharedSwapchainsKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateSharedSwapchainsKHR-pCreateInfos-parameter", + "text": " pCreateInfos must be a valid pointer to an array of swapchainCount valid VkSwapchainCreateInfoKHR structures" + }, + { + "vuid": "VUID-vkCreateSharedSwapchainsKHR-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateSharedSwapchainsKHR-pSwapchains-parameter", + "text": " pSwapchains must be a valid pointer to an array of swapchainCount VkSwapchainKHR handles" + }, + { + "vuid": "VUID-vkCreateSharedSwapchainsKHR-swapchainCount-arraylength", + "text": " swapchainCount must be greater than 0" + } + ] + }, + "vkGetSwapchainImagesKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-vkGetSwapchainImagesKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetSwapchainImagesKHR-swapchain-parameter", + "text": " swapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-vkGetSwapchainImagesKHR-pSwapchainImageCount-parameter", + "text": " pSwapchainImageCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetSwapchainImagesKHR-pSwapchainImages-parameter", + "text": " If the value 
referenced by pSwapchainImageCount is not 0, and pSwapchainImages is not NULL, pSwapchainImages must be a valid pointer to an array of pSwapchainImageCount VkImage handles" + }, + { + "vuid": "VUID-vkGetSwapchainImagesKHR-commonparent", + "text": " Both of device, and swapchain must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkAcquireNextImageKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-vkAcquireNextImageKHR-swapchain-01285", + "text": " swapchain must not be in the retired state" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-semaphore-01286", + "text": " If semaphore is not VK_NULL_HANDLE it must be unsignaled" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-semaphore-01779", + "text": " If semaphore is not VK_NULL_HANDLE it must not have any uncompleted signal or wait operations pending" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-fence-01287", + "text": " If fence is not VK_NULL_HANDLE it must be unsignaled and must not be associated with any other queue command that has not yet completed execution on that queue" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-semaphore-01780", + "text": " semaphore and fence must not both be equal to VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-swapchain-01802", + "text": " If the number of currently acquired images is greater than the difference between the number of images in swapchain and the value of VkSurfaceCapabilitiesKHR::minImageCount as returned by a call to vkGetPhysicalDeviceSurfaceCapabilities2KHR with the surface used to create swapchain, timeout must not be UINT64_MAX" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-swapchain-parameter", + "text": " swapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-semaphore-parameter", + "text": " If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-fence-parameter", + "text": " If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-pImageIndex-parameter", + "text": " pImageIndex must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-semaphore-parent", + "text": " If semaphore is a valid handle, it must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-fence-parent", + "text": " If fence is a valid handle, it must have been created, allocated, or retrieved from device" + }, + { + "vuid": "VUID-vkAcquireNextImageKHR-commonparent", + "text": " Both of device, and swapchain that are valid handles must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkAcquireNextImage2KHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-vkAcquireNextImage2KHR-swapchain-01803", + "text": " If the number of currently acquired images is greater than the difference between the number of images in the swapchain member of pAcquireInfo and the value of VkSurfaceCapabilitiesKHR::minImageCount as returned by a call to vkGetPhysicalDeviceSurfaceCapabilities2KHR with the surface used to create swapchain, the timeout member of pAcquireInfo must not be UINT64_MAX" + }, + { + "vuid": "VUID-vkAcquireNextImage2KHR-device-parameter", + "text": " device must be a valid VkDevice 
handle" + }, + { + "vuid": "VUID-vkAcquireNextImage2KHR-pAcquireInfo-parameter", + "text": " pAcquireInfo must be a valid pointer to a valid VkAcquireNextImageInfoKHR structure" + }, + { + "vuid": "VUID-vkAcquireNextImage2KHR-pImageIndex-parameter", + "text": " pImageIndex must be a valid pointer to a uint32_t value" + } + ] + }, + "VkAcquireNextImageInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-swapchain-01675", + "text": " swapchain must not be in the retired state" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-01288", + "text": " If semaphore is not VK_NULL_HANDLE it must be unsignaled" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-01781", + "text": " If semaphore is not VK_NULL_HANDLE it must not have any uncompleted signal or wait operations pending" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-fence-01289", + "text": " If fence is not VK_NULL_HANDLE it must be unsignaled and must not be associated with any other queue command that has not yet completed execution on that queue" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-01782", + "text": " semaphore and fence must not both be equal to VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-deviceMask-01290", + "text": " deviceMask must be a valid device mask" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-deviceMask-01291", + "text": " deviceMask must not be zero" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-01804", + "text": " semaphore and fence must not both be equal to VK_NULL_HANDLE." + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-swapchain-parameter", + "text": " swapchain must be a valid VkSwapchainKHR handle" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-semaphore-parameter", + "text": " If semaphore is not VK_NULL_HANDLE, semaphore must be a valid VkSemaphore handle" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-fence-parameter", + "text": " If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle" + }, + { + "vuid": "VUID-VkAcquireNextImageInfoKHR-commonparent", + "text": " Each of fence, semaphore, and swapchain that are valid handles must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkQueuePresentKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-vkQueuePresentKHR-pSwapchains-01292", + "text": " Each element of pSwapchains member of pPresentInfo must be a swapchain that is created for a surface for which presentation is supported from queue as determined using a call to vkGetPhysicalDeviceSurfaceSupportKHR" + }, + { + "vuid": "VUID-vkQueuePresentKHR-pWaitSemaphores-01294", + "text": " When a semaphore unsignal operation defined by the elements of the pWaitSemaphores member of pPresentInfo executes on queue, no other queue must be waiting on the same semaphore." + }, + { + "vuid": "VUID-vkQueuePresentKHR-pWaitSemaphores-01295", + "text": " All elements of the pWaitSemaphores member of pPresentInfo must be semaphores that are signaled, or have &amp;lt;&amp;lt;synchronization-semaphores-signaling, semaphore signal operations&amp;gt;&amp;gt; previously submitted for execution." 
+ }, + { + "vuid": "VUID-vkQueuePresentKHR-queue-parameter", + "text": " queue must be a valid VkQueue handle" + }, + { + "vuid": "VUID-vkQueuePresentKHR-pPresentInfo-parameter", + "text": " pPresentInfo must be a valid pointer to a valid VkPresentInfoKHR structure" + } + ], + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_display_swapchain)": [ + { + "vuid": "VUID-vkQueuePresentKHR-pSwapchains-01293", + "text": " If more than one member of pSwapchains was created from a display surface, all display surfaces referenced that refer to the same display must use the same display mode" + } + ] + }, + "VkPresentInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+!(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkPresentInfoKHR-pImageIndices-01296", + "text": " Each element of pImageIndices must be the index of a presentable image acquired from the swapchain specified by the corresponding element of the pSwapchains array, and the presented image subresource must be in the VK_IMAGE_LAYOUT_PRESENT_SRC_KHR layout at the time the operation is executed on a VkDevice" + } + ], + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_shared_presentable_image)": [ + { + "vuid": "VUID-VkPresentInfoKHR-pImageIndices-01430", + "text": " Each element of pImageIndices must be the index of a presentable image acquired from the swapchain specified by the corresponding element of the pSwapchains array, and the presented image subresource must be in the VK_IMAGE_LAYOUT_PRESENT_SRC_KHR or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR layout at the time the operation is executed on a VkDevice" + } + ], + "(VK_KHR_surface)+(VK_KHR_swapchain)": [ + { + "vuid": "VUID-VkPresentInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PRESENT_INFO_KHR" + }, + { + "vuid": "VUID-VkPresentInfoKHR-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceGroupPresentInfoKHR, VkDisplayPresentInfoKHR, VkPresentRegionsKHR, or VkPresentTimesInfoGOOGLE" + }, + { + "vuid": "VUID-VkPresentInfoKHR-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkPresentInfoKHR-pWaitSemaphores-parameter", + "text": " If waitSemaphoreCount is not 0, pWaitSemaphores must be a valid pointer to an array of waitSemaphoreCount valid VkSemaphore handles" + }, + { + "vuid": "VUID-VkPresentInfoKHR-pSwapchains-parameter", + "text": " pSwapchains must be a valid pointer to an array of swapchainCount valid VkSwapchainKHR handles" + }, + { + "vuid": "VUID-VkPresentInfoKHR-pImageIndices-parameter", + "text": " pImageIndices must be a valid pointer to an array of swapchainCount uint32_t values" + }, + { + "vuid": "VUID-VkPresentInfoKHR-pResults-parameter", + "text": " If pResults is not NULL, pResults must be a valid pointer to an array of swapchainCount VkResult values" + }, + { + "vuid": "VUID-VkPresentInfoKHR-swapchainCount-arraylength", + "text": " swapchainCount must be greater than 0" + }, + { + "vuid": "VUID-VkPresentInfoKHR-commonparent", + "text": " Both of the elements of pSwapchains, and the elements of pWaitSemaphores that are valid handles must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "VkPresentRegionsKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_incremental_present)": [ + { + "vuid": "VUID-VkPresentRegionsKHR-swapchainCount-01260", + "text": " swapchainCount must be the same value as VkPresentInfoKHR::swapchainCount, where VkPresentInfoKHR is 
in the pNext-chain of this VkPresentRegionsKHR structure." + }, + { + "vuid": "VUID-VkPresentRegionsKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR" + }, + { + "vuid": "VUID-VkPresentRegionsKHR-pRegions-parameter", + "text": " If pRegions is not NULL, pRegions must be a valid pointer to an array of swapchainCount valid VkPresentRegionKHR structures" + }, + { + "vuid": "VUID-VkPresentRegionsKHR-swapchainCount-arraylength", + "text": " swapchainCount must be greater than 0" + } + ] + }, + "VkPresentRegionKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_incremental_present)": [ + { + "vuid": "VUID-VkPresentRegionKHR-pRectangles-parameter", + "text": " If rectangleCount is not 0, and pRectangles is not NULL, pRectangles must be a valid pointer to an array of rectangleCount VkRectLayerKHR structures" + } + ] + }, + "VkRectLayerKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_incremental_present)": [ + { + "vuid": "VUID-VkRectLayerKHR-offset-01261", + "text": " The sum of offset and extent must be no greater than the imageExtent member of the VkSwapchainCreateInfoKHR structure given to vkCreateSwapchainKHR." + }, + { + "vuid": "VUID-VkRectLayerKHR-layer-01262", + "text": " layer must be less than imageArrayLayers member of the VkSwapchainCreateInfoKHR structure given to vkCreateSwapchainKHR." + } + ] + }, + "VkDisplayPresentInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_KHR_display_swapchain)": [ + { + "vuid": "VUID-VkDisplayPresentInfoKHR-srcRect-01257", + "text": " srcRect must specify a rectangular region that is a subset of the image being presented" + }, + { + "vuid": "VUID-VkDisplayPresentInfoKHR-dstRect-01258", + "text": " dstRect must specify a rectangular region that is a subset of the visibleRegion parameter of the display mode the swapchain being presented uses" + }, + { + "vuid": "VUID-VkDisplayPresentInfoKHR-persistentContent-01259", + "text": " If the persistentContent member of the VkDisplayPropertiesKHR structure returned by vkGetPhysicalDeviceDisplayPropertiesKHR for the display the present operation targets then persistent must be VK_FALSE" + }, + { + "vuid": "VUID-VkDisplayPresentInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR" + } + ] + }, + "VkDeviceGroupPresentInfoKHR": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_VERSION_1_1,VK_KHR_device_group)": [ + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-swapchainCount-01297", + "text": " swapchainCount must equal 0 or VkPresentInfoKHR::swapchainCount" + }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01298", + "text": " If mode is VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, then each element of pDeviceMasks must have exactly one bit set, and the corresponding element of VkDeviceGroupPresentCapabilitiesKHR::presentMask must be non-zero" + }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01299", + "text": " If mode is VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, then each element of pDeviceMasks must have exactly one bit set, and some physical device in the logical device must include that bit in its VkDeviceGroupPresentCapabilitiesKHR::presentMask." 
+ }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01300", + "text": " If mode is VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, then each element of pDeviceMasks must have a value for which all set bits are set in one of the elements of VkDeviceGroupPresentCapabilitiesKHR::presentMask" + }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01301", + "text": " If mode is VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR, then for each bit set in each element of pDeviceMasks, the corresponding element of VkDeviceGroupPresentCapabilitiesKHR::presentMask must be non-zero" + }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-01302", + "text": " The value of each element of pDeviceMasks must be equal to the device mask passed in VkAcquireNextImageInfoKHR::deviceMask when the image index was last acquired" + }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-01303", + "text": " mode must have exactly one bit set, and that bit must have been included in VkDeviceGroupSwapchainCreateInfoKHR::modes" + }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR" + }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-pDeviceMasks-parameter", + "text": " If swapchainCount is not 0, pDeviceMasks must be a valid pointer to an array of swapchainCount uint32_t values" + }, + { + "vuid": "VUID-VkDeviceGroupPresentInfoKHR-mode-parameter", + "text": " mode must be a valid VkDeviceGroupPresentModeFlagBitsKHR value" + } + ] + }, + "VkPresentTimesInfoGOOGLE": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_GOOGLE_display_timing)": [ + { + "vuid": "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-01247", + "text": " swapchainCount must be the same value as VkPresentInfoKHR::swapchainCount, where VkPresentInfoKHR is in the pNext chain of this VkPresentTimesInfoGOOGLE structure." 
+ }, + { + "vuid": "VUID-VkPresentTimesInfoGOOGLE-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE" + }, + { + "vuid": "VUID-VkPresentTimesInfoGOOGLE-pTimes-parameter", + "text": " If pTimes is not NULL, pTimes must be a valid pointer to an array of swapchainCount VkPresentTimeGOOGLE structures" + }, + { + "vuid": "VUID-VkPresentTimesInfoGOOGLE-swapchainCount-arraylength", + "text": " swapchainCount must be greater than 0" + } + ] + }, + "vkSetHdrMetadataEXT": { + "(VK_KHR_surface)+(VK_KHR_swapchain)+(VK_EXT_hdr_metadata)": [ + { + "vuid": "VUID-vkSetHdrMetadataEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkSetHdrMetadataEXT-pSwapchains-parameter", + "text": " pSwapchains must be a valid pointer to an array of swapchainCount valid VkSwapchainKHR handles" + }, + { + "vuid": "VUID-vkSetHdrMetadataEXT-pMetadata-parameter", + "text": " pMetadata must be a valid pointer to an array of swapchainCount valid VkHdrMetadataEXT structures" + }, + { + "vuid": "VUID-vkSetHdrMetadataEXT-swapchainCount-arraylength", + "text": " swapchainCount must be greater than 0" + }, + { + "vuid": "VUID-vkSetHdrMetadataEXT-commonparent", + "text": " Both of device, and the elements of pSwapchains must have been created, allocated, or retrieved from the same VkInstance" + } + ] + }, + "vkEnumerateInstanceLayerProperties": { + "core": [ + { + "vuid": "VUID-vkEnumerateInstanceLayerProperties-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkEnumerateInstanceLayerProperties-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkLayerProperties structures" + } + ] + }, + "vkEnumerateDeviceLayerProperties": { + "core": [ + { + "vuid": "VUID-vkEnumerateDeviceLayerProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkEnumerateDeviceLayerProperties-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkEnumerateDeviceLayerProperties-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkLayerProperties structures" + } + ] + }, + "vkEnumerateInstanceExtensionProperties": { + "core": [ + { + "vuid": "VUID-vkEnumerateInstanceExtensionProperties-pLayerName-parameter", + "text": " If pLayerName is not NULL, pLayerName must be a null-terminated UTF-8 string" + }, + { + "vuid": "VUID-vkEnumerateInstanceExtensionProperties-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkEnumerateInstanceExtensionProperties-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkExtensionProperties structures" + } + ] + }, + "vkEnumerateDeviceExtensionProperties": { + "core": [ + { + "vuid": "VUID-vkEnumerateDeviceExtensionProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkEnumerateDeviceExtensionProperties-pLayerName-parameter", + "text": " If pLayerName is not NULL, pLayerName must be a 
null-terminated UTF-8 string" + }, + { + "vuid": "VUID-vkEnumerateDeviceExtensionProperties-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkEnumerateDeviceExtensionProperties-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkExtensionProperties structures" + } + ] + }, + "vkGetPhysicalDeviceFeatures": { + "core": [ + { + "vuid": "VUID-vkGetPhysicalDeviceFeatures-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceFeatures-pFeatures-parameter", + "text": " pFeatures must be a valid pointer to a VkPhysicalDeviceFeatures structure" + } + ] + }, + "vkGetPhysicalDeviceFeatures2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceFeatures2-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceFeatures2-pFeatures-parameter", + "text": " pFeatures must be a valid pointer to a VkPhysicalDeviceFeatures2 structure" + } + ] + }, + "VkPhysicalDeviceFeatures2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkPhysicalDeviceFeatures2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2" + } + ] + }, + "VkPhysicalDeviceVariablePointerFeatures": { + "(VK_VERSION_1_1,VK_KHR_variable_pointers)": [ + { + "vuid": "VUID-VkPhysicalDeviceVariablePointerFeatures-variablePointers-01431", + "text": " If variablePointers is enabled then variablePointersStorageBuffer must also be enabled." + }, + { + "vuid": "VUID-VkPhysicalDeviceVariablePointerFeatures-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES" + } + ] + }, + "VkPhysicalDeviceMultiviewFeatures": { + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-VkPhysicalDeviceMultiviewFeatures-multiviewGeometryShader-00580", + "text": " If multiviewGeometryShader is enabled then multiview must also be enabled." + }, + { + "vuid": "VUID-VkPhysicalDeviceMultiviewFeatures-multiviewTessellationShader-00581", + "text": " If multiviewTessellationShader is enabled then multiview must also be enabled." 
+ }, + { + "vuid": "VUID-VkPhysicalDeviceMultiviewFeatures-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES" + } + ] + }, + "VkPhysicalDevice16BitStorageFeatures": { + "(VK_VERSION_1_1,VK_KHR_16bit_storage)": [ + { + "vuid": "VUID-VkPhysicalDevice16BitStorageFeatures-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES" + } + ] + }, + "VkPhysicalDeviceSamplerYcbcrConversionFeatures": { + "(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkPhysicalDeviceSamplerYcbcrConversionFeatures-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES" + } + ] + }, + "VkPhysicalDeviceProtectedMemoryFeatures": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkPhysicalDeviceProtectedMemoryFeatures-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES" + } + ] + }, + "VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT": { + "(VK_EXT_blend_operation_advanced)": [ + { + "vuid": "VUID-VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT" + } + ] + }, + "VkPhysicalDeviceShaderDrawParameterFeatures": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkPhysicalDeviceShaderDrawParameterFeatures-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES" + } + ] + }, + "VkPhysicalDeviceDescriptorIndexingFeaturesEXT": { + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkPhysicalDeviceDescriptorIndexingFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT" + } + ] + }, + "VkPhysicalDevicePushDescriptorPropertiesKHR": { + "(VK_KHR_push_descriptor)": [ + { + "vuid": "VUID-VkPhysicalDevicePushDescriptorPropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR" + } + ] + }, + "VkPhysicalDeviceMultiviewProperties": { + "(VK_VERSION_1_1,VK_KHR_multiview)": [ + { + "vuid": "VUID-VkPhysicalDeviceMultiviewProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES" + } + ] + }, + "VkPhysicalDeviceDiscardRectanglePropertiesEXT": { + "(VK_EXT_discard_rectangles)": [ + { + "vuid": "VUID-VkPhysicalDeviceDiscardRectanglePropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceSampleLocationsPropertiesEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkPhysicalDeviceSampleLocationsPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceExternalMemoryHostPropertiesEXT": { + "(VK_EXT_external_memory_host)": [ + { + "vuid": "VUID-VkPhysicalDeviceExternalMemoryHostPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX": { + "(VK_NVX_multiview_per_view_attributes)": [ + { + "vuid": "VUID-VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX" + } + ] + }, + "VkPhysicalDevicePointClippingProperties": { + "(VK_VERSION_1_1,VK_KHR_maintenance2)": [ + { + 
"vuid": "VUID-VkPhysicalDevicePointClippingProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES" + } + ] + }, + "VkPhysicalDeviceSubgroupProperties": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkPhysicalDeviceSubgroupProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES" + } + ] + }, + "VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT": { + "(VK_EXT_blend_operation_advanced)": [ + { + "vuid": "VUID-VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT": { + "(VK_EXT_vertex_attribute_divisor)": [ + { + "vuid": "VUID-VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT": { + "(VK_EXT_sampler_filter_minmax)": [ + { + "vuid": "VUID-VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceProtectedMemoryProperties": { + "(VK_VERSION_1_1)": [ + { + "vuid": "VUID-VkPhysicalDeviceProtectedMemoryProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES" + } + ] + }, + "VkPhysicalDeviceMaintenance3Properties": { + "(VK_VERSION_1_1,VK_KHR_maintenance3)": [ + { + "vuid": "VUID-VkPhysicalDeviceMaintenance3Properties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES" + } + ] + }, + "VkPhysicalDeviceDescriptorIndexingPropertiesEXT": { + "(VK_EXT_descriptor_indexing)": [ + { + "vuid": "VUID-VkPhysicalDeviceDescriptorIndexingPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceConservativeRasterizationPropertiesEXT": { + "(VK_EXT_conservative_rasterization)": [ + { + "vuid": "VUID-VkPhysicalDeviceConservativeRasterizationPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT" + } + ] + }, + "VkPhysicalDeviceShaderCorePropertiesAMD": { + "(VK_AMD_shader_core_properties)": [ + { + "vuid": "VUID-VkPhysicalDeviceShaderCorePropertiesAMD-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD" + } + ] + }, + "vkGetPhysicalDeviceMultisamplePropertiesEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-samples-parameter", + "text": " samples must be a valid VkSampleCountFlagBits value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceMultisamplePropertiesEXT-pMultisampleProperties-parameter", + "text": " pMultisampleProperties must be a valid pointer to a VkMultisamplePropertiesEXT structure" + } + ] + }, + "VkMultisamplePropertiesEXT": { + "(VK_EXT_sample_locations)": [ + { + "vuid": "VUID-VkMultisamplePropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT" + }, + { + "vuid": "VUID-VkMultisamplePropertiesEXT-pNext-pNext", + "text": " pNext 
must be NULL" + } + ] + }, + "vkGetPhysicalDeviceFormatProperties": { + "core": [ + { + "vuid": "VUID-vkGetPhysicalDeviceFormatProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceFormatProperties-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceFormatProperties-pFormatProperties-parameter", + "text": " pFormatProperties must be a valid pointer to a VkFormatProperties structure" + } + ] + }, + "vkGetPhysicalDeviceFormatProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceFormatProperties2-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceFormatProperties2-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceFormatProperties2-pFormatProperties-parameter", + "text": " pFormatProperties must be a valid pointer to a VkFormatProperties2 structure" + } + ] + }, + "VkFormatProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkFormatProperties2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2" + }, + { + "vuid": "VUID-VkFormatProperties2-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetPhysicalDeviceImageFormatProperties": { + "core": [ + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-type-parameter", + "text": " type must be a valid VkImageType value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-tiling-parameter", + "text": " tiling must be a valid VkImageTiling value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-usage-parameter", + "text": " usage must be a valid combination of VkImageUsageFlagBits values" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-usage-requiredbitmask", + "text": " usage must not be 0" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-flags-parameter", + "text": " flags must be a valid combination of VkImageCreateFlagBits values" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties-pImageFormatProperties-parameter", + "text": " pImageFormatProperties must be a valid pointer to a VkImageFormatProperties structure" + } + ] + }, + "vkGetPhysicalDeviceExternalImageFormatPropertiesNV": { + "(VK_NV_external_memory_capabilities)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-type-parameter", + "text": " type must be a valid VkImageType value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-tiling-parameter", + "text": " tiling must be a valid VkImageTiling value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-parameter", + 
"text": " usage must be a valid combination of VkImageUsageFlagBits values" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-usage-requiredbitmask", + "text": " usage must not be 0" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-flags-parameter", + "text": " flags must be a valid combination of VkImageCreateFlagBits values" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-externalHandleType-parameter", + "text": " externalHandleType must be a valid combination of VkExternalMemoryHandleTypeFlagBitsNV values" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalImageFormatPropertiesNV-pExternalImageFormatProperties-parameter", + "text": " pExternalImageFormatProperties must be a valid pointer to a VkExternalImageFormatPropertiesNV structure" + } + ] + }, + "vkGetPhysicalDeviceImageFormatProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868", + "text": " If the pNext chain of pImageFormatProperties contains an instance of VkAndroidHardwareBufferUsageANDROID, the pNext chain of pImageFormatInfo must contain an instance of VkPhysicalDeviceExternalImageFormatInfo with handleType set to VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID." + } + ], + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatInfo-parameter", + "text": " pImageFormatInfo must be a valid pointer to a valid VkPhysicalDeviceImageFormatInfo2 structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-pImageFormatProperties-parameter", + "text": " pImageFormatProperties must be a valid pointer to a VkImageFormatProperties2 structure" + } + ] + }, + "VkPhysicalDeviceImageFormatInfo2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2" + }, + { + "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-pNext-pNext", + "text": " pNext must be NULL or a pointer to a valid instance of VkPhysicalDeviceExternalImageFormatInfo" + }, + { + "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-format-parameter", + "text": " format must be a valid VkFormat value" + }, + { + "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-type-parameter", + "text": " type must be a valid VkImageType value" + }, + { + "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-tiling-parameter", + "text": " tiling must be a valid VkImageTiling value" + }, + { + "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-usage-parameter", + "text": " usage must be a valid combination of VkImageUsageFlagBits values" + }, + { + "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-usage-requiredbitmask", + "text": " usage must not be 0" + }, + { + "vuid": "VUID-VkPhysicalDeviceImageFormatInfo2-flags-parameter", + "text": " flags must be a valid combination of VkImageCreateFlagBits values" + } + ] + }, + "VkImageFormatProperties2": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)": [ + { + "vuid": "VUID-VkImageFormatProperties2-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2" + }, + { + "vuid": 
"VUID-VkImageFormatProperties2-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkAndroidHardwareBufferUsageANDROID, VkExternalImageFormatProperties, VkSamplerYcbcrConversionImageFormatProperties, or VkTextureLODGatherFormatPropertiesAMD" + }, + { + "vuid": "VUID-VkImageFormatProperties2-sType-unique", + "text": " Each sType member in the pNext chain must be unique" + } + ] + }, + "VkPhysicalDeviceExternalImageFormatInfo": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [ + { + "vuid": "VUID-VkPhysicalDeviceExternalImageFormatInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalImageFormatInfo-handleType-parameter", + "text": " If handleType is not 0, handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + } + ] + }, + "VkExternalImageFormatProperties": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [ + { + "vuid": "VUID-VkExternalImageFormatProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES" + } + ] + }, + "VkSamplerYcbcrConversionImageFormatProperties": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_VERSION_1_1,VK_KHR_sampler_ycbcr_conversion)": [ + { + "vuid": "VUID-VkSamplerYcbcrConversionImageFormatProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES" + } + ] + }, + "VkAndroidHardwareBufferUsageANDROID": { + "(VK_VERSION_1_1,VK_KHR_get_physical_device_properties2)+(VK_ANDROID_external_memory_android_hardware_buffer)": [ + { + "vuid": "VUID-VkAndroidHardwareBufferUsageANDROID-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID" + } + ] + }, + "vkGetPhysicalDeviceExternalBufferProperties": { + "(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceExternalBufferProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferInfo-parameter", + "text": " pExternalBufferInfo must be a valid pointer to a valid VkPhysicalDeviceExternalBufferInfo structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalBufferProperties-pExternalBufferProperties-parameter", + "text": " pExternalBufferProperties must be a valid pointer to a VkExternalBufferProperties structure" + } + ] + }, + "VkPhysicalDeviceExternalBufferInfo": { + "(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [ + { + "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter", + "text": " flags must be a valid combination of VkBufferCreateFlagBits values" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-usage-parameter", + "text": " usage must be a valid combination of VkBufferUsageFlagBits values" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-usage-requiredbitmask", + "text": " usage must not be 0" + }, + { + "vuid": 
"VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter", + "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + } + ] + }, + "VkExternalBufferProperties": { + "(VK_VERSION_1_1,VK_KHR_external_memory_capabilities)": [ + { + "vuid": "VUID-VkExternalBufferProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES" + }, + { + "vuid": "VUID-VkExternalBufferProperties-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetPhysicalDeviceExternalSemaphoreProperties": { + "(VK_VERSION_1_1,VK_KHR_external_semaphore_capabilities)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreInfo-parameter", + "text": " pExternalSemaphoreInfo must be a valid pointer to a valid VkPhysicalDeviceExternalSemaphoreInfo structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalSemaphoreProperties-pExternalSemaphoreProperties-parameter", + "text": " pExternalSemaphoreProperties must be a valid pointer to a VkExternalSemaphoreProperties structure" + } + ] + }, + "VkPhysicalDeviceExternalSemaphoreInfo": { + "(VK_VERSION_1_1,VK_KHR_external_semaphore_capabilities)": [ + { + "vuid": "VUID-VkPhysicalDeviceExternalSemaphoreInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalSemaphoreInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalSemaphoreInfo-handleType-parameter", + "text": " handleType must be a valid VkExternalSemaphoreHandleTypeFlagBits value" + } + ] + }, + "VkExternalSemaphoreProperties": { + "(VK_VERSION_1_1,VK_KHR_external_semaphore_capabilities)": [ + { + "vuid": "VUID-VkExternalSemaphoreProperties-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES" + }, + { + "vuid": "VUID-VkExternalSemaphoreProperties-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkGetPhysicalDeviceExternalFenceProperties": { + "(VK_VERSION_1_1,VK_KHR_external_fence_capabilities)": [ + { + "vuid": "VUID-vkGetPhysicalDeviceExternalFenceProperties-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceInfo-parameter", + "text": " pExternalFenceInfo must be a valid pointer to a valid VkPhysicalDeviceExternalFenceInfo structure" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceExternalFenceProperties-pExternalFenceProperties-parameter", + "text": " pExternalFenceProperties must be a valid pointer to a VkExternalFenceProperties structure" + } + ] + }, + "VkPhysicalDeviceExternalFenceInfo": { + "(VK_VERSION_1_1,VK_KHR_external_fence_capabilities)": [ + { + "vuid": "VUID-VkPhysicalDeviceExternalFenceInfo-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalFenceInfo-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalFenceInfo-handleType-parameter", + "text": " handleType must be a valid VkExternalFenceHandleTypeFlagBits value" + } + ] + }, + "VkExternalFenceProperties": { + "(VK_VERSION_1_1,VK_KHR_external_fence_capabilities)": [ + { + "vuid": "VUID-VkExternalFenceProperties-sType-sType", + "text": " sType must be 
VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES" + }, + { + "vuid": "VUID-VkExternalFenceProperties-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkSetDebugUtilsObjectNameEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkSetDebugUtilsObjectNameEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkSetDebugUtilsObjectNameEXT-pNameInfo-parameter", + "text": " pNameInfo must be a valid pointer to a valid VkDebugUtilsObjectNameInfoEXT structure" + } + ] + }, + "VkDebugUtilsObjectNameInfoEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-01905", + "text": " objectType must not be VK_OBJECT_TYPE_UNKNOWN" + }, + { + "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-objectHandle-01906", + "text": " objectHandle must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-objectHandle-01907", + "text": " objectHandle must be a Vulkan object of the type associated with objectType as defined in &amp;lt;&amp;lt;debugging-object-types&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT" + }, + { + "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-objectType-parameter", + "text": " objectType must be a valid VkObjectType value" + }, + { + "vuid": "VUID-VkDebugUtilsObjectNameInfoEXT-pObjectName-parameter", + "text": " If pObjectName is not NULL, pObjectName must be a null-terminated UTF-8 string" + } + ] + }, + "vkSetDebugUtilsObjectTagEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkSetDebugUtilsObjectTagEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkSetDebugUtilsObjectTagEXT-pTagInfo-parameter", + "text": " pTagInfo must be a valid pointer to a valid VkDebugUtilsObjectTagInfoEXT structure" + } + ] + }, + "VkDebugUtilsObjectTagInfoEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-objectType-01908", + "text": " objectType must not be VK_OBJECT_TYPE_UNKNOWN" + }, + { + "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-objectHandle-01909", + "text": " objectHandle must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-objectHandle-01910", + "text": " objectHandle must be a Vulkan object of the type associated with objectType as defined in &amp;lt;&amp;lt;debugging-object-types&amp;gt;&amp;gt;." 
+ }, + { + "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT" + }, + { + "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-objectType-parameter", + "text": " objectType must be a valid VkObjectType value" + }, + { + "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-pTag-parameter", + "text": " pTag must be a valid pointer to an array of tagSize bytes" + }, + { + "vuid": "VUID-VkDebugUtilsObjectTagInfoEXT-tagSize-arraylength", + "text": " tagSize must be greater than 0" + } + ] + }, + "vkQueueBeginDebugUtilsLabelEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkQueueBeginDebugUtilsLabelEXT-queue-parameter", + "text": " queue must be a valid VkQueue handle" + }, + { + "vuid": "VUID-vkQueueBeginDebugUtilsLabelEXT-pLabelInfo-parameter", + "text": " pLabelInfo must be a valid pointer to a valid VkDebugUtilsLabelEXT structure" + } + ] + }, + "VkDebugUtilsLabelEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-VkDebugUtilsLabelEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT" + }, + { + "vuid": "VUID-VkDebugUtilsLabelEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDebugUtilsLabelEXT-pLabelName-parameter", + "text": " pLabelName must be a null-terminated UTF-8 string" + } + ] + }, + "vkQueueEndDebugUtilsLabelEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkQueueEndDebugUtilsLabelEXT-None-01911", + "text": " There must be an outstanding vkQueueBeginDebugUtilsLabelEXT command prior to the vkQueueEndDebugUtilsLabelEXT on the queue" + }, + { + "vuid": "VUID-vkQueueEndDebugUtilsLabelEXT-queue-parameter", + "text": " queue must be a valid VkQueue handle" + } + ] + }, + "vkQueueInsertDebugUtilsLabelEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkQueueInsertDebugUtilsLabelEXT-queue-parameter", + "text": " queue must be a valid VkQueue handle" + }, + { + "vuid": "VUID-vkQueueInsertDebugUtilsLabelEXT-pLabelInfo-parameter", + "text": " pLabelInfo must be a valid pointer to a valid VkDebugUtilsLabelEXT structure" + } + ] + }, + "vkCmdBeginDebugUtilsLabelEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBeginDebugUtilsLabelEXT-pLabelInfo-parameter", + "text": " pLabelInfo must be a valid pointer to a valid VkDebugUtilsLabelEXT structure" + }, + { + "vuid": "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdBeginDebugUtilsLabelEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + } + ] + }, + "vkCmdEndDebugUtilsLabelEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01912", + "text": " There must be an outstanding vkCmdBeginDebugUtilsLabelEXT command prior to the vkCmdEndDebugUtilsLabelEXT on the queue that commandBuffer is submitted to" + }, + { + "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01913", + "text": " If commandBuffer is a secondary command buffer, there must be an outstanding vkCmdBeginDebugUtilsLabelEXT command recorded to commandBuffer that has not previously been ended by a call to 
vkCmdEndDebugUtilsLabelEXT." + }, + { + "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + } + ] + }, + "vkCmdInsertDebugUtilsLabelEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdInsertDebugUtilsLabelEXT-pLabelInfo-parameter", + "text": " pLabelInfo must be a valid pointer to a valid VkDebugUtilsLabelEXT structure" + }, + { + "vuid": "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdInsertDebugUtilsLabelEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + } + ] + }, + "vkCreateDebugUtilsMessengerEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkCreateDebugUtilsMessengerEXT-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateDebugUtilsMessengerEXT-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDebugUtilsMessengerCreateInfoEXT structure" + }, + { + "vuid": "VUID-vkCreateDebugUtilsMessengerEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDebugUtilsMessengerEXT-pMessenger-parameter", + "text": " pMessenger must be a valid pointer to a VkDebugUtilsMessengerEXT handle" + } + ] + }, + "VkDebugUtilsMessengerCreateInfoEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-pfnUserCallback-01914", + "text": " pfnUserCallback must be a valid PFN_vkDebugUtilsMessengerCallbackEXT" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-parameter", + "text": " messageSeverity must be a valid combination of VkDebugUtilsMessageSeverityFlagBitsEXT values" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageSeverity-requiredbitmask", + "text": " messageSeverity must not be 0" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-parameter", + "text": " messageType must be a valid combination of VkDebugUtilsMessageTypeFlagBitsEXT values" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCreateInfoEXT-messageType-requiredbitmask", + "text": " messageType must not be 0" + } + ] + }, + "VkDebugUtilsMessengerCallbackDataEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT" + }, + { + "vuid": 
"VUID-VkDebugUtilsMessengerCallbackDataEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-flags-zerobitmask", + "text": " flags must be 0" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessageIdName-parameter", + "text": " If pMessageIdName is not NULL, pMessageIdName must be a null-terminated UTF-8 string" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-pMessage-parameter", + "text": " pMessage must be a null-terminated UTF-8 string" + }, + { + "vuid": "VUID-VkDebugUtilsMessengerCallbackDataEXT-objectCount-arraylength", + "text": " objectCount must be greater than 0" + } + ] + }, + "vkSubmitDebugUtilsMessageEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-messageSeverity-parameter", + "text": " messageSeverity must be a valid VkDebugUtilsMessageSeverityFlagBitsEXT value" + }, + { + "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-parameter", + "text": " messageTypes must be a valid combination of VkDebugUtilsMessageTypeFlagBitsEXT values" + }, + { + "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-messageTypes-requiredbitmask", + "text": " messageTypes must not be 0" + }, + { + "vuid": "VUID-vkSubmitDebugUtilsMessageEXT-pCallbackData-parameter", + "text": " pCallbackData must be a valid pointer to a valid VkDebugUtilsMessengerCallbackDataEXT structure" + } + ] + }, + "vkDestroyDebugUtilsMessengerEXT": { + "(VK_EXT_debug_utils)": [ + { + "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-01915", + "text": " If VkAllocationCallbacks were provided when messenger was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-01916", + "text": " If no VkAllocationCallbacks were provided when messenger was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parameter", + "text": " messenger must be a valid VkDebugUtilsMessengerEXT handle" + }, + { + "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyDebugUtilsMessengerEXT-messenger-parent", + "text": " messenger must have been created, allocated, or retrieved from instance" + } + ] + }, + "vkDebugMarkerSetObjectNameEXT": { + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-vkDebugMarkerSetObjectNameEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDebugMarkerSetObjectNameEXT-pNameInfo-parameter", + "text": " pNameInfo must be a valid pointer to a valid VkDebugMarkerObjectNameInfoEXT structure" + } + ] + }, + "VkDebugMarkerObjectNameInfoEXT": { + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-objectType-01490", + "text": " objectType must not be VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT" + }, + { + "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-object-01491", + "text": " object must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-object-01492", + "text": " object must be a Vulkan object of the type associated with objectType as defined in 
&amp;lt;&amp;lt;debug-report-object-types&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT" + }, + { + "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-objectType-parameter", + "text": " objectType must be a valid VkDebugReportObjectTypeEXT value" + }, + { + "vuid": "VUID-VkDebugMarkerObjectNameInfoEXT-pObjectName-parameter", + "text": " pObjectName must be a null-terminated UTF-8 string" + } + ] + }, + "vkDebugMarkerSetObjectTagEXT": { + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-vkDebugMarkerSetObjectTagEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkDebugMarkerSetObjectTagEXT-pTagInfo-parameter", + "text": " pTagInfo must be a valid pointer to a valid VkDebugMarkerObjectTagInfoEXT structure" + } + ] + }, + "VkDebugMarkerObjectTagInfoEXT": { + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-objectType-01493", + "text": " objectType must not be VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT" + }, + { + "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-object-01494", + "text": " object must not be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-object-01495", + "text": " object must be a Vulkan object of the type associated with objectType as defined in &amp;lt;&amp;lt;debug-report-object-types&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT" + }, + { + "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-objectType-parameter", + "text": " objectType must be a valid VkDebugReportObjectTypeEXT value" + }, + { + "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-pTag-parameter", + "text": " pTag must be a valid pointer to an array of tagSize bytes" + }, + { + "vuid": "VUID-VkDebugMarkerObjectTagInfoEXT-tagSize-arraylength", + "text": " tagSize must be greater than 0" + } + ] + }, + "vkCmdDebugMarkerBeginEXT": { + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDebugMarkerBeginEXT-pMarkerInfo-parameter", + "text": " pMarkerInfo must be a valid pointer to a valid VkDebugMarkerMarkerInfoEXT structure" + }, + { + "vuid": "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDebugMarkerBeginEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + } + ] + }, + "VkDebugMarkerMarkerInfoEXT": { + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-VkDebugMarkerMarkerInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT" + }, + { + "vuid": "VUID-VkDebugMarkerMarkerInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDebugMarkerMarkerInfoEXT-pMarkerName-parameter", + "text": " pMarkerName must be a null-terminated UTF-8 string" + } + ] + }, + "vkCmdDebugMarkerEndEXT": { + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-01239", 
+ "text": " There must be an outstanding vkCmdDebugMarkerBeginEXT command prior to the vkCmdDebugMarkerEndEXT on the queue that commandBuffer is submitted to" + }, + { + "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-01240", + "text": " If commandBuffer is a secondary command buffer, there must be an outstanding vkCmdDebugMarkerBeginEXT command recorded to commandBuffer that has not previously been ended by a call to vkCmdDebugMarkerEndEXT." + }, + { + "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDebugMarkerEndEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + } + ] + }, + "vkCmdDebugMarkerInsertEXT": { + "(VK_EXT_debug_marker)": [ + { + "vuid": "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDebugMarkerInsertEXT-pMarkerInfo-parameter", + "text": " pMarkerInfo must be a valid pointer to a valid VkDebugMarkerMarkerInfoEXT structure" + }, + { + "vuid": "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-recording", + "text": " commandBuffer must be in the &amp;lt;&amp;lt;commandbuffers-lifecycle, recording state&amp;gt;&amp;gt;" + }, + { + "vuid": "VUID-vkCmdDebugMarkerInsertEXT-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + } + ] + }, + "vkCreateDebugReportCallbackEXT": { + "(VK_EXT_debug_report)": [ + { + "vuid": "VUID-vkCreateDebugReportCallbackEXT-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkCreateDebugReportCallbackEXT-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkDebugReportCallbackCreateInfoEXT structure" + }, + { + "vuid": "VUID-vkCreateDebugReportCallbackEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateDebugReportCallbackEXT-pCallback-parameter", + "text": " pCallback must be a valid pointer to a VkDebugReportCallbackEXT handle" + } + ] + }, + "VkDebugReportCallbackCreateInfoEXT": { + "(VK_EXT_debug_report)": [ + { + "vuid": "VUID-VkDebugReportCallbackCreateInfoEXT-pfnCallback-01385", + "text": " pfnCallback must be a valid PFN_vkDebugReportCallbackEXT" + }, + { + "vuid": "VUID-VkDebugReportCallbackCreateInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT" + }, + { + "vuid": "VUID-VkDebugReportCallbackCreateInfoEXT-flags-parameter", + "text": " flags must be a valid combination of VkDebugReportFlagBitsEXT values" + } + ] + }, + "vkDebugReportMessageEXT": { + "(VK_EXT_debug_report)": [ + { + "vuid": "VUID-vkDebugReportMessageEXT-object-01241", + "text": " object must be a Vulkan object or VK_NULL_HANDLE" + }, + { + "vuid": "VUID-vkDebugReportMessageEXT-objectType-01498", + "text": " If objectType is not VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT and object is not VK_NULL_HANDLE, object must be a Vulkan object of the corresponding type associated with objectType as defined in 
&amp;lt;&amp;lt;debug-report-object-types&amp;gt;&amp;gt;." + }, + { + "vuid": "VUID-vkDebugReportMessageEXT-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkDebugReportMessageEXT-flags-parameter", + "text": " flags must be a valid combination of VkDebugReportFlagBitsEXT values" + }, + { + "vuid": "VUID-vkDebugReportMessageEXT-flags-requiredbitmask", + "text": " flags must not be 0" + }, + { + "vuid": "VUID-vkDebugReportMessageEXT-objectType-parameter", + "text": " objectType must be a valid VkDebugReportObjectTypeEXT value" + }, + { + "vuid": "VUID-vkDebugReportMessageEXT-pLayerPrefix-parameter", + "text": " pLayerPrefix must be a null-terminated UTF-8 string" + }, + { + "vuid": "VUID-vkDebugReportMessageEXT-pMessage-parameter", + "text": " pMessage must be a null-terminated UTF-8 string" + } + ] + }, + "vkDestroyDebugReportCallbackEXT": { + "(VK_EXT_debug_report)": [ + { + "vuid": "VUID-vkDestroyDebugReportCallbackEXT-instance-01242", + "text": " If VkAllocationCallbacks were provided when callback was created, a compatible set of callbacks must be provided here" + }, + { + "vuid": "VUID-vkDestroyDebugReportCallbackEXT-instance-01243", + "text": " If no VkAllocationCallbacks were provided when callback was created, pAllocator must be NULL" + }, + { + "vuid": "VUID-vkDestroyDebugReportCallbackEXT-instance-parameter", + "text": " instance must be a valid VkInstance handle" + }, + { + "vuid": "VUID-vkDestroyDebugReportCallbackEXT-callback-parameter", + "text": " callback must be a valid VkDebugReportCallbackEXT handle" + }, + { + "vuid": "VUID-vkDestroyDebugReportCallbackEXT-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkDestroyDebugReportCallbackEXT-callback-parent", + "text": " callback must have been created, allocated, or retrieved from instance" + } + ] + } + } +} \ No newline at end of file diff --git a/registry/vk.xml b/registry/vk.xml new file mode 100644 index 0000000..8f4c13d --- /dev/null +++ b/registry/vk.xml @@ -0,0 +1,8699 @@ + + + +Copyright (c) 2015-2018 The Khronos Group Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +------------------------------------------------------------------------ + +This file, vk.xml, is the Vulkan API Registry. It is a critically important +and normative part of the Vulkan Specification, including a canonical +machine-readable definition of the API, parameter and member validation +language incorporated into the Specification and reference pages, and other +material which is registered by Khronos, such as tags used by extension and +layer authors. The authoritative public version of vk.xml is maintained in +the master branch of the Khronos Vulkan GitHub project. The authoritative +private version is maintained in the master branch of the member gitlab +server. 
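For orientation, the VK_EXT_debug_utils valid-usage entries above fit together roughly as in the following sketch. This is illustrative only and not part of the patch: it assumes VK_EXT_debug_utils was enabled when the instance was created, and the helper names (debug_callback, create_messenger) are hypothetical. Because vkCreateDebugUtilsMessengerEXT is an extension command, it is fetched through vkGetInstanceProcAddr; the create info keeps flags at 0, uses non-zero severity/type masks, and supplies a callback matching PFN_vkDebugUtilsMessengerCallbackEXT, as the VUIDs require.

    #include <vulkan/vulkan.h>
    #include <stdio.h>

    /* Callback signature matches PFN_vkDebugUtilsMessengerCallbackEXT. */
    static VKAPI_ATTR VkBool32 VKAPI_CALL
    debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
                   VkDebugUtilsMessageTypeFlagsEXT types,
                   const VkDebugUtilsMessengerCallbackDataEXT *callback_data,
                   void *user_data)
    {
        (void)severity; (void)types; (void)user_data;
        fprintf(stderr, "debug_utils: %s\n", callback_data->pMessage);
        return VK_FALSE; /* the callback should not abort the triggering call */
    }

    /* Hypothetical helper: create a messenger for warnings and errors. */
    static VkDebugUtilsMessengerEXT create_messenger(VkInstance instance)
    {
        VkDebugUtilsMessengerCreateInfoEXT ci = {0};
        ci.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
        ci.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                             VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
        ci.messageType     = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
                             VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
        ci.pfnUserCallback = debug_callback;   /* must be a valid callback */

        /* Extension entry point: load it via vkGetInstanceProcAddr. */
        PFN_vkCreateDebugUtilsMessengerEXT pfn =
            (PFN_vkCreateDebugUtilsMessengerEXT)
            vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT");

        VkDebugUtilsMessengerEXT messenger = VK_NULL_HANDLE;
        if (pfn != NULL)
            pfn(instance, &ci, NULL, &messenger);
        return messenger;
    }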
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + #include "vk_platform.h" + + WSI extensions + + + + + + + + + In the current header structure, each platform's interfaces + are confined to a platform-specific header (vulkan_xlib.h, + vulkan_win32.h, etc.). These headers are not self-contained, + and should not include native headers (X11/Xlib.h, + windows.h, etc.). Code should either include vulkan.h after + defining the appropriate VK_USE_PLATFORM_platform_KHR + macros, or include the required native headers prior to + explicitly including the corresponding platform header. + + To accomplish this, the dependencies of native types require + native headers, but the XML defines the content for those + native headers as empty. The actual native header includes + can be restored by modifying the native header tags above + to #include the header file in the 'name' attribute. + + + + + + + + + + + + + + + + + + + + + #define VK_MAKE_VERSION(major, minor, patch) \ + (((major) << 22) | ((minor) << 12) | (patch)) + #define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) + #define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) + #define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) + + // DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. +//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 + // Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_VERSION(1, 0, 0)// Patch version should always be set to 0 + // Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_VERSION(1, 1, 0)// Patch version should always be set to 0 + // Version of this file +#define VK_HEADER_VERSION 74 + + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#if !defined(VK_DEFINE_NON_DISPATCHABLE_HANDLE) +#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; +#else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; +#endif +#endif + + + +#define VK_NULL_HANDLE 0 + + + struct ANativeWindow; + struct AHardwareBuffer; + + typedef uint32_t VkSampleMask; + typedef uint32_t VkBool32; + typedef uint32_t VkFlags; + typedef uint64_t VkDeviceSize; + + Basic C types, pulled in via vk_platform.h + + + + + + + + + + + Bitmask types + typedef VkFlags VkFramebufferCreateFlags; + typedef VkFlags VkQueryPoolCreateFlags; + typedef VkFlags VkRenderPassCreateFlags; + typedef VkFlags VkSamplerCreateFlags; + typedef VkFlags VkPipelineLayoutCreateFlags; + typedef VkFlags VkPipelineCacheCreateFlags; + typedef VkFlags VkPipelineDepthStencilStateCreateFlags; + typedef VkFlags VkPipelineDynamicStateCreateFlags; + typedef VkFlags VkPipelineColorBlendStateCreateFlags; + typedef VkFlags VkPipelineMultisampleStateCreateFlags; + typedef VkFlags VkPipelineRasterizationStateCreateFlags; + typedef VkFlags VkPipelineViewportStateCreateFlags; + typedef VkFlags VkPipelineTessellationStateCreateFlags; + typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; + typedef VkFlags VkPipelineVertexInputStateCreateFlags; + typedef VkFlags VkPipelineShaderStageCreateFlags; + typedef VkFlags VkDescriptorSetLayoutCreateFlags; + typedef VkFlags 
VkBufferViewCreateFlags; + typedef VkFlags VkInstanceCreateFlags; + typedef VkFlags VkDeviceCreateFlags; + typedef VkFlags VkDeviceQueueCreateFlags; + typedef VkFlags VkQueueFlags; + typedef VkFlags VkMemoryPropertyFlags; + typedef VkFlags VkMemoryHeapFlags; + typedef VkFlags VkAccessFlags; + typedef VkFlags VkBufferUsageFlags; + typedef VkFlags VkBufferCreateFlags; + typedef VkFlags VkShaderStageFlags; + typedef VkFlags VkImageUsageFlags; + typedef VkFlags VkImageCreateFlags; + typedef VkFlags VkImageViewCreateFlags; + typedef VkFlags VkPipelineCreateFlags; + typedef VkFlags VkColorComponentFlags; + typedef VkFlags VkFenceCreateFlags; + typedef VkFlags VkSemaphoreCreateFlags; + typedef VkFlags VkFormatFeatureFlags; + typedef VkFlags VkQueryControlFlags; + typedef VkFlags VkQueryResultFlags; + typedef VkFlags VkShaderModuleCreateFlags; + typedef VkFlags VkEventCreateFlags; + typedef VkFlags VkCommandPoolCreateFlags; + typedef VkFlags VkCommandPoolResetFlags; + typedef VkFlags VkCommandBufferResetFlags; + typedef VkFlags VkCommandBufferUsageFlags; + typedef VkFlags VkQueryPipelineStatisticFlags; + typedef VkFlags VkMemoryMapFlags; + typedef VkFlags VkImageAspectFlags; + typedef VkFlags VkSparseMemoryBindFlags; + typedef VkFlags VkSparseImageFormatFlags; + typedef VkFlags VkSubpassDescriptionFlags; + typedef VkFlags VkPipelineStageFlags; + typedef VkFlags VkSampleCountFlags; + typedef VkFlags VkAttachmentDescriptionFlags; + typedef VkFlags VkStencilFaceFlags; + typedef VkFlags VkCullModeFlags; + typedef VkFlags VkDescriptorPoolCreateFlags; + typedef VkFlags VkDescriptorPoolResetFlags; + typedef VkFlags VkDependencyFlags; + typedef VkFlags VkSubgroupFeatureFlags; + typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; + typedef VkFlags VkObjectEntryUsageFlagsNVX; + + typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; + + + WSI extensions + typedef VkFlags VkCompositeAlphaFlagsKHR; + typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; + typedef VkFlags VkSurfaceTransformFlagsKHR; + typedef VkFlags VkSwapchainCreateFlagsKHR; + typedef VkFlags VkDisplayModeCreateFlagsKHR; + typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; + typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; + typedef VkFlags VkMirSurfaceCreateFlagsKHR; + typedef VkFlags VkViSurfaceCreateFlagsNN; + typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; + typedef VkFlags VkWin32SurfaceCreateFlagsKHR; + typedef VkFlags VkXlibSurfaceCreateFlagsKHR; + typedef VkFlags VkXcbSurfaceCreateFlagsKHR; + typedef VkFlags VkIOSSurfaceCreateFlagsMVK; + typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; + typedef VkFlags VkPeerMemoryFeatureFlags; + + typedef VkFlags VkMemoryAllocateFlags; + + typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; + + typedef VkFlags VkDebugReportFlagsEXT; + typedef VkFlags VkCommandPoolTrimFlags; + + typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + typedef VkFlags VkExternalMemoryFeatureFlagsNV; + typedef VkFlags VkExternalMemoryHandleTypeFlags; + + typedef VkFlags VkExternalMemoryFeatureFlags; + + typedef VkFlags VkExternalSemaphoreHandleTypeFlags; + + typedef VkFlags VkExternalSemaphoreFeatureFlags; + + typedef VkFlags VkSemaphoreImportFlags; + + typedef VkFlags VkExternalFenceHandleTypeFlags; + + typedef VkFlags VkExternalFenceFeatureFlags; + + typedef VkFlags VkFenceImportFlags; + + typedef VkFlags VkSurfaceCounterFlagsEXT; + typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; + typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; + typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; + typedef 
VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; + typedef VkFlags VkValidationCacheCreateFlagsEXT; + typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; + typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; + typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; + typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; + typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; + typedef VkFlags VkDescriptorBindingFlagsEXT; + + Types which can be void pointers or class pointers, selected at compile time + VK_DEFINE_HANDLE(VkInstance) + VK_DEFINE_HANDLE(VkPhysicalDevice) + VK_DEFINE_HANDLE(VkDevice) + VK_DEFINE_HANDLE(VkQueue) + VK_DEFINE_HANDLE(VkCommandBuffer) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) + + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) + + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) + + WSI extensions + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) + VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) + + Types generated from corresponding enums tags below + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Extensions + + + + + + + + + + + + + + + + + + WSI extensions + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The PFN_vk*Function types are used by VkAllocationCallbacks below + typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + typedef void (VKAPI_PTR 
*PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + + The PFN_vkVoidFunction type are used by VkGet*ProcAddr below + typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); + + The PFN_vkDebugReportCallbackEXT type are used by the DEBUG_REPORT extension + typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + + The PFN_vkDebugUtilsMessengerCallbackEXT type are used by the VK_EXT_debug_utils extension + typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageType, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + + Struct types + + int32_t x + int32_t y + + + int32_t x + int32_t y + int32_t z + + + uint32_t width + uint32_t height + + + uint32_t width + uint32_t height + uint32_t depth + + + float x + float y + float width + float height + float minDepth + float maxDepth + + + VkOffset2D offset + VkExtent2D extent + + + VkRect2D rect + uint32_t baseArrayLayer + uint32_t layerCount + + + VkComponentSwizzle r + VkComponentSwizzle g + VkComponentSwizzle b + VkComponentSwizzle a + + + uint32_t apiVersion + uint32_t driverVersion + uint32_t vendorID + uint32_t deviceID + VkPhysicalDeviceType deviceType + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE] + uint8_t pipelineCacheUUID[VK_UUID_SIZE] + VkPhysicalDeviceLimits limits + VkPhysicalDeviceSparseProperties sparseProperties + + + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]extension name + uint32_t specVersionversion of the extension specification implemented + + + char layerName[VK_MAX_EXTENSION_NAME_SIZE]layer name + uint32_t specVersionversion of the layer specification implemented + uint32_t implementationVersionbuild or release version of the layer's library + char description[VK_MAX_DESCRIPTION_SIZE]Free-form description of the layer + + + VkStructureType sType + const void* pNext + const char* pApplicationName + uint32_t applicationVersion + const char* pEngineName + uint32_t engineVersion + uint32_t apiVersion + + + void* pUserData + PFN_vkAllocationFunction pfnAllocation + PFN_vkReallocationFunction pfnReallocation + PFN_vkFreeFunction pfnFree + PFN_vkInternalAllocationNotification pfnInternalAllocation + PFN_vkInternalFreeNotification pfnInternalFree + + + VkStructureType sType + const void* pNext + VkDeviceQueueCreateFlags flags + uint32_t queueFamilyIndex + uint32_t queueCount + const float* pQueuePriorities + + + VkStructureType sType + const void* pNext + VkDeviceCreateFlags flags + uint32_t queueCreateInfoCount + const VkDeviceQueueCreateInfo* pQueueCreateInfos + uint32_t enabledLayerCount + const char* const* ppEnabledLayerNamesOrdered list of layer names to be enabled + uint32_t enabledExtensionCount + const char* const* ppEnabledExtensionNames + const VkPhysicalDeviceFeatures* pEnabledFeatures + + + VkStructureType sType + const void* pNext + VkInstanceCreateFlags flags + const VkApplicationInfo* pApplicationInfo + uint32_t enabledLayerCount + const char* const* ppEnabledLayerNamesOrdered list of layer names to be enabled + uint32_t enabledExtensionCount + const char* const* ppEnabledExtensionNamesExtension names to be enabled + + + VkQueueFlags queueFlagsQueue flags + uint32_t queueCount + uint32_t timestampValidBits + VkExtent3D minImageTransferGranularityMinimum 
alignment requirement for image transfers + + + uint32_t memoryTypeCount + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES] + uint32_t memoryHeapCount + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS] + + + VkStructureType sType + const void* pNext + VkDeviceSize allocationSizeSize of memory allocation + uint32_t memoryTypeIndexIndex of the of the memory type to allocate from + + + VkDeviceSize sizeSpecified in bytes + VkDeviceSize alignmentSpecified in bytes + uint32_t memoryTypeBitsBitmask of the allowed memory type indices into memoryTypes[] for this object + + + VkImageAspectFlags aspectMask + VkExtent3D imageGranularity + VkSparseImageFormatFlags flags + + + VkSparseImageFormatProperties formatProperties + uint32_t imageMipTailFirstLod + VkDeviceSize imageMipTailSizeSpecified in bytes, must be a multiple of sparse block size in bytes / alignment + VkDeviceSize imageMipTailOffsetSpecified in bytes, must be a multiple of sparse block size in bytes / alignment + VkDeviceSize imageMipTailStrideSpecified in bytes, must be a multiple of sparse block size in bytes / alignment + + + VkMemoryPropertyFlags propertyFlagsMemory properties of this memory type + uint32_t heapIndexIndex of the memory heap allocations of this memory type are taken from + + + VkDeviceSize sizeAvailable memory in the heap + VkMemoryHeapFlags flagsFlags for the heap + + + VkStructureType sType + const void* pNext + VkDeviceMemory memoryMapped memory object + VkDeviceSize offsetOffset within the memory object where the range starts + VkDeviceSize sizeSize of the range within the memory object + + + VkFormatFeatureFlags linearTilingFeaturesFormat features in case of linear tiling + VkFormatFeatureFlags optimalTilingFeaturesFormat features in case of optimal tiling + VkFormatFeatureFlags bufferFeaturesFormat features supported by buffers + + + VkExtent3D maxExtentmax image dimensions for this resource type + uint32_t maxMipLevelsmax number of mipmap levels for this resource type + uint32_t maxArrayLayersmax array size for this resource type + VkSampleCountFlags sampleCountssupported sample counts for this resource type + VkDeviceSize maxResourceSizemax size (in bytes) of this resource type + + + VkBuffer bufferBuffer used for this descriptor slot when the descriptor is UNIFORM_BUFFER[_DYNAMIC] or STORAGE_BUFFER[_DYNAMIC]. VK_NULL_HANDLE otherwise. + VkDeviceSize offsetBase offset from buffer start in bytes to update in the descriptor set. + VkDeviceSize rangeSize in bytes of the buffer resource for this descriptor update. + + + VkSampler samplerSampler to write to the descriptor in case it is a SAMPLER or COMBINED_IMAGE_SAMPLER descriptor. Ignored otherwise. + VkImageView imageViewImage view to write to the descriptor in case it is a SAMPLED_IMAGE, STORAGE_IMAGE, COMBINED_IMAGE_SAMPLER, or INPUT_ATTACHMENT descriptor. Ignored otherwise. + VkImageLayout imageLayoutLayout the image is expected to be in when accessed using this descriptor (only used if imageView is not VK_NULL_HANDLE). 
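The VkDescriptorBufferInfo and VkDescriptorImageInfo members described above are consumed through VkWriteDescriptorSet (defined next). As a minimal sketch, assuming a descriptor set whose binding 0 was declared as a UNIFORM_BUFFER and with hypothetical handles supplied by the caller, a single-binding update could look like this:

    #include <vulkan/vulkan.h>

    /* Hypothetical helper: point binding 0 of descriptor_set at uniform_buffer. */
    void write_uniform_binding(VkDevice device, VkDescriptorSet descriptor_set,
                               VkBuffer uniform_buffer, VkDeviceSize size)
    {
        VkDescriptorBufferInfo buffer_info = {0};
        buffer_info.buffer = uniform_buffer; /* buffer backing the descriptor */
        buffer_info.offset = 0;              /* byte offset into that buffer  */
        buffer_info.range  = size;           /* bytes visible to the shader   */

        VkWriteDescriptorSet write = {0};
        write.sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        write.dstSet          = descriptor_set;
        write.dstBinding      = 0;           /* binding from the set layout   */
        write.dstArrayElement = 0;
        write.descriptorCount = 1;
        write.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        write.pBufferInfo     = &buffer_info; /* pImageInfo / pTexelBufferView
                                                 are ignored for buffer types */

        vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
    }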
+ + + VkStructureType sType + const void* pNext + VkDescriptorSet dstSetDestination descriptor set + uint32_t dstBindingBinding within the destination descriptor set to write + uint32_t dstArrayElementArray element within the destination binding to write + uint32_t descriptorCountNumber of descriptors to write (determines the size of the array pointed by pDescriptors) + VkDescriptorType descriptorTypeDescriptor type to write (determines which members of the array pointed by pDescriptors are going to be used) + const VkDescriptorImageInfo* pImageInfoSampler, image view, and layout for SAMPLER, COMBINED_IMAGE_SAMPLER, {SAMPLED,STORAGE}_IMAGE, and INPUT_ATTACHMENT descriptor types. + const VkDescriptorBufferInfo* pBufferInfoRaw buffer, size, and offset for {UNIFORM,STORAGE}_BUFFER[_DYNAMIC] descriptor types. + const VkBufferView* pTexelBufferViewBuffer view to write to the descriptor for {UNIFORM,STORAGE}_TEXEL_BUFFER descriptor types. + + + VkStructureType sType + const void* pNext + VkDescriptorSet srcSetSource descriptor set + uint32_t srcBindingBinding within the source descriptor set to copy from + uint32_t srcArrayElementArray element within the source binding to copy from + VkDescriptorSet dstSetDestination descriptor set + uint32_t dstBindingBinding within the destination descriptor set to copy to + uint32_t dstArrayElementArray element within the destination binding to copy to + uint32_t descriptorCountNumber of descriptors to write (determines the size of the array pointed by pDescriptors) + + + VkStructureType sType + const void* pNext + VkBufferCreateFlags flagsBuffer creation flags + VkDeviceSize sizeSpecified in bytes + VkBufferUsageFlags usageBuffer usage flags + VkSharingMode sharingMode + uint32_t queueFamilyIndexCount + const uint32_t* pQueueFamilyIndices + + + VkStructureType sType + const void* pNext + VkBufferViewCreateFlagsflags + VkBuffer buffer + VkFormat formatOptionally specifies format of elements + VkDeviceSize offsetSpecified in bytes + VkDeviceSize rangeView size specified in bytes + + + VkImageAspectFlags aspectMask + uint32_t mipLevel + uint32_t arrayLayer + + + VkImageAspectFlags aspectMask + uint32_t mipLevel + uint32_t baseArrayLayer + uint32_t layerCount + + + VkImageAspectFlags aspectMask + uint32_t baseMipLevel + uint32_t levelCount + uint32_t baseArrayLayer + uint32_t layerCount + + + VkStructureType sType + const void* pNext + VkAccessFlags srcAccessMaskMemory accesses from the source of the dependency to synchronize + VkAccessFlags dstAccessMaskMemory accesses from the destination of the dependency to synchronize + + + VkStructureType sType + const void* pNext + VkAccessFlags srcAccessMaskMemory accesses from the source of the dependency to synchronize + VkAccessFlags dstAccessMaskMemory accesses from the destination of the dependency to synchronize + uint32_t srcQueueFamilyIndexQueue family to transition ownership from + uint32_t dstQueueFamilyIndexQueue family to transition ownership to + VkBuffer bufferBuffer to sync + VkDeviceSize offsetOffset within the buffer to sync + VkDeviceSize sizeAmount of bytes to sync + + + VkStructureType sType + const void* pNext + VkAccessFlags srcAccessMaskMemory accesses from the source of the dependency to synchronize + VkAccessFlags dstAccessMaskMemory accesses from the destination of the dependency to synchronize + VkImageLayout oldLayoutCurrent layout of the image + VkImageLayout newLayoutNew layout to transition the image to + uint32_t srcQueueFamilyIndexQueue family to transition ownership from + uint32_t 
dstQueueFamilyIndexQueue family to transition ownership to + VkImage imageImage to sync + VkImageSubresourceRange subresourceRangeSubresource range to sync + + + VkStructureType sType + const void* pNext + VkImageCreateFlags flagsImage creation flags + VkImageType imageType + VkFormat format + VkExtent3D extent + uint32_t mipLevels + uint32_t arrayLayers + VkSampleCountFlagBits samples + VkImageTiling tiling + VkImageUsageFlags usageImage usage flags + VkSharingMode sharingModeCross-queue-family sharing mode + uint32_t queueFamilyIndexCountNumber of queue families to share across + const uint32_t* pQueueFamilyIndicesArray of queue family indices to share across + VkImageLayout initialLayoutInitial image layout for all subresources + + + VkDeviceSize offsetSpecified in bytes + VkDeviceSize sizeSpecified in bytes + VkDeviceSize rowPitchSpecified in bytes + VkDeviceSize arrayPitchSpecified in bytes + VkDeviceSize depthPitchSpecified in bytes + + + VkStructureType sType + const void* pNext + VkImageViewCreateFlags flags + VkImage image + VkImageViewType viewType + VkFormat format + VkComponentMapping components + VkImageSubresourceRange subresourceRange + + + VkDeviceSize srcOffsetSpecified in bytes + VkDeviceSize dstOffsetSpecified in bytes + VkDeviceSize sizeSpecified in bytes + + + VkDeviceSize resourceOffsetSpecified in bytes + VkDeviceSize sizeSpecified in bytes + VkDeviceMemory memory + VkDeviceSize memoryOffsetSpecified in bytes + VkSparseMemoryBindFlagsflags + + + VkImageSubresource subresource + VkOffset3D offset + VkExtent3D extent + VkDeviceMemory memory + VkDeviceSize memoryOffsetSpecified in bytes + VkSparseMemoryBindFlagsflags + + + VkBuffer buffer + uint32_t bindCount + const VkSparseMemoryBind* pBinds + + + VkImage image + uint32_t bindCount + const VkSparseMemoryBind* pBinds + + + VkImage image + uint32_t bindCount + const VkSparseImageMemoryBind* pBinds + + + VkStructureType sType + const void* pNext + uint32_t waitSemaphoreCount + const VkSemaphore* pWaitSemaphores + uint32_t bufferBindCount + const VkSparseBufferMemoryBindInfo* pBufferBinds + uint32_t imageOpaqueBindCount + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds + uint32_t imageBindCount + const VkSparseImageMemoryBindInfo* pImageBinds + uint32_t signalSemaphoreCount + const VkSemaphore* pSignalSemaphores + + + VkImageSubresourceLayers srcSubresource + VkOffset3D srcOffsetSpecified in pixels for both compressed and uncompressed images + VkImageSubresourceLayers dstSubresource + VkOffset3D dstOffsetSpecified in pixels for both compressed and uncompressed images + VkExtent3D extentSpecified in pixels for both compressed and uncompressed images + + + VkImageSubresourceLayers srcSubresource + VkOffset3D srcOffsets[2]Specified in pixels for both compressed and uncompressed images + VkImageSubresourceLayers dstSubresource + VkOffset3D dstOffsets[2]Specified in pixels for both compressed and uncompressed images + + + VkDeviceSize bufferOffsetSpecified in bytes + uint32_t bufferRowLengthSpecified in texels + uint32_t bufferImageHeight + VkImageSubresourceLayers imageSubresource + VkOffset3D imageOffsetSpecified in pixels for both compressed and uncompressed images + VkExtent3D imageExtentSpecified in pixels for both compressed and uncompressed images + + + VkImageSubresourceLayers srcSubresource + VkOffset3D srcOffset + VkImageSubresourceLayers dstSubresource + VkOffset3D dstOffset + VkExtent3D extent + + + VkStructureType sType + const void* pNext + VkShaderModuleCreateFlags flags + size_t codeSizeSpecified in 
bytes + const uint32_t* pCodeBinary code of size codeSize + + + uint32_t bindingBinding number for this entry + VkDescriptorType descriptorTypeType of the descriptors in this binding + uint32_t descriptorCountNumber of descriptors in this binding + VkShaderStageFlags stageFlagsShader stages this binding is visible to + const VkSampler* pImmutableSamplersImmutable samplers (used if descriptor type is SAMPLER or COMBINED_IMAGE_SAMPLER, is either NULL or contains count number of elements) + + + VkStructureType sType + const void* pNext + VkDescriptorSetLayoutCreateFlags flags + uint32_t bindingCountNumber of bindings in the descriptor set layout + const VkDescriptorSetLayoutBinding* pBindingsArray of descriptor set layout bindings + + + VkDescriptorType type + uint32_t descriptorCount + + + VkStructureType sType + const void* pNext + VkDescriptorPoolCreateFlags flags + uint32_t maxSets + uint32_t poolSizeCount + const VkDescriptorPoolSize* pPoolSizes + + + VkStructureType sType + const void* pNext + VkDescriptorPool descriptorPool + uint32_t descriptorSetCount + const VkDescriptorSetLayout* pSetLayouts + + + uint32_t constantIDThe SpecConstant ID specified in the BIL + uint32_t offsetOffset of the value in the data block + size_t sizeSize in bytes of the SpecConstant + + + uint32_t mapEntryCountNumber of entries in the map + const VkSpecializationMapEntry* pMapEntriesArray of map entries + size_t dataSizeSize in bytes of pData + const void* pDataPointer to SpecConstant data + + + VkStructureType sType + const void* pNext + VkPipelineShaderStageCreateFlags flags + VkShaderStageFlagBits stageShader stage + VkShaderModule moduleModule containing entry point + const char* pNameNull-terminated entry point name + const VkSpecializationInfo* pSpecializationInfo + + + VkStructureType sType + const void* pNext + VkPipelineCreateFlags flagsPipeline creation flags + VkPipelineShaderStageCreateInfo stage + VkPipelineLayout layoutInterface layout of the pipeline + VkPipeline basePipelineHandleIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of + int32_t basePipelineIndexIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of + + + uint32_t bindingVertex buffer binding id + uint32_t strideDistance between vertices in bytes (0 = no advancement) + VkVertexInputRate inputRateThe rate at which the vertex data is consumed + + + uint32_t locationlocation of the shader vertex attrib + uint32_t bindingVertex buffer binding id + VkFormat formatformat of source data + uint32_t offsetOffset of first element in bytes from base of vertex + + + VkStructureType sType + const void* pNext + VkPipelineVertexInputStateCreateFlags flags + uint32_t vertexBindingDescriptionCountnumber of bindings + const VkVertexInputBindingDescription* pVertexBindingDescriptions + uint32_t vertexAttributeDescriptionCountnumber of attributes + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions + + + VkStructureType sType + const void* pNext + VkPipelineInputAssemblyStateCreateFlags flags + VkPrimitiveTopology topology + VkBool32 primitiveRestartEnable + + + VkStructureType sType + const void* pNext + VkPipelineTessellationStateCreateFlags flags + uint32_t patchControlPoints + + + VkStructureType sType + const void* pNext + VkPipelineViewportStateCreateFlags flags + uint32_t viewportCount + const VkViewport* pViewports + uint32_t scissorCount 
+ const VkRect2D* pScissors + + + VkStructureType sType + const void* pNext + VkPipelineRasterizationStateCreateFlags flags + VkBool32 depthClampEnable + VkBool32 rasterizerDiscardEnable + VkPolygonMode polygonModeoptional (GL45) + VkCullModeFlags cullMode + VkFrontFace frontFace + VkBool32 depthBiasEnable + float depthBiasConstantFactor + float depthBiasClamp + float depthBiasSlopeFactor + float lineWidth + + + VkStructureType sType + const void* pNext + VkPipelineMultisampleStateCreateFlags flags + VkSampleCountFlagBits rasterizationSamplesNumber of samples used for rasterization + VkBool32 sampleShadingEnableoptional (GL45) + float minSampleShadingoptional (GL45) + const VkSampleMask* pSampleMaskArray of sampleMask words + VkBool32 alphaToCoverageEnable + VkBool32 alphaToOneEnable + + + VkBool32 blendEnable + VkBlendFactor srcColorBlendFactor + VkBlendFactor dstColorBlendFactor + VkBlendOp colorBlendOp + VkBlendFactor srcAlphaBlendFactor + VkBlendFactor dstAlphaBlendFactor + VkBlendOp alphaBlendOp + VkColorComponentFlags colorWriteMask + + + VkStructureType sType + const void* pNext + VkPipelineColorBlendStateCreateFlags flags + VkBool32 logicOpEnable + VkLogicOp logicOp + uint32_t attachmentCount# of pAttachments + const VkPipelineColorBlendAttachmentState* pAttachments + float blendConstants[4] + + + VkStructureType sType + const void* pNext + VkPipelineDynamicStateCreateFlags flags + uint32_t dynamicStateCount + const VkDynamicState* pDynamicStates + + + VkStencilOp failOp + VkStencilOp passOp + VkStencilOp depthFailOp + VkCompareOp compareOp + uint32_t compareMask + uint32_t writeMask + uint32_t reference + + + VkStructureType sType + const void* pNext + VkPipelineDepthStencilStateCreateFlags flags + VkBool32 depthTestEnable + VkBool32 depthWriteEnable + VkCompareOp depthCompareOp + VkBool32 depthBoundsTestEnableoptional (depth_bounds_test) + VkBool32 stencilTestEnable + VkStencilOpState front + VkStencilOpState back + float minDepthBounds + float maxDepthBounds + + + VkStructureType sType + const void* pNext + VkPipelineCreateFlags flagsPipeline creation flags + uint32_t stageCount + const VkPipelineShaderStageCreateInfo* pStagesOne entry for each active shader stage + const VkPipelineVertexInputStateCreateInfo* pVertexInputState + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState + const VkPipelineTessellationStateCreateInfo* pTessellationState + const VkPipelineViewportStateCreateInfo* pViewportState + const VkPipelineRasterizationStateCreateInfo* pRasterizationState + const VkPipelineMultisampleStateCreateInfo* pMultisampleState + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState + const VkPipelineColorBlendStateCreateInfo* pColorBlendState + const VkPipelineDynamicStateCreateInfo* pDynamicState + VkPipelineLayout layoutInterface layout of the pipeline + VkRenderPass renderPass + uint32_t subpass + VkPipeline basePipelineHandleIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of + int32_t basePipelineIndexIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of + + + VkStructureType sType + const void* pNext + VkPipelineCacheCreateFlags flags + size_t initialDataSizeSize of initial data to populate cache, in bytes + const void* pInitialDataInitial data to populate cache + + + VkShaderStageFlags stageFlagsWhich stages use the range + uint32_t offsetStart of the range, 
in bytes + uint32_t sizeSize of the range, in bytes + + + VkStructureType sType + const void* pNext + VkPipelineLayoutCreateFlags flags + uint32_t setLayoutCountNumber of descriptor sets interfaced by the pipeline + const VkDescriptorSetLayout* pSetLayoutsArray of setCount number of descriptor set layout objects defining the layout of the + uint32_t pushConstantRangeCountNumber of push-constant ranges used by the pipeline + const VkPushConstantRange* pPushConstantRangesArray of pushConstantRangeCount number of ranges used by various shader stages + + + VkStructureType sType + const void* pNext + VkSamplerCreateFlags flags + VkFilter magFilterFilter mode for magnification + VkFilter minFilterFilter mode for minifiation + VkSamplerMipmapMode mipmapModeMipmap selection mode + VkSamplerAddressMode addressModeU + VkSamplerAddressMode addressModeV + VkSamplerAddressMode addressModeW + float mipLodBias + VkBool32 anisotropyEnable + float maxAnisotropy + VkBool32 compareEnable + VkCompareOp compareOp + float minLod + float maxLod + VkBorderColor borderColor + VkBool32 unnormalizedCoordinates + + + VkStructureType sType + const void* pNext + VkCommandPoolCreateFlags flagsCommand pool creation flags + uint32_t queueFamilyIndex + + + VkStructureType sType + const void* pNext + VkCommandPool commandPool + VkCommandBufferLevel level + uint32_t commandBufferCount + + + VkStructureType sType + const void* pNext + VkRenderPass renderPassRender pass for secondary command buffers + uint32_t subpass + VkFramebuffer framebufferFramebuffer for secondary command buffers + VkBool32 occlusionQueryEnableWhether this secondary command buffer may be executed during an occlusion query + VkQueryControlFlags queryFlagsQuery flags used by this secondary command buffer, if executed during an occlusion query + VkQueryPipelineStatisticFlags pipelineStatisticsPipeline statistics that may be counted for this secondary command buffer + + + VkStructureType sType + const void* pNext + VkCommandBufferUsageFlags flagsCommand buffer usage flags + const VkCommandBufferInheritanceInfo* pInheritanceInfoPointer to inheritance info for secondary command buffers + + + VkStructureType sType + const void* pNext + VkRenderPass renderPass + VkFramebuffer framebuffer + VkRect2D renderArea + uint32_t clearValueCount + const VkClearValue* pClearValues + + + float float32[4] + int32_t int32[4] + uint32_t uint32[4] + + + float depth + uint32_t stencil + + + VkClearColorValue color + VkClearDepthStencilValue depthStencil + + + VkImageAspectFlags aspectMask + uint32_t colorAttachment + VkClearValue clearValue + + + VkAttachmentDescriptionFlags flags + VkFormat format + VkSampleCountFlagBits samples + VkAttachmentLoadOp loadOpLoad operation for color or depth data + VkAttachmentStoreOp storeOpStore operation for color or depth data + VkAttachmentLoadOp stencilLoadOpLoad operation for stencil data + VkAttachmentStoreOp stencilStoreOpStore operation for stencil data + VkImageLayout initialLayout + VkImageLayout finalLayout + + + uint32_t attachment + VkImageLayout layout + + + VkSubpassDescriptionFlags flags + VkPipelineBindPoint pipelineBindPointMust be VK_PIPELINE_BIND_POINT_GRAPHICS for now + uint32_t inputAttachmentCount + const VkAttachmentReference* pInputAttachments + uint32_t colorAttachmentCount + const VkAttachmentReference* pColorAttachments + const VkAttachmentReference* pResolveAttachments + const VkAttachmentReference* pDepthStencilAttachment + uint32_t preserveAttachmentCount + const uint32_t* pPreserveAttachments + + + uint32_t 
srcSubpass + uint32_t dstSubpass + VkPipelineStageFlags srcStageMask + VkPipelineStageFlags dstStageMask + VkAccessFlags srcAccessMaskMemory accesses from the source of the dependency to synchronize + VkAccessFlags dstAccessMaskMemory accesses from the destination of the dependency to synchronize + VkDependencyFlags dependencyFlags + + + VkStructureType sType + const void* pNext + VkRenderPassCreateFlags flags + uint32_t attachmentCount + const VkAttachmentDescription* pAttachments + uint32_t subpassCount + const VkSubpassDescription* pSubpasses + uint32_t dependencyCount + const VkSubpassDependency* pDependencies + + + VkStructureType sType + const void* pNext + VkEventCreateFlags flagsEvent creation flags + + + VkStructureType sType + const void* pNext + VkFenceCreateFlags flagsFence creation flags + + + VkBool32 robustBufferAccessout of bounds buffer accesses are well defined + VkBool32 fullDrawIndexUint32full 32-bit range of indices for indexed draw calls + VkBool32 imageCubeArrayimage views which are arrays of cube maps + VkBool32 independentBlendblending operations are controlled per-attachment + VkBool32 geometryShadergeometry stage + VkBool32 tessellationShadertessellation control and evaluation stage + VkBool32 sampleRateShadingper-sample shading and interpolation + VkBool32 dualSrcBlendblend operations which take two sources + VkBool32 logicOplogic operations + VkBool32 multiDrawIndirectmulti draw indirect + VkBool32 drawIndirectFirstInstanceindirect draws can use non-zero firstInstance + VkBool32 depthClampdepth clamping + VkBool32 depthBiasClampdepth bias clamping + VkBool32 fillModeNonSolidpoint and wireframe fill modes + VkBool32 depthBoundsdepth bounds test + VkBool32 wideLineslines with width greater than 1 + VkBool32 largePointspoints with size greater than 1 + VkBool32 alphaToOnethe fragment alpha component can be forced to maximum representable alpha value + VkBool32 multiViewportviewport arrays + VkBool32 samplerAnisotropyanisotropic sampler filtering + VkBool32 textureCompressionETC2ETC texture compression formats + VkBool32 textureCompressionASTC_LDRASTC LDR texture compression formats + VkBool32 textureCompressionBCBC1-7 texture compressed formats + VkBool32 occlusionQueryPreciseprecise occlusion queries returning actual sample counts + VkBool32 pipelineStatisticsQuerypipeline statistics query + VkBool32 vertexPipelineStoresAndAtomicsstores and atomic ops on storage buffers and images are supported in vertex, tessellation, and geometry stages + VkBool32 fragmentStoresAndAtomicsstores and atomic ops on storage buffers and images are supported in the fragment stage + VkBool32 shaderTessellationAndGeometryPointSizetessellation and geometry stages can export point size + VkBool32 shaderImageGatherExtendedimage gather with run-time values and independent offsets + VkBool32 shaderStorageImageExtendedFormatsthe extended set of formats can be used for storage images + VkBool32 shaderStorageImageMultisamplemultisample images can be used for storage images + VkBool32 shaderStorageImageReadWithoutFormatread from storage image does not require format qualifier + VkBool32 shaderStorageImageWriteWithoutFormatwrite to storage image does not require format qualifier + VkBool32 shaderUniformBufferArrayDynamicIndexingarrays of uniform buffers can be accessed with dynamically uniform indices + VkBool32 shaderSampledImageArrayDynamicIndexingarrays of sampled images can be accessed with dynamically uniform indices + VkBool32 shaderStorageBufferArrayDynamicIndexingarrays of storage 
buffers can be accessed with dynamically uniform indices + VkBool32 shaderStorageImageArrayDynamicIndexingarrays of storage images can be accessed with dynamically uniform indices + VkBool32 shaderClipDistanceclip distance in shaders + VkBool32 shaderCullDistancecull distance in shaders + VkBool32 shaderFloat6464-bit floats (doubles) in shaders + VkBool32 shaderInt6464-bit integers in shaders + VkBool32 shaderInt1616-bit integers in shaders + VkBool32 shaderResourceResidencyshader can use texture operations that return resource residency information (requires sparseNonResident support) + VkBool32 shaderResourceMinLodshader can use texture operations that specify minimum resource LOD + VkBool32 sparseBindingSparse resources support: Resource memory can be managed at opaque page level rather than object level + VkBool32 sparseResidencyBufferSparse resources support: GPU can access partially resident buffers + VkBool32 sparseResidencyImage2DSparse resources support: GPU can access partially resident 2D (non-MSAA non-depth/stencil) images + VkBool32 sparseResidencyImage3DSparse resources support: GPU can access partially resident 3D images + VkBool32 sparseResidency2SamplesSparse resources support: GPU can access partially resident MSAA 2D images with 2 samples + VkBool32 sparseResidency4SamplesSparse resources support: GPU can access partially resident MSAA 2D images with 4 samples + VkBool32 sparseResidency8SamplesSparse resources support: GPU can access partially resident MSAA 2D images with 8 samples + VkBool32 sparseResidency16SamplesSparse resources support: GPU can access partially resident MSAA 2D images with 16 samples + VkBool32 sparseResidencyAliasedSparse resources support: GPU can correctly access data aliased into multiple locations (opt-in) + VkBool32 variableMultisampleRatemultisample rate must be the same for all pipelines in a subpass + VkBool32 inheritedQueriesQueries may be inherited from primary to secondary command buffers + + + VkBool32 residencyStandard2DBlockShapeSparse resources support: GPU will access all 2D (single sample) sparse resources using the standard sparse image block shapes (based on pixel format) + VkBool32 residencyStandard2DMultisampleBlockShapeSparse resources support: GPU will access all 2D (multisample) sparse resources using the standard sparse image block shapes (based on pixel format) + VkBool32 residencyStandard3DBlockShapeSparse resources support: GPU will access all 3D sparse resources using the standard sparse image block shapes (based on pixel format) + VkBool32 residencyAlignedMipSizeSparse resources support: Images with mip level dimensions that are NOT a multiple of the sparse image block dimensions will be placed in the mip tail + VkBool32 residencyNonResidentStrictSparse resources support: GPU can consistently access non-resident regions of a resource, all reads return as if data is 0, writes are discarded + + + resource maximum sizes + uint32_t maxImageDimension1Dmax 1D image dimension + uint32_t maxImageDimension2Dmax 2D image dimension + uint32_t maxImageDimension3Dmax 3D image dimension + uint32_t maxImageDimensionCubemax cubemap image dimension + uint32_t maxImageArrayLayersmax layers for image arrays + uint32_t maxTexelBufferElementsmax texel buffer size (fstexels) + uint32_t maxUniformBufferRangemax uniform buffer range (bytes) + uint32_t maxStorageBufferRangemax storage buffer range (bytes) + uint32_t maxPushConstantsSizemax size of the push constants pool (bytes) + memory limits + uint32_t maxMemoryAllocationCountmax number of 
device memory allocations supported + uint32_t maxSamplerAllocationCountmax number of samplers that can be allocated on a device + VkDeviceSize bufferImageGranularityGranularity (in bytes) at which buffers and images can be bound to adjacent memory for simultaneous usage + VkDeviceSize sparseAddressSpaceSizeTotal address space available for sparse allocations (bytes) + descriptor set limits + uint32_t maxBoundDescriptorSetsmax number of descriptors sets that can be bound to a pipeline + uint32_t maxPerStageDescriptorSamplersmax number of samplers allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorUniformBuffersmax number of uniform buffers allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorStorageBuffersmax number of storage buffers allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorSampledImagesmax number of sampled images allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorStorageImagesmax number of storage images allowed per-stage in a descriptor set + uint32_t maxPerStageDescriptorInputAttachmentsmax number of input attachments allowed per-stage in a descriptor set + uint32_t maxPerStageResourcesmax number of resources allowed by a single stage + uint32_t maxDescriptorSetSamplersmax number of samplers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetUniformBuffersmax number of uniform buffers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetUniformBuffersDynamicmax number of dynamic uniform buffers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetStorageBuffersmax number of storage buffers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetStorageBuffersDynamicmax number of dynamic storage buffers allowed in all stages in a descriptor set + uint32_t maxDescriptorSetSampledImagesmax number of sampled images allowed in all stages in a descriptor set + uint32_t maxDescriptorSetStorageImagesmax number of storage images allowed in all stages in a descriptor set + uint32_t maxDescriptorSetInputAttachmentsmax number of input attachments allowed in all stages in a descriptor set + vertex stage limits + uint32_t maxVertexInputAttributesmax number of vertex input attribute slots + uint32_t maxVertexInputBindingsmax number of vertex input binding slots + uint32_t maxVertexInputAttributeOffsetmax vertex input attribute offset added to vertex buffer offset + uint32_t maxVertexInputBindingStridemax vertex input binding stride + uint32_t maxVertexOutputComponentsmax number of output components written by vertex shader + tessellation control stage limits + uint32_t maxTessellationGenerationLevelmax level supported by tessellation primitive generator + uint32_t maxTessellationPatchSizemax patch size (vertices) + uint32_t maxTessellationControlPerVertexInputComponentsmax number of input components per-vertex in TCS + uint32_t maxTessellationControlPerVertexOutputComponentsmax number of output components per-vertex in TCS + uint32_t maxTessellationControlPerPatchOutputComponentsmax number of output components per-patch in TCS + uint32_t maxTessellationControlTotalOutputComponentsmax total number of per-vertex and per-patch output components in TCS + tessellation evaluation stage limits + uint32_t maxTessellationEvaluationInputComponentsmax number of input components per vertex in TES + uint32_t maxTessellationEvaluationOutputComponentsmax number of output components per vertex in TES + geometry stage limits + uint32_t maxGeometryShaderInvocationsmax 
invocation count supported in geometry shader + uint32_t maxGeometryInputComponentsmax number of input components read in geometry stage + uint32_t maxGeometryOutputComponentsmax number of output components written in geometry stage + uint32_t maxGeometryOutputVerticesmax number of vertices that can be emitted in geometry stage + uint32_t maxGeometryTotalOutputComponentsmax total number of components (all vertices) written in geometry stage + fragment stage limits + uint32_t maxFragmentInputComponentsmax number of input components read in fragment stage + uint32_t maxFragmentOutputAttachmentsmax number of output attachments written in fragment stage + uint32_t maxFragmentDualSrcAttachmentsmax number of output attachments written when using dual source blending + uint32_t maxFragmentCombinedOutputResourcesmax total number of storage buffers, storage images and output buffers + compute stage limits + uint32_t maxComputeSharedMemorySizemax total storage size of work group local storage (bytes) + uint32_t maxComputeWorkGroupCount[3]max num of compute work groups that may be dispatched by a single command (x,y,z) + uint32_t maxComputeWorkGroupInvocationsmax total compute invocations in a single local work group + uint32_t maxComputeWorkGroupSize[3]max local size of a compute work group (x,y,z) + uint32_t subPixelPrecisionBitsnumber bits of subpixel precision in screen x and y + uint32_t subTexelPrecisionBitsnumber bits of precision for selecting texel weights + uint32_t mipmapPrecisionBitsnumber bits of precision for selecting mipmap weights + uint32_t maxDrawIndexedIndexValuemax index value for indexed draw calls (for 32-bit indices) + uint32_t maxDrawIndirectCountmax draw count for indirect draw calls + float maxSamplerLodBiasmax absolute sampler LOD bias + float maxSamplerAnisotropymax degree of sampler anisotropy + uint32_t maxViewportsmax number of active viewports + uint32_t maxViewportDimensions[2]max viewport dimensions (x,y) + float viewportBoundsRange[2]viewport bounds range (min,max) + uint32_t viewportSubPixelBitsnumber bits of subpixel precision for viewport + size_t minMemoryMapAlignmentmin required alignment of pointers returned by MapMemory (bytes) + VkDeviceSize minTexelBufferOffsetAlignmentmin required alignment for texel buffer offsets (bytes) + VkDeviceSize minUniformBufferOffsetAlignmentmin required alignment for uniform buffer sizes and offsets (bytes) + VkDeviceSize minStorageBufferOffsetAlignmentmin required alignment for storage buffer offsets (bytes) + int32_t minTexelOffsetmin texel offset for OpTextureSampleOffset + uint32_t maxTexelOffsetmax texel offset for OpTextureSampleOffset + int32_t minTexelGatherOffsetmin texel offset for OpTextureGatherOffset + uint32_t maxTexelGatherOffsetmax texel offset for OpTextureGatherOffset + float minInterpolationOffsetfurthest negative offset for interpolateAtOffset + float maxInterpolationOffsetfurthest positive offset for interpolateAtOffset + uint32_t subPixelInterpolationOffsetBitsnumber of subpixel bits for interpolateAtOffset + uint32_t maxFramebufferWidthmax width for a framebuffer + uint32_t maxFramebufferHeightmax height for a framebuffer + uint32_t maxFramebufferLayersmax layer count for a layered framebuffer + VkSampleCountFlags framebufferColorSampleCountssupported color sample counts for a framebuffer + VkSampleCountFlags framebufferDepthSampleCountssupported depth sample counts for a framebuffer + VkSampleCountFlags framebufferStencilSampleCountssupported stencil sample counts for a framebuffer + VkSampleCountFlags 
framebufferNoAttachmentsSampleCountssupported sample counts for a framebuffer with no attachments + uint32_t maxColorAttachmentsmax number of color attachments per subpass + VkSampleCountFlags sampledImageColorSampleCountssupported color sample counts for a non-integer sampled image + VkSampleCountFlags sampledImageIntegerSampleCountssupported sample counts for an integer image + VkSampleCountFlags sampledImageDepthSampleCountssupported depth sample counts for a sampled image + VkSampleCountFlags sampledImageStencilSampleCountssupported stencil sample counts for a sampled image + VkSampleCountFlags storageImageSampleCountssupported sample counts for a storage image + uint32_t maxSampleMaskWordsmax number of sample mask words + VkBool32 timestampComputeAndGraphicstimestamps on graphics and compute queues + float timestampPeriodnumber of nanoseconds it takes for timestamp query value to increment by 1 + uint32_t maxClipDistancesmax number of clip distances + uint32_t maxCullDistancesmax number of cull distances + uint32_t maxCombinedClipAndCullDistancesmax combined number of user clipping + uint32_t discreteQueuePrioritiesdistinct queue priorities available + float pointSizeRange[2]range (min,max) of supported point sizes + float lineWidthRange[2]range (min,max) of supported line widths + float pointSizeGranularitygranularity of supported point sizes + float lineWidthGranularitygranularity of supported line widths + VkBool32 strictLinesline rasterization follows preferred rules + VkBool32 standardSampleLocationssupports standard sample locations for all supported sample counts + VkDeviceSize optimalBufferCopyOffsetAlignmentoptimal offset of buffer copies + VkDeviceSize optimalBufferCopyRowPitchAlignmentoptimal pitch of buffer copies + VkDeviceSize nonCoherentAtomSizeminimum size and alignment for non-coherent host-mapped device memory access + + + VkStructureType sType + const void* pNext + VkSemaphoreCreateFlags flagsSemaphore creation flags + + + VkStructureType sType + const void* pNext + VkQueryPoolCreateFlags flags + VkQueryType queryType + uint32_t queryCount + VkQueryPipelineStatisticFlags pipelineStatisticsOptional + + + VkStructureType sType + const void* pNext + VkFramebufferCreateFlags flags + VkRenderPass renderPass + uint32_t attachmentCount + const VkImageView* pAttachments + uint32_t width + uint32_t height + uint32_t layers + + + uint32_t vertexCount + uint32_t instanceCount + uint32_t firstVertex + uint32_t firstInstance + + + uint32_t indexCount + uint32_t instanceCount + uint32_t firstIndex + int32_t vertexOffset + uint32_t firstInstance + + + uint32_t x + uint32_t y + uint32_t z + + + VkStructureType sType + const void* pNext + uint32_t waitSemaphoreCount + const VkSemaphore* pWaitSemaphores + const VkPipelineStageFlags* pWaitDstStageMask + uint32_t commandBufferCount + const VkCommandBuffer* pCommandBuffers + uint32_t signalSemaphoreCount + const VkSemaphore* pSignalSemaphores + + WSI extensions + + VkDisplayKHR displayHandle of the display object + const char* displayNameName of the display + VkExtent2D physicalDimensionsIn millimeters? + VkExtent2D physicalResolutionMax resolution for CRT? + VkSurfaceTransformFlagsKHR supportedTransformsone or more bits from VkSurfaceTransformFlagsKHR + VkBool32 planeReorderPossibleVK_TRUE if the overlay plane's z-order can be changed on this display. + VkBool32 persistentContentVK_TRUE if this is a "smart" display that supports self-refresh/internal buffering. 
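
The feature and limit members listed above are consumed at runtime through vkGetPhysicalDeviceFeatures and vkGetPhysicalDeviceProperties (both declared later in this registry). A minimal sketch, not part of the registry itself; the helper name print_device_caps is illustrative and it assumes a physical device has already been enumerated:

    /* Illustrative helper (assumption: physicalDevice was obtained via
     * vkEnumeratePhysicalDevices).  Shows how the VkBool32 feature members
     * and the VkPhysicalDeviceLimits members above are read back. */
    #include <vulkan/vulkan.h>
    #include <stdio.h>

    void print_device_caps(VkPhysicalDevice physicalDevice)
    {
        VkPhysicalDeviceFeatures features;
        VkPhysicalDeviceProperties properties;

        vkGetPhysicalDeviceFeatures(physicalDevice, &features);
        vkGetPhysicalDeviceProperties(physicalDevice, &properties);

        /* Feature bits are VkBool32 values such as sparseBinding above. */
        printf("sparseBinding supported: %s\n",
               features.sparseBinding ? "yes" : "no");

        /* Limits live in VkPhysicalDeviceLimits, e.g. maxImageDimension2D. */
        printf("maxImageDimension2D:    %u\n",
               properties.limits.maxImageDimension2D);
        printf("maxBoundDescriptorSets: %u\n",
               properties.limits.maxBoundDescriptorSets);
    }
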
+ + + VkDisplayKHR currentDisplayDisplay the plane is currently associated with. Will be VK_NULL_HANDLE if the plane is not in use. + uint32_t currentStackIndexCurrent z-order of the plane. + + + VkExtent2D visibleRegionVisible scanout region. + uint32_t refreshRateNumber of times per second the display is updated. + + + VkDisplayModeKHR displayModeHandle of this display mode. + VkDisplayModeParametersKHR parametersThe parameters this mode uses. + + + VkStructureType sType + const void* pNext + VkDisplayModeCreateFlagsKHR flags + VkDisplayModeParametersKHR parametersThe parameters this mode uses. + + + VkDisplayPlaneAlphaFlagsKHR supportedAlphaTypes of alpha blending supported, if any. + VkOffset2D minSrcPositionDoes the plane have any position and extent restrictions? + VkOffset2D maxSrcPosition + VkExtent2D minSrcExtent + VkExtent2D maxSrcExtent + VkOffset2D minDstPosition + VkOffset2D maxDstPosition + VkExtent2D minDstExtent + VkExtent2D maxDstExtent + + + VkStructureType sType + const void* pNext + VkDisplaySurfaceCreateFlagsKHR flags + VkDisplayModeKHR displayModeThe mode to use when displaying this surface + uint32_t planeIndexThe plane on which this surface appears. Must be between 0 and the value returned by vkGetPhysicalDeviceDisplayPlanePropertiesKHR() in pPropertyCount. + uint32_t planeStackIndexThe z-order of the plane. + VkSurfaceTransformFlagBitsKHR transformTransform to apply to the images as part of the scanout operation + float globalAlphaGlobal alpha value. Must be between 0 and 1, inclusive. Ignored if alphaMode is not VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR + VkDisplayPlaneAlphaFlagBitsKHR alphaModeWhat type of alpha blending to use. Must be a bit from vkGetDisplayPlanePropertiesKHR::supportedAlpha. + VkExtent2D imageExtentsize of the images to use with this surface + + + VkStructureType sType + const void* pNext + VkRect2D srcRectRectangle within the presentable image to read pixel data from when presenting to the display. + VkRect2D dstRectRectangle within the current display mode's visible region to display srcRectangle in. + VkBool32 persistentFor smart displays, use buffered mode. If the display properties member "persistentMode" is VK_FALSE, this member must always be VK_FALSE. 
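
The VK_KHR_display structures above feed directly into surface creation. A hedged sketch of filling VkDisplaySurfaceCreateInfoKHR, assuming the displayMode, planeIndex, and imageExtent arguments were chosen beforehand from vkGetDisplayModePropertiesKHR and vkGetPhysicalDeviceDisplayPlanePropertiesKHR; the wrapper function name is illustrative only:

    /* Sketch only: creating a VkSurfaceKHR directly on a display plane.
     * Assumption: displayMode and planeIndex were selected from the
     * KHR_display query functions and the extension is enabled. */
    #include <vulkan/vulkan.h>

    VkResult create_display_surface(VkInstance instance,
                                    VkDisplayModeKHR displayMode,
                                    uint32_t planeIndex,
                                    VkExtent2D imageExtent,
                                    VkSurfaceKHR *pSurface)
    {
        VkDisplaySurfaceCreateInfoKHR createInfo = {0};
        createInfo.sType = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR;
        createInfo.displayMode = displayMode;
        createInfo.planeIndex = planeIndex;
        createInfo.planeStackIndex = 0;
        createInfo.transform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
        createInfo.globalAlpha = 1.0f; /* ignored unless alphaMode is GLOBAL */
        createInfo.alphaMode = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR;
        createInfo.imageExtent = imageExtent;

        return vkCreateDisplayPlaneSurfaceKHR(instance, &createInfo, NULL, pSurface);
    }
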
+ + + uint32_t minImageCountSupported minimum number of images for the surface + uint32_t maxImageCountSupported maximum number of images for the surface, 0 for unlimited + VkExtent2D currentExtentCurrent image width and height for the surface, (0, 0) if undefined + VkExtent2D minImageExtentSupported minimum image width and height for the surface + VkExtent2D maxImageExtentSupported maximum image width and height for the surface + uint32_t maxImageArrayLayersSupported maximum number of image layers for the surface + VkSurfaceTransformFlagsKHR supportedTransforms1 or more bits representing the transforms supported + VkSurfaceTransformFlagBitsKHR currentTransformThe surface's current transform relative to the device's natural orientation + VkCompositeAlphaFlagsKHR supportedCompositeAlpha1 or more bits representing the alpha compositing modes supported + VkImageUsageFlags supportedUsageFlagsSupported image usage flags for the surface + + + VkStructureType sType + const void* pNext + VkAndroidSurfaceCreateFlagsKHR flags + struct ANativeWindow* window + + + VkStructureType sType + const void* pNext + VkMirSurfaceCreateFlagsKHR flags + MirConnection* connection + MirSurface* mirSurface + + + VkStructureType sType + const void* pNext + VkViSurfaceCreateFlagsNN flags + void* window + + + VkStructureType sType + const void* pNext + VkWaylandSurfaceCreateFlagsKHR flags + struct wl_display* display + struct wl_surface* surface + + + VkStructureType sType + const void* pNext + VkWin32SurfaceCreateFlagsKHR flags + HINSTANCE hinstance + HWND hwnd + + + VkStructureType sType + const void* pNext + VkXlibSurfaceCreateFlagsKHR flags + Display* dpy + Window window + + + VkStructureType sType + const void* pNext + VkXcbSurfaceCreateFlagsKHR flags + xcb_connection_t* connection + xcb_window_t window + + + VkFormat formatSupported pair of rendering format + VkColorSpaceKHR colorSpaceand color space for the surface + + + VkStructureType sType + const void* pNext + VkSwapchainCreateFlagsKHR flags + VkSurfaceKHR surfaceThe swapchain's target surface + uint32_t minImageCountMinimum number of presentation images the application needs + VkFormat imageFormatFormat of the presentation images + VkColorSpaceKHR imageColorSpaceColorspace of the presentation images + VkExtent2D imageExtentDimensions of the presentation images + uint32_t imageArrayLayersDetermines the number of views for multiview/stereo presentation + VkImageUsageFlags imageUsageBits indicating how the presentation images will be used + VkSharingMode imageSharingModeSharing mode used for the presentation images + uint32_t queueFamilyIndexCountNumber of queue families having access to the images in case of concurrent sharing mode + const uint32_t* pQueueFamilyIndicesArray of queue family indices having access to the images in case of concurrent sharing mode + VkSurfaceTransformFlagBitsKHR preTransformThe transform, relative to the device's natural orientation, applied to the image content prior to presentation + VkCompositeAlphaFlagBitsKHR compositeAlphaThe alpha blending mode used when compositing this surface with other surfaces in the window system + VkPresentModeKHR presentModeWhich presentation mode to use for presents on this swap chain + VkBool32 clippedSpecifies whether presentable images may be affected by window clip regions + VkSwapchainKHR oldSwapchainExisting swap chain to replace, if any + + + VkStructureType sType + const void* pNext + uint32_t waitSemaphoreCountNumber of semaphores to wait for before presenting + const VkSemaphore* 
pWaitSemaphoresSemaphores to wait for before presenting + uint32_t swapchainCountNumber of swapchains to present in this call + const VkSwapchainKHR* pSwapchainsSwapchains to present an image from + const uint32_t* pImageIndicesIndices of which presentable images to present + VkResult* pResultsOptional (i.e. if non-NULL) VkResult for each swapchain + + + VkStructureType sType + const void* pNext + VkDebugReportFlagsEXT flagsIndicates which events call this callback + PFN_vkDebugReportCallbackEXT pfnCallbackFunction pointer of a callback function + void* pUserDataUser data provided to callback function + + + VkStructureType sTypeMust be VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT + const void* pNext + uint32_t disabledValidationCheckCountNumber of validation checks to disable + VkValidationCheckEXT* pDisabledValidationChecksValidation checks to disable + + + VkStructureType sType + const void* pNext + VkRasterizationOrderAMD rasterizationOrderRasterization order to use for the pipeline + + + VkStructureType sType + const void* pNext + VkDebugReportObjectTypeEXT objectTypeThe type of the object + uint64_t objectThe handle of the object, cast to uint64_t + const char* pObjectNameName to apply to the object + + + VkStructureType sType + const void* pNext + VkDebugReportObjectTypeEXT objectTypeThe type of the object + uint64_t objectThe handle of the object, cast to uint64_t + uint64_t tagNameThe name of the tag to set on the object + size_t tagSizeThe length in bytes of the tag data + const void* pTagTag data to attach to the object + + + VkStructureType sType + const void* pNext + const char* pMarkerNameName of the debug marker + float color[4]Optional color for debug marker + + + VkStructureType sType + const void* pNext + VkBool32 dedicatedAllocationWhether this image uses a dedicated allocation + + + VkStructureType sType + const void* pNext + VkBool32 dedicatedAllocationWhether this buffer uses a dedicated allocation + + + VkStructureType sType + const void* pNext + VkImage imageImage that this allocation will be bound to + VkBuffer bufferBuffer that this allocation will be bound to + + + VkImageFormatProperties imageFormatProperties + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlagsNV handleTypes + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlagsNV handleTypes + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlagsNV handleType + HANDLE handle + + + VkStructureType sType + const void* pNext + const SECURITY_ATTRIBUTES* pAttributes + DWORD dwAccess + + + VkStructureType sType + const void* pNext + uint32_t acquireCount + const VkDeviceMemory* pAcquireSyncs + const uint64_t* pAcquireKeys + const uint32_t* pAcquireTimeoutMilliseconds + uint32_t releaseCount + const VkDeviceMemory* pReleaseSyncs + const uint64_t* pReleaseKeys + + + VkStructureType sType + const void* pNext + VkBool32 computeBindingPointSupport + + + VkStructureType sType + const void* pNext + uint32_t maxIndirectCommandsLayoutTokenCount + uint32_t maxObjectEntryCounts + uint32_t minSequenceCountBufferOffsetAlignment + uint32_t minSequenceIndexBufferOffsetAlignment + uint32_t minCommandsTokenBufferOffsetAlignment + + + VkIndirectCommandsTokenTypeNVX tokenType + VkBuffer bufferbuffer containing tableEntries and additional data for indirectCommands + VkDeviceSize 
offsetoffset from the base address of the buffer + + + VkIndirectCommandsTokenTypeNVX tokenType + uint32_t bindingUnitBinding unit for vertex attribute / descriptor set, offset for pushconstants + uint32_t dynamicCountNumber of variable dynamic values for descriptor set / push constants + uint32_t divisorRate the which the array is advanced per element (must be power of 2, minimum 1) + + + VkStructureType sType + const void* pNext + VkPipelineBindPoint pipelineBindPoint + VkIndirectCommandsLayoutUsageFlagsNVX flags + uint32_t tokenCount + const VkIndirectCommandsLayoutTokenNVX* pTokens + + + VkStructureType sType + const void* pNext + VkObjectTableNVX objectTable + VkIndirectCommandsLayoutNVX indirectCommandsLayout + uint32_t indirectCommandsTokenCount + const VkIndirectCommandsTokenNVX* pIndirectCommandsTokens + uint32_t maxSequencesCount + VkCommandBuffer targetCommandBuffer + VkBuffer sequencesCountBuffer + VkDeviceSize sequencesCountOffset + VkBuffer sequencesIndexBuffer + VkDeviceSize sequencesIndexOffset + + + VkStructureType sType + const void* pNext + VkObjectTableNVX objectTable + VkIndirectCommandsLayoutNVX indirectCommandsLayout + uint32_t maxSequencesCount + + + VkStructureType sType + const void* pNext + uint32_t objectCount + const VkObjectEntryTypeNVX* pObjectEntryTypes + const uint32_t* pObjectEntryCounts + const VkObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags + + uint32_t maxUniformBuffersPerDescriptor + uint32_t maxStorageBuffersPerDescriptor + uint32_t maxStorageImagesPerDescriptor + uint32_t maxSampledImagesPerDescriptor + uint32_t maxPipelineLayouts + + + VkObjectEntryTypeNVX type + VkObjectEntryUsageFlagsNVX flags + + + VkObjectEntryTypeNVX type + VkObjectEntryUsageFlagsNVX flags + VkPipeline pipeline + + + VkObjectEntryTypeNVX type + VkObjectEntryUsageFlagsNVX flags + VkPipelineLayout pipelineLayout + VkDescriptorSet descriptorSet + + + VkObjectEntryTypeNVX type + VkObjectEntryUsageFlagsNVX flags + VkBuffer buffer + + + VkObjectEntryTypeNVX type + VkObjectEntryUsageFlagsNVX flags + VkBuffer buffer + VkIndexType indexType + + + VkObjectEntryTypeNVX type + VkObjectEntryUsageFlagsNVX flags + VkPipelineLayout pipelineLayout + VkShaderStageFlags stageFlags + + + VkStructureType sType + void* pNext + VkPhysicalDeviceFeatures features + + + + VkStructureType sType + void* pNext + VkPhysicalDeviceProperties properties + + + + VkStructureType sType + void* pNext + VkFormatProperties formatProperties + + + + VkStructureType sType + void* pNext + VkImageFormatProperties imageFormatProperties + + + + VkStructureType sType + const void* pNext + VkFormat format + VkImageType type + VkImageTiling tiling + VkImageUsageFlags usage + VkImageCreateFlags flags + + + + VkStructureType sType + void* pNext + VkQueueFamilyProperties queueFamilyProperties + + + + VkStructureType sType + void* pNext + VkPhysicalDeviceMemoryProperties memoryProperties + + + + VkStructureType sType + void* pNext + VkSparseImageFormatProperties properties + + + + VkStructureType sType + const void* pNext + VkFormat format + VkImageType type + VkSampleCountFlagBits samples + VkImageUsageFlags usage + VkImageTiling tiling + + + + VkStructureType sType + void* pNext + uint32_t maxPushDescriptors + + + VkStructureType sType + const void* pNext + uint32_t swapchainCountCopy of VkPresentInfoKHR::swapchainCount + const VkPresentRegionKHR* pRegionsThe regions that have changed + + + uint32_t rectangleCountNumber of rectangles in pRectangles + const VkRectLayerKHR* pRectanglesArray of rectangles that have changed in a 
swapchain's image(s) + + + VkOffset2D offsetupper-left corner of a rectangle that has not changed, in pixels of a presentation images + VkExtent2D extentDimensions of a rectangle that has not changed, in pixels of a presentation images + uint32_t layerLayer of a swapchain's image(s), for stereoscopic-3D images + + + VkStructureType sType + void* pNext + VkBool32 variablePointersStorageBuffer + VkBool32 variablePointers + + + + VkExternalMemoryFeatureFlags externalMemoryFeatures + VkExternalMemoryHandleTypeFlags exportFromImportedHandleTypes + VkExternalMemoryHandleTypeFlags compatibleHandleTypes + + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlagBits handleType + + + + VkStructureType sType + void* pNext + VkExternalMemoryProperties externalMemoryProperties + + + + VkStructureType sType + const void* pNext + VkBufferCreateFlags flags + VkBufferUsageFlags usage + VkExternalMemoryHandleTypeFlagBits handleType + + + + VkStructureType sType + void* pNext + VkExternalMemoryProperties externalMemoryProperties + + + + VkStructureType sType + void* pNext + uint8_t deviceUUID[VK_UUID_SIZE] + uint8_t driverUUID[VK_UUID_SIZE] + uint8_t deviceLUID[VK_LUID_SIZE] + uint32_t deviceNodeMask + VkBool32 deviceLUIDValid + + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlags handleTypes + + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlags handleTypes + + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlags handleTypes + + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlagBits handleType + HANDLE handle + LPCWSTR name + + + VkStructureType sType + const void* pNext + const SECURITY_ATTRIBUTES* pAttributes + DWORD dwAccess + LPCWSTR name + + + VkStructureType sType + void* pNext + uint32_t memoryTypeBits + + + VkStructureType sType + const void* pNext + VkDeviceMemory memory + VkExternalMemoryHandleTypeFlagBits handleType + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlagBits handleType + int fd + + + VkStructureType sType + void* pNext + uint32_t memoryTypeBits + + + VkStructureType sType + const void* pNext + VkDeviceMemory memory + VkExternalMemoryHandleTypeFlagBits handleType + + + VkStructureType sType + const void* pNext + uint32_t acquireCount + const VkDeviceMemory* pAcquireSyncs + const uint64_t* pAcquireKeys + const uint32_t* pAcquireTimeouts + uint32_t releaseCount + const VkDeviceMemory* pReleaseSyncs + const uint64_t* pReleaseKeys + + + VkStructureType sType + const void* pNext + VkExternalSemaphoreHandleTypeFlagBits handleType + + + + VkStructureType sType + void* pNext + VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes + VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes + VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures + + + + VkStructureType sType + const void* pNext + VkExternalSemaphoreHandleTypeFlags handleTypes + + + + VkStructureType sType + const void* pNext + VkSemaphore semaphore + VkSemaphoreImportFlags flags + VkExternalSemaphoreHandleTypeFlagBits handleType + HANDLE handle + LPCWSTR name + + + VkStructureType sType + const void* pNext + const SECURITY_ATTRIBUTES* pAttributes + DWORD dwAccess + LPCWSTR name + + + VkStructureType sType + const void* pNext + uint32_t waitSemaphoreValuesCount + const uint64_t* pWaitSemaphoreValues + uint32_t signalSemaphoreValuesCount + const uint64_t* pSignalSemaphoreValues + + + VkStructureType sType + const void* pNext + VkSemaphore semaphore + 
VkExternalSemaphoreHandleTypeFlagBits handleType + + + VkStructureType sType + const void* pNext + VkSemaphore semaphore + VkSemaphoreImportFlags flags + VkExternalSemaphoreHandleTypeFlagBits handleType + int fd + + + VkStructureType sType + const void* pNext + VkSemaphore semaphore + VkExternalSemaphoreHandleTypeFlagBits handleType + + + VkStructureType sType + const void* pNext + VkExternalFenceHandleTypeFlagBits handleType + + + + VkStructureType sType + void* pNext + VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes + VkExternalFenceHandleTypeFlags compatibleHandleTypes + VkExternalFenceFeatureFlags externalFenceFeatures + + + + VkStructureType sType + const void* pNext + VkExternalFenceHandleTypeFlags handleTypes + + + + VkStructureType sType + const void* pNext + VkFence fence + VkFenceImportFlags flags + VkExternalFenceHandleTypeFlagBits handleType + HANDLE handle + LPCWSTR name + + + VkStructureType sType + const void* pNext + const SECURITY_ATTRIBUTES* pAttributes + DWORD dwAccess + LPCWSTR name + + + VkStructureType sType + const void* pNext + VkFence fence + VkExternalFenceHandleTypeFlagBits handleType + + + VkStructureType sType + const void* pNext + VkFence fence + VkFenceImportFlags flags + VkExternalFenceHandleTypeFlagBits handleType + int fd + + + VkStructureType sType + const void* pNext + VkFence fence + VkExternalFenceHandleTypeFlagBits handleType + + + VkStructureType sType + void* pNext + VkBool32 multiviewMultiple views in a renderpass + VkBool32 multiviewGeometryShaderMultiple views in a renderpass w/ geometry shader + VkBool32 multiviewTessellationShaderMultiple views in a renderpass w/ tessellation shader + + + + VkStructureType sType + void* pNext + uint32_t maxMultiviewViewCountmax number of views in a subpass + uint32_t maxMultiviewInstanceIndexmax instance index for a draw in a multiview subpass + + + + VkStructureType sType + const void* pNext + uint32_t subpassCount + const uint32_t* pViewMasks + uint32_t dependencyCount + const int32_t* pViewOffsets + uint32_t correlationMaskCount + const uint32_t* pCorrelationMasks + + + + VkStructureType sType + void* pNext + uint32_t minImageCountSupported minimum number of images for the surface + uint32_t maxImageCountSupported maximum number of images for the surface, 0 for unlimited + VkExtent2D currentExtentCurrent image width and height for the surface, (0, 0) if undefined + VkExtent2D minImageExtentSupported minimum image width and height for the surface + VkExtent2D maxImageExtentSupported maximum image width and height for the surface + uint32_t maxImageArrayLayersSupported maximum number of image layers for the surface + VkSurfaceTransformFlagsKHR supportedTransforms1 or more bits representing the transforms supported + VkSurfaceTransformFlagBitsKHR currentTransformThe surface's current transform relative to the device's natural orientation + VkCompositeAlphaFlagsKHR supportedCompositeAlpha1 or more bits representing the alpha compositing modes supported + VkImageUsageFlags supportedUsageFlagsSupported image usage flags for the surface + VkSurfaceCounterFlagsEXT supportedSurfaceCounters + + + VkStructureType sType + const void* pNext + VkDisplayPowerStateEXT powerState + + + VkStructureType sType + const void* pNext + VkDeviceEventTypeEXT deviceEvent + + + VkStructureType sType + const void* pNext + VkDisplayEventTypeEXT displayEvent + + + VkStructureType sType + const void* pNext + VkSurfaceCounterFlagsEXT surfaceCounters + + + VkStructureType sType + void* pNext + uint32_t physicalDeviceCount + 
VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE] + VkBool32 subsetAllocation + + + + VkStructureType sType + const void* pNext + VkMemoryAllocateFlags flags + uint32_t deviceMask + + + + VkStructureType sType + const void* pNext + VkBuffer buffer + VkDeviceMemory memory + VkDeviceSize memoryOffset + + + + VkStructureType sType + const void* pNext + uint32_t deviceIndexCount + const uint32_t* pDeviceIndices + + + + VkStructureType sType + const void* pNext + VkImage image + VkDeviceMemory memory + VkDeviceSize memoryOffset + + + + VkStructureType sType + const void* pNext + uint32_t deviceIndexCount + const uint32_t* pDeviceIndices + uint32_t splitInstanceBindRegionCount + const VkRect2D* pSplitInstanceBindRegions + + + + VkStructureType sType + const void* pNext + uint32_t deviceMask + uint32_t deviceRenderAreaCount + const VkRect2D* pDeviceRenderAreas + + + + VkStructureType sType + const void* pNext + uint32_t deviceMask + + + + VkStructureType sType + const void* pNext + uint32_t waitSemaphoreCount + const uint32_t* pWaitSemaphoreDeviceIndices + uint32_t commandBufferCount + const uint32_t* pCommandBufferDeviceMasks + uint32_t signalSemaphoreCount + const uint32_t* pSignalSemaphoreDeviceIndices + + + + VkStructureType sType + const void* pNext + uint32_t resourceDeviceIndex + uint32_t memoryDeviceIndex + + + + VkStructureType sType + const void* pNext + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE] + VkDeviceGroupPresentModeFlagsKHR modes + + + VkStructureType sType + const void* pNext + VkSwapchainKHR swapchain + + + VkStructureType sType + const void* pNext + VkSwapchainKHR swapchain + uint32_t imageIndex + + + VkStructureType sType + const void* pNext + VkSwapchainKHR swapchain + uint64_t timeout + VkSemaphore semaphore + VkFence fence + uint32_t deviceMask + + + VkStructureType sType + const void* pNext + uint32_t swapchainCount + const uint32_t* pDeviceMasks + VkDeviceGroupPresentModeFlagBitsKHR mode + + + VkStructureType sType + const void* pNext + uint32_t physicalDeviceCount + const VkPhysicalDevice* pPhysicalDevices + + + + VkStructureType sType + const void* pNext + VkDeviceGroupPresentModeFlagsKHR modes + + + uint32_t dstBindingBinding within the destination descriptor set to write + uint32_t dstArrayElementArray element within the destination binding to write + uint32_t descriptorCountNumber of descriptors to write + VkDescriptorType descriptorTypeDescriptor type to write + size_t offsetOffset into pData where the descriptors to update are stored + size_t strideStride between two descriptors in pData when writing more than one descriptor + + + + VkStructureType sType + void* pNext + VkDescriptorUpdateTemplateCreateFlags flags + uint32_t descriptorUpdateEntryCountNumber of descriptor update entries to use for the update template + const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntriesDescriptor update entries for the template + VkDescriptorUpdateTemplateType templateType + VkDescriptorSetLayout descriptorSetLayout + VkPipelineBindPoint pipelineBindPoint + VkPipelineLayoutpipelineLayoutIf used for push descriptors, this is the only allowed layout + uint32_t set + + + + float x + float y + + + Display primary in chromaticity coordinates + VkStructureType sType + const void* pNext + From SMPTE 2086 + VkXYColorEXT displayPrimaryRedDisplay primary's Red + VkXYColorEXT displayPrimaryGreenDisplay primary's Green + VkXYColorEXT displayPrimaryBlueDisplay primary's Blue + VkXYColorEXT whitePointDisplay primary's Blue + float maxLuminanceDisplay maximum luminance + 
float minLuminanceDisplay minimum luminance + From CTA 861.3 + float maxContentLightLevelContent maximum luminance + float maxFrameAverageLightLevel + + + uint64_t refreshDurationNumber of nanoseconds from the start of one refresh cycle to the next + + + uint32_t presentIDApplication-provided identifier, previously given to vkQueuePresentKHR + uint64_t desiredPresentTimeEarliest time an image should have been presented, previously given to vkQueuePresentKHR + uint64_t actualPresentTimeTime the image was actually displayed + uint64_t earliestPresentTimeEarliest time the image could have been displayed + uint64_t presentMarginHow early vkQueuePresentKHR was processed vs. how soon it needed to be and make earliestPresentTime + + + VkStructureType sType + const void* pNext + uint32_t swapchainCountCopy of VkPresentInfoKHR::swapchainCount + const VkPresentTimeGOOGLE* pTimesThe earliest times to present images + + + uint32_t presentIDApplication-provided identifier + uint64_t desiredPresentTimeEarliest time an image should be presented + + + VkStructureType sType + const void* pNext + VkIOSSurfaceCreateFlagsMVK flags + const void* pView + + + VkStructureType sType + const void* pNext + VkMacOSSurfaceCreateFlagsMVK flags + const void* pView + + + float xcoeff + float ycoeff + + + VkStructureType sType + const void* pNext + VkBool32 viewportWScalingEnable + uint32_t viewportCount + const VkViewportWScalingNV* pViewportWScalings + + + VkViewportCoordinateSwizzleNV x + VkViewportCoordinateSwizzleNV y + VkViewportCoordinateSwizzleNV z + VkViewportCoordinateSwizzleNV w + + + VkStructureType sType + const void* pNext + VkPipelineViewportSwizzleStateCreateFlagsNV flags + uint32_t viewportCount + const VkViewportSwizzleNV* pViewportSwizzles + + + VkStructureType sType + void* pNext + uint32_t maxDiscardRectanglesmax number of active discard rectangles + + + VkStructureType sType + const void* pNext + VkPipelineDiscardRectangleStateCreateFlagsEXT flags + VkDiscardRectangleModeEXT discardRectangleMode + uint32_t discardRectangleCount + const VkRect2D* pDiscardRectangles + + + VkStructureType sType + void* pNext + VkBool32 perViewPositionAllComponents + + + uint32_t subpass + uint32_t inputAttachmentIndex + VkImageAspectFlags aspectMask + + + + VkStructureType sType + const void* pNext + uint32_t aspectReferenceCount + const VkInputAttachmentAspectReference* pAspectReferences + + + + VkStructureType sType + const void* pNext + VkSurfaceKHR surface + + + VkStructureType sType + void* pNext + VkSurfaceCapabilitiesKHR surfaceCapabilities + + + VkStructureType sType + void* pNext + VkSurfaceFormatKHR surfaceFormat + + + VkStructureType sType + void* pNext + VkImageUsageFlags sharedPresentSupportedUsageFlagsSupported image usage flags if swapchain created using a shared present mode + + + VkStructureType sType + void* pNext + VkBool32 storageBuffer16BitAccess16-bit integer/floating-point variables supported in BufferBlock + VkBool32 uniformAndStorageBuffer16BitAccess16-bit integer/floating-point variables supported in BufferBlock and Block + VkBool32 storagePushConstant1616-bit integer/floating-point variables supported in PushConstant + VkBool32 storageInputOutput1616-bit integer/floating-point variables supported in shader inputs and outputs + + + + VkStructureType sType + void* pNext + uint32_t subgroupSizeThe size of a subgroup for this queue. 
+ VkShaderStageFlags supportedStagesBitfield of what shader stages support subgroup operations + VkSubgroupFeatureFlags supportedOperationsBitfield of what subgroup operations are supported. + VkBool32 quadOperationsInAllStagesFlag to specify whether quad operations are available in all stages. + + + VkStructureType sType + const void* pNext + VkBuffer buffer + + + + VkStructureType sType + const void* pNext + VkImage image + + + + VkStructureType sType + const void* pNext + VkImage image + + + + VkStructureType sType + void* pNext + VkMemoryRequirements memoryRequirements + + + + VkStructureType sType + void* pNext + VkSparseImageMemoryRequirements memoryRequirements + + + + VkStructureType sType + void* pNext + VkPointClippingBehavior pointClippingBehavior + + + + VkStructureType sType + void* pNext + VkBool32 prefersDedicatedAllocation + VkBool32 requiresDedicatedAllocation + + + + VkStructureType sType + const void* pNext + VkImage imageImage that this allocation will be bound to + VkBuffer bufferBuffer that this allocation will be bound to + + + + VkStructureType sType + const void* pNext + VkImageUsageFlags usage + + + + VkStructureType sType + const void* pNext + VkTessellationDomainOrigin domainOrigin + + + + VkStructureType sType + const void* pNext + VkSamplerYcbcrConversion conversion + + + + VkStructureType sType + const void* pNext + VkFormat format + VkSamplerYcbcrModelConversion ycbcrModel + VkSamplerYcbcrRange ycbcrRange + VkComponentMapping components + VkChromaLocation xChromaOffset + VkChromaLocation yChromaOffset + VkFilter chromaFilter + VkBool32 forceExplicitReconstruction + + + + VkStructureType sType + const void* pNext + VkImageAspectFlagBits planeAspect + + + + VkStructureType sType + const void* pNext + VkImageAspectFlagBits planeAspect + + + + VkStructureType sType + void* pNext + VkBool32 samplerYcbcrConversionSampler color conversion supported + + + + VkStructureType sType + void* pNext + uint32_t combinedImageSamplerDescriptorCount + + + + VkStructureType sType + void* pNext + VkBool32 supportsTextureGatherLODBiasAMD + + + VkStructureType sType + const void* pNext + VkBool32 protectedSubmitSubmit protected command buffers + + + VkStructureType sType + void* pNext + VkBool32 protectedMemory + + + VkStructureType sType + void* pNext + VkBool32 protectedNoFault + + + VkStructureType sType + const void* pNext + VkDeviceQueueCreateFlags flags + uint32_t queueFamilyIndex + uint32_t queueIndex + + + VkStructureType sType + const void* pNext + VkPipelineCoverageToColorStateCreateFlagsNV flags + VkBool32 coverageToColorEnable + uint32_t coverageToColorLocation + + + VkStructureType sType + void* pNext + VkBool32 filterMinmaxSingleComponentFormats + VkBool32 filterMinmaxImageComponentMapping + + + float x + float y + + + VkStructureType sType + const void* pNext + VkSampleCountFlagBits sampleLocationsPerPixel + VkExtent2D sampleLocationGridSize + uint32_t sampleLocationsCount + const VkSampleLocationEXT* pSampleLocations + + + uint32_t attachmentIndex + VkSampleLocationsInfoEXT sampleLocationsInfo + + + uint32_t subpassIndex + VkSampleLocationsInfoEXT sampleLocationsInfo + + + VkStructureType sType + const void* pNext + uint32_t attachmentInitialSampleLocationsCount + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations + uint32_t postSubpassSampleLocationsCount + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations + + + VkStructureType sType + const void* pNext + VkBool32 sampleLocationsEnable + VkSampleLocationsInfoEXT 
sampleLocationsInfo + + + VkStructureType sType + void* pNext + VkSampleCountFlags sampleLocationSampleCounts + VkExtent2D maxSampleLocationGridSize + float sampleLocationCoordinateRange[2] + uint32_t sampleLocationSubPixelBits + VkBool32 variableSampleLocations + + + VkStructureType sType + void* pNext + VkExtent2D maxSampleLocationGridSize + + + VkStructureType sType + const void* pNext + VkSamplerReductionModeEXT reductionMode + + + VkStructureType sType + void* pNext + VkBool32 advancedBlendCoherentOperations + + + VkStructureType sType + void* pNext + uint32_t advancedBlendMaxColorAttachments + VkBool32 advancedBlendIndependentBlend + VkBool32 advancedBlendNonPremultipliedSrcColor + VkBool32 advancedBlendNonPremultipliedDstColor + VkBool32 advancedBlendCorrelatedOverlap + VkBool32 advancedBlendAllOperations + + + VkStructureType sType + const void* pNext + VkBool32 srcPremultiplied + VkBool32 dstPremultiplied + VkBlendOverlapEXT blendOverlap + + + VkStructureType sType + const void* pNext + VkPipelineCoverageModulationStateCreateFlagsNV flags + VkCoverageModulationModeNV coverageModulationMode + VkBool32 coverageModulationTableEnable + uint32_t coverageModulationTableCount + const float* pCoverageModulationTable + + + VkStructureType sType + const void* pNext + uint32_t viewFormatCount + const VkFormat* pViewFormats + + + VkStructureType sType + const void* pNext + VkValidationCacheCreateFlagsEXT flags + size_t initialDataSize + const void* pInitialData + + + VkStructureType sType + const void* pNext + VkValidationCacheEXT validationCache + + + VkStructureType sType + void* pNext + uint32_t maxPerSetDescriptors + VkDeviceSize maxMemoryAllocationSize + + + + VkStructureType sType + void* pNext + VkBool32 supported + + + + VkStructureType sType + void* pNext + VkBool32 shaderDrawParameters + + + VkStructureType sType + const void* pNext + const void* handle + int stride + int format + int usage + + + uint32_t numUsedVgprs + uint32_t numUsedSgprs + uint32_t ldsSizePerLocalWorkGroup + size_t ldsUsageSizeInBytes + size_t scratchMemUsageInBytes + + + VkShaderStageFlags shaderStageMask + VkShaderResourceUsageAMD resourceUsage + uint32_t numPhysicalVgprs + uint32_t numPhysicalSgprs + uint32_t numAvailableVgprs + uint32_t numAvailableSgprs + uint32_t computeWorkGroupSize[3] + + + VkStructureType sType + const void* pNext + VkQueueGlobalPriorityEXT globalPriority + + + VkStructureType sType + const void* pNext + VkObjectType objectType + uint64_t objectHandle + const char* pObjectName + + + VkStructureType sType + const void* pNext + VkObjectType objectType + uint64_t objectHandle + uint64_t tagName + size_t tagSize + const void* pTag + + + VkStructureType sType + const void* pNext + const char* pLabelName + float color[4] + + + VkStructureType sType + const void* pNext + VkDebugUtilsMessengerCreateFlagsEXT flags + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity + VkDebugUtilsMessageTypeFlagsEXT messageType + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback + void* pUserData + + + VkStructureType sType + const void* pNext + VkDebugUtilsMessengerCallbackDataFlagsEXT flags + const char* pMessageIdName + int32_t messageIdNumber + const char* pMessage + uint32_t queueLabelCount + VkDebugUtilsLabelEXT* pQueueLabels + uint32_t cmdBufLabelCount + VkDebugUtilsLabelEXT* pCmdBufLabels + uint32_t objectCount + VkDebugUtilsObjectNameInfoEXT* pObjects + + + VkStructureType sType + const void* pNext + VkExternalMemoryHandleTypeFlagBits handleType + void* pHostPointer + + + VkStructureType sType + 
void* pNext + uint32_t memoryTypeBits + + + VkStructureType sType + void* pNext + VkDeviceSize minImportedHostPointerAlignment + + + VkStructureType sType + void* pNextPointer to next structure + float primitiveOverestimationSizeThe size in pixels the primitive is enlarged at each edge during conservative rasterization + float maxExtraPrimitiveOverestimationSizeThe maximum additional overestimation the client can specify in the pipeline state + float extraPrimitiveOverestimationSizeGranularityThe granularity of extra overestimation sizes the implementations supports between 0 and maxExtraOverestimationSize + VkBool32 primitiveUnderestimationtrue if the implementation supports conservative rasterization underestimation mode + VkBool32 conservativePointAndLineRasterizationtrue if conservative rasterization also applies to points and lines + VkBool32 degenerateTrianglesRasterizedtrue if degenerate triangles (those with zero area after snap) are rasterized + VkBool32 degenerateLinesRasterizedtrue if degenerate lines (those with zero length after snap) are rasterized + VkBool32 fullyCoveredFragmentShaderInputVariabletrue if the implementation supports the FullyCoveredEXT SPIR-V builtin fragment shader input variable + VkBool32 conservativeRasterizationPostDepthCoveragetrue if the implementation supports both conservative rasterization and post depth coverage sample coverage mask + + + VkStructureType sType + void* pNextPointer to next structure + uint32_t shaderEngineCountnumber of shader engines + uint32_t shaderArraysPerEngineCountnumber of shader arrays + uint32_t computeUnitsPerShaderArraynumber of CUs per shader array + uint32_t simdPerComputeUnitnumber of SIMDs per compute unit + uint32_t wavefrontsPerSimdnumber of wavefront slots in each SIMD + uint32_t wavefrontSizenumber of threads per wavefront + uint32_t sgprsPerSimdnumber of physical SGPRs per SIMD + uint32_t minSgprAllocationminimum number of SGPRs that can be allocated by a wave + uint32_t maxSgprAllocationnumber of available SGPRs + uint32_t sgprAllocationGranularitySGPRs are allocated in groups of this size + uint32_t vgprsPerSimdnumber of physical VGPRs per SIMD + uint32_t minVgprAllocationminimum number of VGPRs that can be allocated by a wave + uint32_t maxVgprAllocationnumber of available VGPRs + uint32_t vgprAllocationGranularityVGPRs are allocated in groups of this size + + + VkStructureType sType + const void* pNext + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags + VkConservativeRasterizationModeEXT conservativeRasterizationMode + float extraPrimitiveOverestimationSize + + + VkStructureType sType + void* pNext + VkBool32 shaderInputAttachmentArrayDynamicIndexing + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing + VkBool32 shaderUniformBufferArrayNonUniformIndexing + VkBool32 shaderSampledImageArrayNonUniformIndexing + VkBool32 shaderStorageBufferArrayNonUniformIndexing + VkBool32 shaderStorageImageArrayNonUniformIndexing + VkBool32 shaderInputAttachmentArrayNonUniformIndexing + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing + VkBool32 descriptorBindingUniformBufferUpdateAfterBind + VkBool32 descriptorBindingSampledImageUpdateAfterBind + VkBool32 descriptorBindingStorageImageUpdateAfterBind + VkBool32 descriptorBindingStorageBufferUpdateAfterBind + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind + VkBool32 
descriptorBindingUpdateUnusedWhilePending + VkBool32 descriptorBindingPartiallyBound + VkBool32 descriptorBindingVariableDescriptorCount + VkBool32 runtimeDescriptorArray + + + VkStructureType sType + void* pNext + uint32_t maxUpdateAfterBindDescriptorsInAllPools + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative + VkBool32 shaderSampledImageArrayNonUniformIndexingNative + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative + VkBool32 shaderStorageImageArrayNonUniformIndexingNative + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative + VkBool32 robustBufferAccessUpdateAfterBind + VkBool32 quadDivergentImplicitLod + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments + uint32_t maxPerStageUpdateAfterBindResources + uint32_t maxDescriptorSetUpdateAfterBindSamplers + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic + uint32_t maxDescriptorSetUpdateAfterBindSampledImages + uint32_t maxDescriptorSetUpdateAfterBindStorageImages + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments + + + VkStructureType sType + const void* pNext + uint32_t bindingCount + const VkDescriptorBindingFlagsEXT* pBindingFlags + + + VkStructureType sType + const void* pNext + uint32_t descriptorSetCount + const uint32_t* pDescriptorCounts + + + VkStructureType sType + void* pNext + uint32_t maxVariableDescriptorCount + + + uint32_t binding + uint32_t divisor + + + VkStructureType sType + const void* pNext + uint32_t vertexBindingDivisorCount + const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors + + + VkStructureType sType + void* pNext + uint32_t maxVertexAttribDivisormax value of vertex attribute divisor + + + VkStructureType sType + const void* pNext + struct AHardwareBuffer* buffer + + + VkStructureType sType + void* pNext + uint64_t androidHardwareBufferUsage + + + VkStructureType sType + void* pNext + VkDeviceSize allocationSize + uint32_t memoryTypeBits + + + VkStructureType sType + const void* pNext + VkDeviceMemory memory + + + VkStructureType sType + void* pNext + VkFormat format + uint64_t externalFormat + VkFormatFeatureFlags formatFeatures + VkComponentMapping samplerYcbcrConversionComponents + VkSamplerYcbcrModelConversion suggestedYcbcrModel + VkSamplerYcbcrRange suggestedYcbcrRange + VkChromaLocation suggestedXChromaOffset + VkChromaLocation suggestedYChromaOffset + + + VkStructureType sType + void* pNext + uint64_t externalFormat + + + + Vulkan enumerant (token) definitions + + + + + + + + + + + + + + + + + + + + + + + + + + + + Unlike OpenGL, most tokens in Vulkan are actual typed enumerants in + their own numeric namespaces. The "name" attribute is the C enum + type name, and is pulled in from a type tag definition above + (slightly clunky, but retains the type / enum distinction). "type" + attributes of "enum" or "bitmask" indicate that these values should + be generated inside an appropriate definition. 
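
As an illustration of the typed-enumerant scheme described in the comment above, the generated vulkan_core.h expresses the return-code and error-code sections that follow as a plain C enum. A trimmed excerpt for reference only (abbreviated here, not the full registry output):

    /* Trimmed excerpt of a generated typed enum (see vulkan_core.h for the
     * full definition); positive values are return codes, negative values
     * are error codes. */
    typedef enum VkResult {
        VK_SUCCESS = 0,
        VK_NOT_READY = 1,
        VK_TIMEOUT = 2,
        VK_EVENT_SET = 3,
        VK_EVENT_RESET = 4,
        VK_INCOMPLETE = 5,
        VK_ERROR_OUT_OF_HOST_MEMORY = -1,
        VK_ERROR_OUT_OF_DEVICE_MEMORY = -2,
        VK_ERROR_INITIALIZATION_FAILED = -3,
        VK_ERROR_DEVICE_LOST = -4,
        VK_ERROR_MEMORY_MAP_FAILED = -5,
        VK_RESULT_MAX_ENUM = 0x7FFFFFFF
    } VkResult;
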
+
+        value="4" reserved for VK_KHR_sampler_mirror_clamp_to_edge
+        enum VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; do not
+        alias!
+
+        Return codes (positive values)
+
+        Error codes (negative values)
+
+        Flags
+
+        WSI Extensions
+
+        Placeholder for validation enums to be defined for VK_EXT_Validation_flags extension
+
+    VkResult vkCreateInstance
+        const VkInstanceCreateInfo* pCreateInfo
+        const VkAllocationCallbacks* pAllocator
+        VkInstance* pInstance
+
+    void vkDestroyInstance
+        VkInstance instance
+        const VkAllocationCallbacks* pAllocator
+
+    VkResult vkEnumeratePhysicalDevices
+        VkInstance instance
+        uint32_t* pPhysicalDeviceCount
+        VkPhysicalDevice* pPhysicalDevices
+
+    PFN_vkVoidFunction vkGetDeviceProcAddr
+        VkDevice device
+        const char* pName
+
+    PFN_vkVoidFunction vkGetInstanceProcAddr
+        VkInstance instance
+        const char* pName
+
+    void vkGetPhysicalDeviceProperties
+        VkPhysicalDevice physicalDevice
+        VkPhysicalDeviceProperties* pProperties
+
+    void vkGetPhysicalDeviceQueueFamilyProperties
+        VkPhysicalDevice physicalDevice
+        uint32_t* pQueueFamilyPropertyCount
+        VkQueueFamilyProperties* pQueueFamilyProperties
+
+    void vkGetPhysicalDeviceMemoryProperties
+        VkPhysicalDevice physicalDevice
+        VkPhysicalDeviceMemoryProperties* pMemoryProperties
+
+    void vkGetPhysicalDeviceFeatures
+        VkPhysicalDevice physicalDevice
+        VkPhysicalDeviceFeatures* pFeatures
+
+    void vkGetPhysicalDeviceFormatProperties
+        VkPhysicalDevice physicalDevice
+        VkFormat format
VkFormatProperties* pFormatProperties + + + VkResult vkGetPhysicalDeviceImageFormatProperties + VkPhysicalDevice physicalDevice + VkFormat format + VkImageType type + VkImageTiling tiling + VkImageUsageFlags usage + VkImageCreateFlags flags + VkImageFormatProperties* pImageFormatProperties + + + VkResult vkCreateDevice + VkPhysicalDevice physicalDevice + const VkDeviceCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkDevice* pDevice + + + void vkDestroyDevice + VkDevice device + const VkAllocationCallbacks* pAllocator + + + VkResult vkEnumerateInstanceVersion + uint32_t* pApiVersion + + + VkResult vkEnumerateInstanceLayerProperties + uint32_t* pPropertyCount + VkLayerProperties* pProperties + + + VkResult vkEnumerateInstanceExtensionProperties + const char* pLayerName + uint32_t* pPropertyCount + VkExtensionProperties* pProperties + + + VkResult vkEnumerateDeviceLayerProperties + VkPhysicalDevice physicalDevice + uint32_t* pPropertyCount + VkLayerProperties* pProperties + + + VkResult vkEnumerateDeviceExtensionProperties + VkPhysicalDevice physicalDevice + const char* pLayerName + uint32_t* pPropertyCount + VkExtensionProperties* pProperties + + + void vkGetDeviceQueue + VkDevice device + uint32_t queueFamilyIndex + uint32_t queueIndex + VkQueue* pQueue + + + VkResult vkQueueSubmit + VkQueue queue + uint32_t submitCount + const VkSubmitInfo* pSubmits + VkFence fence + + + VkResult vkQueueWaitIdle + VkQueue queue + + + VkResult vkDeviceWaitIdle + VkDevice device + + all sname:VkQueue objects created from pname:device + + + + VkResult vkAllocateMemory + VkDevice device + const VkMemoryAllocateInfo* pAllocateInfo + const VkAllocationCallbacks* pAllocator + VkDeviceMemory* pMemory + + + void vkFreeMemory + VkDevice device + VkDeviceMemory memory + const VkAllocationCallbacks* pAllocator + + + VkResult vkMapMemory + VkDevice device + VkDeviceMemory memory + VkDeviceSize offset + VkDeviceSize size + VkMemoryMapFlags flags + void** ppData + + + void vkUnmapMemory + VkDevice device + VkDeviceMemory memory + + + VkResult vkFlushMappedMemoryRanges + VkDevice device + uint32_t memoryRangeCount + const VkMappedMemoryRange* pMemoryRanges + + + VkResult vkInvalidateMappedMemoryRanges + VkDevice device + uint32_t memoryRangeCount + const VkMappedMemoryRange* pMemoryRanges + + + void vkGetDeviceMemoryCommitment + VkDevice device + VkDeviceMemory memory + VkDeviceSize* pCommittedMemoryInBytes + + + void vkGetBufferMemoryRequirements + VkDevice device + VkBuffer buffer + VkMemoryRequirements* pMemoryRequirements + + + VkResult vkBindBufferMemory + VkDevice device + VkBuffer buffer + VkDeviceMemory memory + VkDeviceSize memoryOffset + + + void vkGetImageMemoryRequirements + VkDevice device + VkImage image + VkMemoryRequirements* pMemoryRequirements + + + VkResult vkBindImageMemory + VkDevice device + VkImage image + VkDeviceMemory memory + VkDeviceSize memoryOffset + + + void vkGetImageSparseMemoryRequirements + VkDevice device + VkImage image + uint32_t* pSparseMemoryRequirementCount + VkSparseImageMemoryRequirements* pSparseMemoryRequirements + + + void vkGetPhysicalDeviceSparseImageFormatProperties + VkPhysicalDevice physicalDevice + VkFormat format + VkImageType type + VkSampleCountFlagBits samples + VkImageUsageFlags usage + VkImageTiling tiling + uint32_t* pPropertyCount + VkSparseImageFormatProperties* pProperties + + + VkResult vkQueueBindSparse + VkQueue queue + uint32_t bindInfoCount + const VkBindSparseInfo* pBindInfo + VkFence fence + + + VkResult vkCreateFence + VkDevice 
device + const VkFenceCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkFence* pFence + + + void vkDestroyFence + VkDevice device + VkFence fence + const VkAllocationCallbacks* pAllocator + + + VkResult vkResetFences + VkDevice device + uint32_t fenceCount + const VkFence* pFences + + + VkResult vkGetFenceStatus + VkDevice device + VkFence fence + + + VkResult vkWaitForFences + VkDevice device + uint32_t fenceCount + const VkFence* pFences + VkBool32 waitAll + uint64_t timeout + + + VkResult vkCreateSemaphore + VkDevice device + const VkSemaphoreCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSemaphore* pSemaphore + + + void vkDestroySemaphore + VkDevice device + VkSemaphore semaphore + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateEvent + VkDevice device + const VkEventCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkEvent* pEvent + + + void vkDestroyEvent + VkDevice device + VkEvent event + const VkAllocationCallbacks* pAllocator + + + VkResult vkGetEventStatus + VkDevice device + VkEvent event + + + VkResult vkSetEvent + VkDevice device + VkEvent event + + + VkResult vkResetEvent + VkDevice device + VkEvent event + + + VkResult vkCreateQueryPool + VkDevice device + const VkQueryPoolCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkQueryPool* pQueryPool + + + void vkDestroyQueryPool + VkDevice device + VkQueryPool queryPool + const VkAllocationCallbacks* pAllocator + + + VkResult vkGetQueryPoolResults + VkDevice device + VkQueryPool queryPool + uint32_t firstQuery + uint32_t queryCount + size_t dataSize + void* pData + VkDeviceSize stride + VkQueryResultFlags flags + + + VkResult vkCreateBuffer + VkDevice device + const VkBufferCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkBuffer* pBuffer + + + void vkDestroyBuffer + VkDevice device + VkBuffer buffer + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateBufferView + VkDevice device + const VkBufferViewCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkBufferView* pView + + + void vkDestroyBufferView + VkDevice device + VkBufferView bufferView + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateImage + VkDevice device + const VkImageCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkImage* pImage + + + void vkDestroyImage + VkDevice device + VkImage image + const VkAllocationCallbacks* pAllocator + + + void vkGetImageSubresourceLayout + VkDevice device + VkImage image + const VkImageSubresource* pSubresource + VkSubresourceLayout* pLayout + + + VkResult vkCreateImageView + VkDevice device + const VkImageViewCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkImageView* pView + + + void vkDestroyImageView + VkDevice device + VkImageView imageView + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateShaderModule + VkDevice device + const VkShaderModuleCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkShaderModule* pShaderModule + + + void vkDestroyShaderModule + VkDevice device + VkShaderModule shaderModule + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreatePipelineCache + VkDevice device + const VkPipelineCacheCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkPipelineCache* pPipelineCache + + + void vkDestroyPipelineCache + VkDevice device + VkPipelineCache pipelineCache + const VkAllocationCallbacks* pAllocator + + + VkResult vkGetPipelineCacheData + VkDevice device + 
VkPipelineCache pipelineCache + size_t* pDataSize + void* pData + + + VkResult vkMergePipelineCaches + VkDevice device + VkPipelineCache dstCache + uint32_t srcCacheCount + const VkPipelineCache* pSrcCaches + + + VkResult vkCreateGraphicsPipelines + VkDevice device + VkPipelineCache pipelineCache + uint32_t createInfoCount + const VkGraphicsPipelineCreateInfo* pCreateInfos + const VkAllocationCallbacks* pAllocator + VkPipeline* pPipelines + + + VkResult vkCreateComputePipelines + VkDevice device + VkPipelineCache pipelineCache + uint32_t createInfoCount + const VkComputePipelineCreateInfo* pCreateInfos + const VkAllocationCallbacks* pAllocator + VkPipeline* pPipelines + + + void vkDestroyPipeline + VkDevice device + VkPipeline pipeline + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreatePipelineLayout + VkDevice device + const VkPipelineLayoutCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkPipelineLayout* pPipelineLayout + + + void vkDestroyPipelineLayout + VkDevice device + VkPipelineLayout pipelineLayout + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateSampler + VkDevice device + const VkSamplerCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSampler* pSampler + + + void vkDestroySampler + VkDevice device + VkSampler sampler + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateDescriptorSetLayout + VkDevice device + const VkDescriptorSetLayoutCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkDescriptorSetLayout* pSetLayout + + + void vkDestroyDescriptorSetLayout + VkDevice device + VkDescriptorSetLayout descriptorSetLayout + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateDescriptorPool + VkDevice device + const VkDescriptorPoolCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkDescriptorPool* pDescriptorPool + + + void vkDestroyDescriptorPool + VkDevice device + VkDescriptorPool descriptorPool + const VkAllocationCallbacks* pAllocator + + + VkResult vkResetDescriptorPool + VkDevice device + VkDescriptorPool descriptorPool + VkDescriptorPoolResetFlags flags + + any sname:VkDescriptorSet objects allocated from pname:descriptorPool + + + + VkResult vkAllocateDescriptorSets + VkDevice device + const VkDescriptorSetAllocateInfo* pAllocateInfo + VkDescriptorSet* pDescriptorSets + + + VkResult vkFreeDescriptorSets + VkDevice device + VkDescriptorPool descriptorPool + uint32_t descriptorSetCount + const VkDescriptorSet* pDescriptorSets + + + void vkUpdateDescriptorSets + VkDevice device + uint32_t descriptorWriteCount + const VkWriteDescriptorSet* pDescriptorWrites + uint32_t descriptorCopyCount + const VkCopyDescriptorSet* pDescriptorCopies + + + VkResult vkCreateFramebuffer + VkDevice device + const VkFramebufferCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkFramebuffer* pFramebuffer + + + void vkDestroyFramebuffer + VkDevice device + VkFramebuffer framebuffer + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateRenderPass + VkDevice device + const VkRenderPassCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkRenderPass* pRenderPass + + + void vkDestroyRenderPass + VkDevice device + VkRenderPass renderPass + const VkAllocationCallbacks* pAllocator + + + void vkGetRenderAreaGranularity + VkDevice device + VkRenderPass renderPass + VkExtent2D* pGranularity + + + VkResult vkCreateCommandPool + VkDevice device + const VkCommandPoolCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + 
VkCommandPool* pCommandPool + + + void vkDestroyCommandPool + VkDevice device + VkCommandPool commandPool + const VkAllocationCallbacks* pAllocator + + + VkResult vkResetCommandPool + VkDevice device + VkCommandPool commandPool + VkCommandPoolResetFlags flags + + + VkResult vkAllocateCommandBuffers + VkDevice device + const VkCommandBufferAllocateInfo* pAllocateInfo + VkCommandBuffer* pCommandBuffers + + + void vkFreeCommandBuffers + VkDevice device + VkCommandPool commandPool + uint32_t commandBufferCount + const VkCommandBuffer* pCommandBuffers + + + VkResult vkBeginCommandBuffer + VkCommandBuffer commandBuffer + const VkCommandBufferBeginInfo* pBeginInfo + + the sname:VkCommandPool that pname:commandBuffer was allocated from + + + + VkResult vkEndCommandBuffer + VkCommandBuffer commandBuffer + + the sname:VkCommandPool that pname:commandBuffer was allocated from + + + + VkResult vkResetCommandBuffer + VkCommandBuffer commandBuffer + VkCommandBufferResetFlags flags + + + void vkCmdBindPipeline + VkCommandBuffer commandBuffer + VkPipelineBindPoint pipelineBindPoint + VkPipeline pipeline + + + void vkCmdSetViewport + VkCommandBuffer commandBuffer + uint32_t firstViewport + uint32_t viewportCount + const VkViewport* pViewports + + + void vkCmdSetScissor + VkCommandBuffer commandBuffer + uint32_t firstScissor + uint32_t scissorCount + const VkRect2D* pScissors + + + void vkCmdSetLineWidth + VkCommandBuffer commandBuffer + float lineWidth + + + void vkCmdSetDepthBias + VkCommandBuffer commandBuffer + float depthBiasConstantFactor + float depthBiasClamp + float depthBiasSlopeFactor + + + void vkCmdSetBlendConstants + VkCommandBuffer commandBuffer + const float blendConstants[4] + + + void vkCmdSetDepthBounds + VkCommandBuffer commandBuffer + float minDepthBounds + float maxDepthBounds + + + void vkCmdSetStencilCompareMask + VkCommandBuffer commandBuffer + VkStencilFaceFlags faceMask + uint32_t compareMask + + + void vkCmdSetStencilWriteMask + VkCommandBuffer commandBuffer + VkStencilFaceFlags faceMask + uint32_t writeMask + + + void vkCmdSetStencilReference + VkCommandBuffer commandBuffer + VkStencilFaceFlags faceMask + uint32_t reference + + + void vkCmdBindDescriptorSets + VkCommandBuffer commandBuffer + VkPipelineBindPoint pipelineBindPoint + VkPipelineLayout layout + uint32_t firstSet + uint32_t descriptorSetCount + const VkDescriptorSet* pDescriptorSets + uint32_t dynamicOffsetCount + const uint32_t* pDynamicOffsets + + + void vkCmdBindIndexBuffer + VkCommandBuffer commandBuffer + VkBuffer buffer + VkDeviceSize offset + VkIndexType indexType + + + void vkCmdBindVertexBuffers + VkCommandBuffer commandBuffer + uint32_t firstBinding + uint32_t bindingCount + const VkBuffer* pBuffers + const VkDeviceSize* pOffsets + + + void vkCmdDraw + VkCommandBuffer commandBuffer + uint32_t vertexCount + uint32_t instanceCount + uint32_t firstVertex + uint32_t firstInstance + + + void vkCmdDrawIndexed + VkCommandBuffer commandBuffer + uint32_t indexCount + uint32_t instanceCount + uint32_t firstIndex + int32_t vertexOffset + uint32_t firstInstance + + + void vkCmdDrawIndirect + VkCommandBuffer commandBuffer + VkBuffer buffer + VkDeviceSize offset + uint32_t drawCount + uint32_t stride + + + void vkCmdDrawIndexedIndirect + VkCommandBuffer commandBuffer + VkBuffer buffer + VkDeviceSize offset + uint32_t drawCount + uint32_t stride + + + void vkCmdDispatch + VkCommandBuffer commandBuffer + uint32_t groupCountX + uint32_t groupCountY + uint32_t groupCountZ + + + void vkCmdDispatchIndirect + VkCommandBuffer 
commandBuffer + VkBuffer buffer + VkDeviceSize offset + + + void vkCmdCopyBuffer + VkCommandBuffer commandBuffer + VkBuffer srcBuffer + VkBuffer dstBuffer + uint32_t regionCount + const VkBufferCopy* pRegions + + + void vkCmdCopyImage + VkCommandBuffer commandBuffer + VkImage srcImage + VkImageLayout srcImageLayout + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkImageCopy* pRegions + + + void vkCmdBlitImage + VkCommandBuffer commandBuffer + VkImage srcImage + VkImageLayout srcImageLayout + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkImageBlit* pRegions + VkFilter filter + + + void vkCmdCopyBufferToImage + VkCommandBuffer commandBuffer + VkBuffer srcBuffer + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkBufferImageCopy* pRegions + + + void vkCmdCopyImageToBuffer + VkCommandBuffer commandBuffer + VkImage srcImage + VkImageLayout srcImageLayout + VkBuffer dstBuffer + uint32_t regionCount + const VkBufferImageCopy* pRegions + + + void vkCmdUpdateBuffer + VkCommandBuffer commandBuffer + VkBuffer dstBuffer + VkDeviceSize dstOffset + VkDeviceSize dataSize + const void* pData + + + void vkCmdFillBuffer + VkCommandBuffer commandBuffer + VkBuffer dstBuffer + VkDeviceSize dstOffset + VkDeviceSize size + uint32_t data + + + void vkCmdClearColorImage + VkCommandBuffer commandBuffer + VkImage image + VkImageLayout imageLayout + const VkClearColorValue* pColor + uint32_t rangeCount + const VkImageSubresourceRange* pRanges + + + void vkCmdClearDepthStencilImage + VkCommandBuffer commandBuffer + VkImage image + VkImageLayout imageLayout + const VkClearDepthStencilValue* pDepthStencil + uint32_t rangeCount + const VkImageSubresourceRange* pRanges + + + void vkCmdClearAttachments + VkCommandBuffer commandBuffer + uint32_t attachmentCount + const VkClearAttachment* pAttachments + uint32_t rectCount + const VkClearRect* pRects + + + void vkCmdResolveImage + VkCommandBuffer commandBuffer + VkImage srcImage + VkImageLayout srcImageLayout + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkImageResolve* pRegions + + + void vkCmdSetEvent + VkCommandBuffer commandBuffer + VkEvent event + VkPipelineStageFlags stageMask + + + void vkCmdResetEvent + VkCommandBuffer commandBuffer + VkEvent event + VkPipelineStageFlags stageMask + + + void vkCmdWaitEvents + VkCommandBuffer commandBuffer + uint32_t eventCount + const VkEvent* pEvents + VkPipelineStageFlags srcStageMask + VkPipelineStageFlags dstStageMask + uint32_t memoryBarrierCount + const VkMemoryBarrier* pMemoryBarriers + uint32_t bufferMemoryBarrierCount + const VkBufferMemoryBarrier* pBufferMemoryBarriers + uint32_t imageMemoryBarrierCount + const VkImageMemoryBarrier* pImageMemoryBarriers + + + void vkCmdPipelineBarrier + VkCommandBuffer commandBuffer + VkPipelineStageFlags srcStageMask + VkPipelineStageFlags dstStageMask + VkDependencyFlags dependencyFlags + uint32_t memoryBarrierCount + const VkMemoryBarrier* pMemoryBarriers + uint32_t bufferMemoryBarrierCount + const VkBufferMemoryBarrier* pBufferMemoryBarriers + uint32_t imageMemoryBarrierCount + const VkImageMemoryBarrier* pImageMemoryBarriers + + + void vkCmdBeginQuery + VkCommandBuffer commandBuffer + VkQueryPool queryPool + uint32_t query + VkQueryControlFlags flags + + + void vkCmdEndQuery + VkCommandBuffer commandBuffer + VkQueryPool queryPool + uint32_t query + + + void vkCmdResetQueryPool + VkCommandBuffer commandBuffer + VkQueryPool queryPool + uint32_t firstQuery + 
uint32_t queryCount + + + void vkCmdWriteTimestamp + VkCommandBuffer commandBuffer + VkPipelineStageFlagBits pipelineStage + VkQueryPool queryPool + uint32_t query + + + void vkCmdCopyQueryPoolResults + VkCommandBuffer commandBuffer + VkQueryPool queryPool + uint32_t firstQuery + uint32_t queryCount + VkBuffer dstBuffer + VkDeviceSize dstOffset + VkDeviceSize stride + VkQueryResultFlags flags + + + void vkCmdPushConstants + VkCommandBuffer commandBuffer + VkPipelineLayout layout + VkShaderStageFlags stageFlags + uint32_t offset + uint32_t size + const void* pValues + + + void vkCmdBeginRenderPass + VkCommandBuffer commandBuffer + const VkRenderPassBeginInfo* pRenderPassBegin + VkSubpassContents contents + + + void vkCmdNextSubpass + VkCommandBuffer commandBuffer + VkSubpassContents contents + + + void vkCmdEndRenderPass + VkCommandBuffer commandBuffer + + + void vkCmdExecuteCommands + VkCommandBuffer commandBuffer + uint32_t commandBufferCount + const VkCommandBuffer* pCommandBuffers + + + VkResult vkCreateAndroidSurfaceKHR + VkInstance instance + const VkAndroidSurfaceCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR + VkPhysicalDevice physicalDevice + uint32_t* pPropertyCount + VkDisplayPropertiesKHR* pProperties + + + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR + VkPhysicalDevice physicalDevice + uint32_t* pPropertyCount + VkDisplayPlanePropertiesKHR* pProperties + + + VkResult vkGetDisplayPlaneSupportedDisplaysKHR + VkPhysicalDevice physicalDevice + uint32_t planeIndex + uint32_t* pDisplayCount + VkDisplayKHR* pDisplays + + + VkResult vkGetDisplayModePropertiesKHR + VkPhysicalDevice physicalDevice + VkDisplayKHR display + uint32_t* pPropertyCount + VkDisplayModePropertiesKHR* pProperties + + + VkResult vkCreateDisplayModeKHR + VkPhysicalDevice physicalDevice + VkDisplayKHR display + const VkDisplayModeCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkDisplayModeKHR* pMode + + + VkResult vkGetDisplayPlaneCapabilitiesKHR + VkPhysicalDevice physicalDevice + VkDisplayModeKHR mode + uint32_t planeIndex + VkDisplayPlaneCapabilitiesKHR* pCapabilities + + + VkResult vkCreateDisplayPlaneSurfaceKHR + VkInstance instance + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkResult vkCreateSharedSwapchainsKHR + VkDevice device + uint32_t swapchainCount + const VkSwapchainCreateInfoKHR* pCreateInfos + const VkAllocationCallbacks* pAllocator + VkSwapchainKHR* pSwapchains + + + VkResult vkCreateMirSurfaceKHR + VkInstance instance + const VkMirSurfaceCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkBool32 vkGetPhysicalDeviceMirPresentationSupportKHR + VkPhysicalDevice physicalDevice + uint32_t queueFamilyIndex + MirConnection* connection + + + void vkDestroySurfaceKHR + VkInstance instance + VkSurfaceKHR surface + const VkAllocationCallbacks* pAllocator + + + VkResult vkGetPhysicalDeviceSurfaceSupportKHR + VkPhysicalDevice physicalDevice + uint32_t queueFamilyIndex + VkSurfaceKHR surface + VkBool32* pSupported + + + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR + VkPhysicalDevice physicalDevice + VkSurfaceKHR surface + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities + + + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR + VkPhysicalDevice physicalDevice + VkSurfaceKHR surface + uint32_t* pSurfaceFormatCount + VkSurfaceFormatKHR* pSurfaceFormats + + 
+ VkResult vkGetPhysicalDeviceSurfacePresentModesKHR + VkPhysicalDevice physicalDevice + VkSurfaceKHR surface + uint32_t* pPresentModeCount + VkPresentModeKHR* pPresentModes + + + VkResult vkCreateSwapchainKHR + VkDevice device + const VkSwapchainCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSwapchainKHR* pSwapchain + + + void vkDestroySwapchainKHR + VkDevice device + VkSwapchainKHR swapchain + const VkAllocationCallbacks* pAllocator + + + VkResult vkGetSwapchainImagesKHR + VkDevice device + VkSwapchainKHR swapchain + uint32_t* pSwapchainImageCount + VkImage* pSwapchainImages + + + VkResult vkAcquireNextImageKHR + VkDevice device + VkSwapchainKHR swapchain + uint64_t timeout + VkSemaphore semaphore + VkFence fence + uint32_t* pImageIndex + + + VkResult vkQueuePresentKHR + VkQueue queue + const VkPresentInfoKHR* pPresentInfo + + + VkResult vkCreateViSurfaceNN + VkInstance instance + const VkViSurfaceCreateInfoNN* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkResult vkCreateWaylandSurfaceKHR + VkInstance instance + const VkWaylandSurfaceCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR + VkPhysicalDevice physicalDevice + uint32_t queueFamilyIndex + struct wl_display* display + + + VkResult vkCreateWin32SurfaceKHR + VkInstance instance + const VkWin32SurfaceCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR + VkPhysicalDevice physicalDevice + uint32_t queueFamilyIndex + + + VkResult vkCreateXlibSurfaceKHR + VkInstance instance + const VkXlibSurfaceCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR + VkPhysicalDevice physicalDevice + uint32_t queueFamilyIndex + Display* dpy + VisualID visualID + + + VkResult vkCreateXcbSurfaceKHR + VkInstance instance + const VkXcbSurfaceCreateInfoKHR* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR + VkPhysicalDevice physicalDevice + uint32_t queueFamilyIndex + xcb_connection_t* connection + xcb_visualid_t visual_id + + + VkResult vkCreateDebugReportCallbackEXT + VkInstance instance + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkDebugReportCallbackEXT* pCallback + + + void vkDestroyDebugReportCallbackEXT + VkInstance instance + VkDebugReportCallbackEXT callback + const VkAllocationCallbacks* pAllocator + + + void vkDebugReportMessageEXT + VkInstance instance + VkDebugReportFlagsEXT flags + VkDebugReportObjectTypeEXT objectType + uint64_t object + size_t location + int32_t messageCode + const char* pLayerPrefix + const char* pMessage + + + VkResult vkDebugMarkerSetObjectNameEXT + VkDevice device + const VkDebugMarkerObjectNameInfoEXT* pNameInfo + + + VkResult vkDebugMarkerSetObjectTagEXT + VkDevice device + const VkDebugMarkerObjectTagInfoEXT* pTagInfo + + + void vkCmdDebugMarkerBeginEXT + VkCommandBuffer commandBuffer + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo + + + void vkCmdDebugMarkerEndEXT + VkCommandBuffer commandBuffer + + + void vkCmdDebugMarkerInsertEXT + VkCommandBuffer commandBuffer + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo + + + VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV + VkPhysicalDevice 
physicalDevice + VkFormat format + VkImageType type + VkImageTiling tiling + VkImageUsageFlags usage + VkImageCreateFlags flags + VkExternalMemoryHandleTypeFlagsNV externalHandleType + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties + + + VkResult vkGetMemoryWin32HandleNV + VkDevice device + VkDeviceMemory memory + VkExternalMemoryHandleTypeFlagsNV handleType + HANDLE* pHandle + + + void vkCmdDrawIndirectCountAMD + VkCommandBuffer commandBuffer + VkBuffer buffer + VkDeviceSize offset + VkBuffer countBuffer + VkDeviceSize countBufferOffset + uint32_t maxDrawCount + uint32_t stride + + + void vkCmdDrawIndexedIndirectCountAMD + VkCommandBuffer commandBuffer + VkBuffer buffer + VkDeviceSize offset + VkBuffer countBuffer + VkDeviceSize countBufferOffset + uint32_t maxDrawCount + uint32_t stride + + + void vkCmdProcessCommandsNVX + VkCommandBuffer commandBuffer + const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo + + + void vkCmdReserveSpaceForCommandsNVX + VkCommandBuffer commandBuffer + const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo + + + VkResult vkCreateIndirectCommandsLayoutNVX + VkDevice device + const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout + + + void vkDestroyIndirectCommandsLayoutNVX + VkDevice device + VkIndirectCommandsLayoutNVX indirectCommandsLayout + const VkAllocationCallbacks* pAllocator + + + VkResult vkCreateObjectTableNVX + VkDevice device + const VkObjectTableCreateInfoNVX* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkObjectTableNVX* pObjectTable + + + void vkDestroyObjectTableNVX + VkDevice device + VkObjectTableNVX objectTable + const VkAllocationCallbacks* pAllocator + + + VkResult vkRegisterObjectsNVX + VkDevice device + VkObjectTableNVX objectTable + uint32_t objectCount + const VkObjectTableEntryNVX* const* ppObjectTableEntries + const uint32_t* pObjectIndices + + + VkResult vkUnregisterObjectsNVX + VkDevice device + VkObjectTableNVX objectTable + uint32_t objectCount + const VkObjectEntryTypeNVX* pObjectEntryTypes + const uint32_t* pObjectIndices + + + void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX + VkPhysicalDevice physicalDevice + VkDeviceGeneratedCommandsFeaturesNVX* pFeatures + VkDeviceGeneratedCommandsLimitsNVX* pLimits + + + void vkGetPhysicalDeviceFeatures2 + VkPhysicalDevice physicalDevice + VkPhysicalDeviceFeatures2* pFeatures + + + + void vkGetPhysicalDeviceProperties2 + VkPhysicalDevice physicalDevice + VkPhysicalDeviceProperties2* pProperties + + + + void vkGetPhysicalDeviceFormatProperties2 + VkPhysicalDevice physicalDevice + VkFormat format + VkFormatProperties2* pFormatProperties + + + + VkResult vkGetPhysicalDeviceImageFormatProperties2 + VkPhysicalDevice physicalDevice + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo + VkImageFormatProperties2* pImageFormatProperties + + + + void vkGetPhysicalDeviceQueueFamilyProperties2 + VkPhysicalDevice physicalDevice + uint32_t* pQueueFamilyPropertyCount + VkQueueFamilyProperties2* pQueueFamilyProperties + + + + void vkGetPhysicalDeviceMemoryProperties2 + VkPhysicalDevice physicalDevice + VkPhysicalDeviceMemoryProperties2* pMemoryProperties + + + + void vkGetPhysicalDeviceSparseImageFormatProperties2 + VkPhysicalDevice physicalDevice + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo + uint32_t* pPropertyCount + VkSparseImageFormatProperties2* pProperties + + + + void vkCmdPushDescriptorSetKHR + VkCommandBuffer commandBuffer + 
VkPipelineBindPoint pipelineBindPoint + VkPipelineLayout layout + uint32_t set + uint32_t descriptorWriteCount + const VkWriteDescriptorSet* pDescriptorWrites + + + void vkTrimCommandPool + VkDevice device + VkCommandPool commandPool + VkCommandPoolTrimFlags flags + + + + void vkGetPhysicalDeviceExternalBufferProperties + VkPhysicalDevice physicalDevice + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo + VkExternalBufferProperties* pExternalBufferProperties + + + + VkResult vkGetMemoryWin32HandleKHR + VkDevice device + const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo + HANDLE* pHandle + + + VkResult vkGetMemoryWin32HandlePropertiesKHR + VkDevice device + VkExternalMemoryHandleTypeFlagBits handleType + HANDLE handle + VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties + + + VkResult vkGetMemoryFdKHR + VkDevice device + const VkMemoryGetFdInfoKHR* pGetFdInfo + int* pFd + + + VkResult vkGetMemoryFdPropertiesKHR + VkDevice device + VkExternalMemoryHandleTypeFlagBits handleType + int fd + VkMemoryFdPropertiesKHR* pMemoryFdProperties + + + void vkGetPhysicalDeviceExternalSemaphoreProperties + VkPhysicalDevice physicalDevice + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo + VkExternalSemaphoreProperties* pExternalSemaphoreProperties + + + + VkResult vkGetSemaphoreWin32HandleKHR + VkDevice device + const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo + HANDLE* pHandle + + + VkResult vkImportSemaphoreWin32HandleKHR + VkDevice device + const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo + + + VkResult vkGetSemaphoreFdKHR + VkDevice device + const VkSemaphoreGetFdInfoKHR* pGetFdInfo + int* pFd + + + VkResult vkImportSemaphoreFdKHR + VkDevice device + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo + + + void vkGetPhysicalDeviceExternalFenceProperties + VkPhysicalDevice physicalDevice + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo + VkExternalFenceProperties* pExternalFenceProperties + + + + VkResult vkGetFenceWin32HandleKHR + VkDevice device + const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo + HANDLE* pHandle + + + VkResult vkImportFenceWin32HandleKHR + VkDevice device + const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo + + + VkResult vkGetFenceFdKHR + VkDevice device + const VkFenceGetFdInfoKHR* pGetFdInfo + int* pFd + + + VkResult vkImportFenceFdKHR + VkDevice device + const VkImportFenceFdInfoKHR* pImportFenceFdInfo + + + VkResult vkReleaseDisplayEXT + VkPhysicalDevice physicalDevice + VkDisplayKHR display + + + VkResult vkAcquireXlibDisplayEXT + VkPhysicalDevice physicalDevice + Display* dpy + VkDisplayKHR display + + + VkResult vkGetRandROutputDisplayEXT + VkPhysicalDevice physicalDevice + Display* dpy + RROutput rrOutput + VkDisplayKHR* pDisplay + + + VkResult vkDisplayPowerControlEXT + VkDevice device + VkDisplayKHR display + const VkDisplayPowerInfoEXT* pDisplayPowerInfo + + + VkResult vkRegisterDeviceEventEXT + VkDevice device + const VkDeviceEventInfoEXT* pDeviceEventInfo + const VkAllocationCallbacks* pAllocator + VkFence* pFence + + + VkResult vkRegisterDisplayEventEXT + VkDevice device + VkDisplayKHR display + const VkDisplayEventInfoEXT* pDisplayEventInfo + const VkAllocationCallbacks* pAllocator + VkFence* pFence + + + VkResult vkGetSwapchainCounterEXT + VkDevice device + VkSwapchainKHR swapchain + VkSurfaceCounterFlagBitsEXT counter + uint64_t* pCounterValue + + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT + VkPhysicalDevice physicalDevice + 
VkSurfaceKHR surface + VkSurfaceCapabilities2EXT* pSurfaceCapabilities + + + VkResult vkEnumeratePhysicalDeviceGroups + VkInstance instance + uint32_t* pPhysicalDeviceGroupCount + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties + + + + void vkGetDeviceGroupPeerMemoryFeatures + VkDevice device + uint32_t heapIndex + uint32_t localDeviceIndex + uint32_t remoteDeviceIndex + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures + + + + VkResult vkBindBufferMemory2 + VkDevice device + uint32_t bindInfoCount + const VkBindBufferMemoryInfo* pBindInfos + + + + VkResult vkBindImageMemory2 + VkDevice device + uint32_t bindInfoCount + const VkBindImageMemoryInfo* pBindInfos + + + + void vkCmdSetDeviceMask + VkCommandBuffer commandBuffer + uint32_t deviceMask + + + + VkResult vkGetDeviceGroupPresentCapabilitiesKHR + VkDevice device + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities + + + VkResult vkGetDeviceGroupSurfacePresentModesKHR + VkDevice device + VkSurfaceKHR surface + VkDeviceGroupPresentModeFlagsKHR* pModes + + + VkResult vkAcquireNextImage2KHR + VkDevice device + const VkAcquireNextImageInfoKHR* pAcquireInfo + uint32_t* pImageIndex + + + void vkCmdDispatchBase + VkCommandBuffer commandBuffer + uint32_t baseGroupX + uint32_t baseGroupY + uint32_t baseGroupZ + uint32_t groupCountX + uint32_t groupCountY + uint32_t groupCountZ + + + + VkResult vkGetPhysicalDevicePresentRectanglesKHR + VkPhysicalDevice physicalDevice + VkSurfaceKHR surface + uint32_t* pRectCount + VkRect2D* pRects + + + VkResult vkCreateDescriptorUpdateTemplate + VkDevice device + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate + + + + void vkDestroyDescriptorUpdateTemplate + VkDevice device + VkDescriptorUpdateTemplate descriptorUpdateTemplate + const VkAllocationCallbacks* pAllocator + + + + void vkUpdateDescriptorSetWithTemplate + VkDevice device + VkDescriptorSet descriptorSet + VkDescriptorUpdateTemplate descriptorUpdateTemplate + const void* pData + + + + void vkCmdPushDescriptorSetWithTemplateKHR + VkCommandBuffer commandBuffer + VkDescriptorUpdateTemplate descriptorUpdateTemplate + VkPipelineLayout layout + uint32_t set + const void* pData + + + void vkSetHdrMetadataEXT + VkDevice device + uint32_t swapchainCount + const VkSwapchainKHR* pSwapchains + const VkHdrMetadataEXT* pMetadata + + + VkResult vkGetSwapchainStatusKHR + VkDevice device + VkSwapchainKHR swapchain + + + VkResult vkGetRefreshCycleDurationGOOGLE + VkDevice device + VkSwapchainKHR swapchain + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties + + + VkResult vkGetPastPresentationTimingGOOGLE + VkDevice device + VkSwapchainKHR swapchain + uint32_t* pPresentationTimingCount + VkPastPresentationTimingGOOGLE* pPresentationTimings + + + VkResult vkCreateIOSSurfaceMVK + VkInstance instance + const VkIOSSurfaceCreateInfoMVK* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + VkResult vkCreateMacOSSurfaceMVK + VkInstance instance + const VkMacOSSurfaceCreateInfoMVK* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSurfaceKHR* pSurface + + + void vkCmdSetViewportWScalingNV + VkCommandBuffer commandBuffer + uint32_t firstViewport + uint32_t viewportCount + const VkViewportWScalingNV* pViewportWScalings + + + void vkCmdSetDiscardRectangleEXT + VkCommandBuffer commandBuffer + uint32_t firstDiscardRectangle + uint32_t discardRectangleCount + const VkRect2D* pDiscardRectangles + + + void 
vkCmdSetSampleLocationsEXT + VkCommandBuffer commandBuffer + const VkSampleLocationsInfoEXT* pSampleLocationsInfo + + + void vkGetPhysicalDeviceMultisamplePropertiesEXT + VkPhysicalDevice physicalDevice + VkSampleCountFlagBits samples + VkMultisamplePropertiesEXT* pMultisampleProperties + + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR + VkPhysicalDevice physicalDevice + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo + VkSurfaceCapabilities2KHR* pSurfaceCapabilities + + + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR + VkPhysicalDevice physicalDevice + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo + uint32_t* pSurfaceFormatCount + VkSurfaceFormat2KHR* pSurfaceFormats + + + void vkGetBufferMemoryRequirements2 + VkDevice device + const VkBufferMemoryRequirementsInfo2* pInfo + VkMemoryRequirements2* pMemoryRequirements + + + + void vkGetImageMemoryRequirements2 + VkDevice device + const VkImageMemoryRequirementsInfo2* pInfo + VkMemoryRequirements2* pMemoryRequirements + + + + void vkGetImageSparseMemoryRequirements2 + VkDevice device + const VkImageSparseMemoryRequirementsInfo2* pInfo + uint32_t* pSparseMemoryRequirementCount + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements + + + + VkResult vkCreateSamplerYcbcrConversion + VkDevice device + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkSamplerYcbcrConversion* pYcbcrConversion + + + + void vkDestroySamplerYcbcrConversion + VkDevice device + VkSamplerYcbcrConversion ycbcrConversion + const VkAllocationCallbacks* pAllocator + + + + void vkGetDeviceQueue2 + VkDevice device + const VkDeviceQueueInfo2* pQueueInfo + VkQueue* pQueue + + + VkResult vkCreateValidationCacheEXT + VkDevice device + const VkValidationCacheCreateInfoEXT* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkValidationCacheEXT* pValidationCache + + + void vkDestroyValidationCacheEXT + VkDevice device + VkValidationCacheEXT validationCache + const VkAllocationCallbacks* pAllocator + + + VkResult vkGetValidationCacheDataEXT + VkDevice device + VkValidationCacheEXT validationCache + size_t* pDataSize + void* pData + + + VkResult vkMergeValidationCachesEXT + VkDevice device + VkValidationCacheEXT dstCache + uint32_t srcCacheCount + const VkValidationCacheEXT* pSrcCaches + + + void vkGetDescriptorSetLayoutSupport + VkDevice device + const VkDescriptorSetLayoutCreateInfo* pCreateInfo + VkDescriptorSetLayoutSupport* pSupport + + + + VkResult vkGetSwapchainGrallocUsageANDROID + VkDevice device + VkFormat format + VkImageUsageFlags imageUsage + int* grallocUsage + + + VkResult vkAcquireImageANDROID + VkDevice device + VkImage image + int nativeFenceFd + VkSemaphore semaphore + VkFence fence + + + VkResult vkQueueSignalReleaseImageANDROID + VkQueue queue + uint32_t waitSemaphoreCount + const VkSemaphore* pWaitSemaphores + VkImage image + int* pNativeFenceFd + + + VkResult vkGetShaderInfoAMD + VkDevice device + VkPipeline pipeline + VkShaderStageFlagBits shaderStage + VkShaderInfoTypeAMD infoType + size_t* pInfoSize + void* pInfo + + + VkResult vkSetDebugUtilsObjectNameEXT + VkDevice device + const VkDebugUtilsObjectNameInfoEXT* pNameInfo + + + VkResult vkSetDebugUtilsObjectTagEXT + VkDevice device + const VkDebugUtilsObjectTagInfoEXT* pTagInfo + + + void vkQueueBeginDebugUtilsLabelEXT + VkQueue queue + const VkDebugUtilsLabelEXT* pLabelInfo + + + void vkQueueEndDebugUtilsLabelEXT + VkQueue queue + + + void vkQueueInsertDebugUtilsLabelEXT + VkQueue queue + const VkDebugUtilsLabelEXT* pLabelInfo + 
+ + void vkCmdBeginDebugUtilsLabelEXT + VkCommandBuffer commandBuffer + const VkDebugUtilsLabelEXT* pLabelInfo + + + void vkCmdEndDebugUtilsLabelEXT + VkCommandBuffer commandBuffer + + + void vkCmdInsertDebugUtilsLabelEXT + VkCommandBuffer commandBuffer + const VkDebugUtilsLabelEXT* pLabelInfo + + + VkResult vkCreateDebugUtilsMessengerEXT + VkInstance instance + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo + const VkAllocationCallbacks* pAllocator + VkDebugUtilsMessengerEXT* pMessenger + + + void vkDestroyDebugUtilsMessengerEXT + VkInstance instance + VkDebugUtilsMessengerEXT messenger + const VkAllocationCallbacks* pAllocator + + + void vkSubmitDebugUtilsMessageEXT + VkInstance instance + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity + VkDebugUtilsMessageTypeFlagsEXT messageTypes + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData + + + VkResult vkGetMemoryHostPointerPropertiesEXT + VkDevice device + VkExternalMemoryHandleTypeFlagBits handleType + const void* pHostPointer + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties + + + void vkCmdWriteBufferMarkerAMD + VkCommandBuffer commandBuffer + VkPipelineStageFlagBits pipelineStage + VkBuffer dstBuffer + VkDeviceSize dstOffset + uint32_t marker + + + VkResult vkGetAndroidHardwareBufferPropertiesANDROID + VkDevice device + const struct AHardwareBuffer* buffer + VkAndroidHardwareBufferPropertiesANDROID* pProperties + + + VkResult vkGetMemoryAndroidHardwareBufferANDROID + VkDevice device + const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo + struct AHardwareBuffer** pBuffer +
offset 1 reserved for the old VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHX enum
offset 2 reserved for the old VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHX enum
Additional dependent types / tokens extending enumerants, not explicitly mentioned
This duplicates definitions in VK_KHR_device_group below
This duplicates definitions in other extensions, below
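Not part of the patch itself, but a minimal usage sketch of the two-call enumeration idiom that the array-returning commands registered above follow, shown with vkEnumerateInstanceExtensionProperties (declared earlier in this registry). The first call passes a NULL output array to query the element count; the second fills a caller-allocated array. Assumes a C99 compiler and linking against a Vulkan loader.

#include <stdio.h>
#include <stdlib.h>
#include <vulkan/vulkan.h>

int main(void)
{
    /* First call: NULL output array, query only the count. */
    uint32_t count = 0;
    if (vkEnumerateInstanceExtensionProperties(NULL, &count, NULL) != VK_SUCCESS)
        return EXIT_FAILURE;
    if (count == 0)
        return EXIT_SUCCESS;

    /* Second call: fill a caller-allocated array of 'count' elements. */
    VkExtensionProperties *props = malloc(count * sizeof *props);
    if (props == NULL)
        return EXIT_FAILURE;
    if (vkEnumerateInstanceExtensionProperties(NULL, &count, props) == VK_SUCCESS) {
        for (uint32_t i = 0; i < count; ++i)
            printf("%s (spec version %u)\n", props[i].extensionName, props[i].specVersion);
    }
    free(props);
    return EXIT_SUCCESS;
}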
enum offset=0 was mistakenly used for the 1.1 core enum VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES (value=1000094000). Fortunately, no conflict resulted.
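Likewise an illustrative sketch rather than registry content: vkEnumerateInstanceVersion (declared earlier in this file) is new in Vulkan 1.1, so portable code resolves it through vkGetInstanceProcAddr and falls back to 1.0 when a 1.0 loader does not export it.

#include <stdio.h>
#include <vulkan/vulkan.h>

int main(void)
{
    uint32_t version = VK_API_VERSION_1_0; /* assume 1.0 unless the loader says otherwise */

    /* Not exported by 1.0 loaders, so resolve the entry point dynamically. */
    PFN_vkEnumerateInstanceVersion pfnEnumerateInstanceVersion =
        (PFN_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(NULL, "vkEnumerateInstanceVersion");
    if (pfnEnumerateInstanceVersion != NULL)
        pfnEnumerateInstanceVersion(&version);

    printf("Instance-level Vulkan version: %u.%u.%u\n",
           VK_VERSION_MAJOR(version), VK_VERSION_MINOR(version), VK_VERSION_PATCH(version));
    return 0;
}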
-- 
2.7.4