1 /*-------------------------------------------------------------------------
5 * Copyright (c) 2015 Google Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
21 * \brief Null (do-nothing) Vulkan implementation.
22 *//*--------------------------------------------------------------------*/
24 #include "vkNullDriver.hpp"
25 #include "vkPlatform.hpp"
26 #include "vkImageUtil.hpp"
27 #include "vkQueryUtil.hpp"
28 #include "tcuFunctionLibrary.hpp"
31 #if (DE_OS == DE_OS_ANDROID) && defined(__ANDROID_API_O__) && (DE_ANDROID_API >= __ANDROID_API_O__ /* __ANDROID_API_O__ */)
32 # define USE_ANDROID_O_HARDWARE_BUFFER
34 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
35 # include <android/hardware_buffer.h>
// Allocates storage for a single object of type T through the caller-supplied
// Vulkan allocation callbacks. Throws std::bad_alloc on callback failure.
52 void* allocateSystemMem (const VkAllocationCallbacks* pAllocator, VkSystemAllocationScope scope)
// Request sizeof(void*) alignment for the object storage.
54 void* ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void*), scope);
56 throw std::bad_alloc();
// Returns memory obtained from allocateSystemMem() to the same allocator
// via its pfnFree callback.
60 void freeSystemMem (const VkAllocationCallbacks* pAllocator, void* mem)
62 pAllocator->pfnFree(pAllocator->pUserData, mem);
// Creates an Object from (parent, createInfo) and returns it as a dispatchable
// Handle. With a user allocator the object is placement-constructed in callback
// memory; without one, plain operator new is used.
65 template<typename Object, typename Handle, typename Parent, typename CreateInfo>
66 Handle allocateHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
68 Object* obj = DE_NULL;
72 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
75 obj = new (mem) Object(parent, pCreateInfo);
// Placement new must place the object at the start of the allocation:
// freeHandle() later passes the object pointer straight to freeSystemMem().
76 DE_ASSERT(obj == mem);
// Constructor threw: release the raw allocation before propagating.
80 pAllocator->pfnFree(pAllocator->pUserData, mem);
85 obj = new Object(parent, pCreateInfo);
87 return reinterpret_cast<Handle>(obj);
// Overload for objects constructed from a create-info only (no parent handle).
// Same allocation strategy as the (parent, createInfo) overload.
90 template<typename Object, typename Handle, typename CreateInfo>
91 Handle allocateHandle (const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
93 Object* obj = DE_NULL;
97 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
100 obj = new (mem) Object(pCreateInfo);
// Object pointer must equal the allocation pointer for freeHandle().
101 DE_ASSERT(obj == mem);
// Constructor threw: free the raw storage before propagating.
105 pAllocator->pfnFree(pAllocator->pUserData, mem);
110 obj = new Object(pCreateInfo);
112 return reinterpret_cast<Handle>(obj);
// Overload for objects constructed from a parent handle only (no create-info).
115 template<typename Object, typename Handle, typename Parent>
116 Handle allocateHandle (Parent parent, const VkAllocationCallbacks* pAllocator)
118 Object* obj = DE_NULL;
122 void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
125 obj = new (mem) Object(parent)
126 DE_ASSERT(obj == mem);
// Constructor threw: free the raw storage before propagating.
130 pAllocator->pfnFree(pAllocator->pUserData, mem);
135 obj = new Object(parent);
137 return reinterpret_cast<Handle>(obj);
// Destroys a dispatchable object created by allocateHandle() and releases
// its storage through the matching path (user callbacks vs. operator delete).
140 template<typename Object, typename Handle>
141 void freeHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
143 Object* obj = reinterpret_cast<Object*>(handle);
// NOTE(review): the explicit obj->~Object() call is not visible in this
// listing — presumably it precedes this free on the allocator path; confirm.
148 freeSystemMem(pAllocator, reinterpret_cast<void*>(obj));
// Creates a non-dispatchable object and packs its pointer — cast to the
// BaseObject subobject — into the 64-bit Vulkan handle wrapper type.
154 template<typename Object, typename BaseObject, typename Handle, typename Parent, typename CreateInfo>
155 Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
157 Object* const obj = allocateHandle<Object, Object*>(parent, pCreateInfo, pAllocator);
// static_cast before the integer cast so the stored value is the BaseObject
// address, matching how the handle is later reinterpreted back to a pointer.
158 return Handle((deUint64)(deUintptr)static_cast<BaseObject*>(obj));
// Convenience overload: the object type serves as its own base.
161 template<typename Object, typename Handle, typename Parent, typename CreateInfo>
162 Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
164 return allocateNonDispHandle<Object, Object, Handle, Parent, CreateInfo>(parent, pCreateInfo, pAllocator);
// Overload for non-dispatchable objects constructed from a parent only;
// stores the object pointer directly in the handle.
167 template<typename Object, typename Handle, typename Parent>
168 Handle allocateNonDispHandle (Parent parent, const VkAllocationCallbacks* pAllocator)
170 Object* const obj = allocateHandle<Object, Object*>(parent, pAllocator);
171 return Handle((deUint64)(deUintptr)obj);
// Unpacks a non-dispatchable handle back to its Object pointer and frees it
// through freeHandle(), honoring the optional user allocator.
174 template<typename Object, typename Handle>
175 void freeNonDispHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
177 freeHandle<Object>(reinterpret_cast<Object*>((deUintptr)handle.getInternal()), pAllocator);
180 // Object definitions
// Wraps an object-creation statement in a try/catch that converts
// std::bad_alloc and thrown VkResult codes into Vulkan return values.
// (Macro body is only partially visible in this listing.)
182 #define VK_NULL_RETURN(STMT) \
187 } catch (const std::bad_alloc&) { \
188 return VK_ERROR_OUT_OF_HOST_MEMORY; \
189 } catch (VkResult res) { \
192 } while (deGetFalse())
194 // \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
// Function-table entry: pairs an entry-point name string with its pointer.
195 #define VK_NULL_FUNC_ENTRY(NAME, FUNC) { #NAME, (deFunctionPtr)FUNC } // NOLINT(FUNC)
// Declare a trivial device-child stub whose constructor ignores its create
// info; the POSTFIX variant handles extension-suffixed types (KHR/EXT/...).
197 #define VK_NULL_DEFINE_DEVICE_OBJ(NAME) \
200 NAME (VkDevice, const Vk##NAME##CreateInfo*) {} \
203 #define VK_NULL_DEFINE_OBJ_WITH_POSTFIX(DEVICE_OR_INSTANCE, NAME, POSTFIX) \
204 struct NAME##POSTFIX \
206 NAME##POSTFIX (DEVICE_OR_INSTANCE, const Vk##NAME##CreateInfo##POSTFIX*) {} \
// Instantiate the stateless object stubs used by the null driver. Each stub
// exists only so its handle can round-trip through create/destroy calls.
209 VK_NULL_DEFINE_DEVICE_OBJ(Fence);
210 VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
211 VK_NULL_DEFINE_DEVICE_OBJ(Event);
212 VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
213 VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
214 VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
215 VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
216 VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
217 VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
218 VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
219 VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
220 VK_NULL_DEFINE_DEVICE_OBJ(SamplerYcbcrConversion);
221 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Swapchain, KHR)
222 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugUtilsMessenger, EXT)
// Extension objects below are excluded from Vulkan SC builds.
224 #ifndef CTS_USES_VULKANSC
225 VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
226 VK_NULL_DEFINE_DEVICE_OBJ(DescriptorUpdateTemplate);
227 VK_NULL_DEFINE_DEVICE_OBJ(PrivateDataSlot);
228 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkInstance, DebugReportCallback, EXT)
229 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuModule, NVX)
230 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, CuFunction, NVX)
231 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, Micromap, EXT)
232 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, OpticalFlowSession, NV)
233 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, IndirectCommandsLayout, NV)
234 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, NV)
235 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, AccelerationStructure, KHR)
236 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSession, KHR)
237 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, VideoSessionParameters, KHR)
238 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, ValidationCache, EXT)
239 VK_NULL_DEFINE_OBJ_WITH_POSTFIX(VkDevice, BufferCollection, FUCHSIA)
240 #endif // CTS_USES_VULKANSC
// Instance stub: resolves instance-level entry points from a static function
// table. (Class header and constructor body are outside this listing.)
245 Instance (const VkInstanceCreateInfo* instanceInfo);
248 PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }
// Name -> function-pointer table; const after construction.
251 const tcu::StaticFunctionLibrary m_functions;
// SurfaceKHR stub: accepts every platform-specific surface create-info and
// keeps no state. Platform variants are compiled out for Vulkan SC.
257 #ifndef CTS_USES_VULKANSC
258 SurfaceKHR (VkInstance, const VkXlibSurfaceCreateInfoKHR*) {}
259 SurfaceKHR (VkInstance, const VkXcbSurfaceCreateInfoKHR*) {}
260 SurfaceKHR (VkInstance, const VkWaylandSurfaceCreateInfoKHR*) {}
261 SurfaceKHR (VkInstance, const VkAndroidSurfaceCreateInfoKHR*) {}
262 SurfaceKHR (VkInstance, const VkWin32SurfaceCreateInfoKHR*) {}
263 SurfaceKHR (VkInstance, const VkViSurfaceCreateInfoNN*) {}
264 SurfaceKHR (VkInstance, const VkIOSSurfaceCreateInfoMVK*) {}
265 SurfaceKHR (VkInstance, const VkMacOSSurfaceCreateInfoMVK*) {}
266 SurfaceKHR (VkInstance, const VkImagePipeSurfaceCreateInfoFUCHSIA*) {}
267 SurfaceKHR (VkInstance, const VkStreamDescriptorSurfaceCreateInfoGGP*) {}
268 SurfaceKHR (VkInstance, const VkMetalSurfaceCreateInfoEXT*) {}
269 SurfaceKHR (VkInstance, const VkScreenSurfaceCreateInfoQNX*) {}
270 #endif // CTS_USES_VULKANSC
271 SurfaceKHR (VkInstance, const VkDisplaySurfaceCreateInfoKHR*) {}
272 SurfaceKHR (VkInstance, const VkHeadlessSurfaceCreateInfoEXT*) {}
273 ~SurfaceKHR (void) {}
// DisplayModeKHR stub: stateless; create info is ignored.
279 DisplayModeKHR (VkDisplayKHR, const VkDisplayModeCreateInfoKHR*) {}
280 ~DisplayModeKHR (void) {}
// Device stub: resolves device-level entry points from a static function
// table. (Class header and constructor body are outside this listing.)
286 Device (VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* deviceInfo);
289 PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }
// Name -> function-pointer table; const after construction.
292 const tcu::StaticFunctionLibrary m_functions;
// Pipeline stub: one constructor per pipeline kind, all ignoring their
// create infos. Ray-tracing variants are excluded from Vulkan SC builds.
298 Pipeline (VkDevice, const VkGraphicsPipelineCreateInfo*) {}
299 Pipeline (VkDevice, const VkComputePipelineCreateInfo*) {}
300 #ifndef CTS_USES_VULKANSC
301 Pipeline (VkDevice, const VkRayTracingPipelineCreateInfoNV*) {}
302 Pipeline (VkDevice, const VkRayTracingPipelineCreateInfoKHR*) {}
303 #endif // CTS_USES_VULKANSC
// RenderPass stub: handles both the original and the version-2 create info.
309 RenderPass (VkDevice, const VkRenderPassCreateInfo*) {}
310 RenderPass (VkDevice, const VkRenderPassCreateInfo2*) {}
// Buffer stub: records only the requested size (used for memory-requirement
// queries); all other create-info fields are ignored.
316 Buffer (VkDevice, const VkBufferCreateInfo* pCreateInfo)
317 : m_size (pCreateInfo->size)
321 VkDeviceSize getSize (void) const { return m_size; }
324 const VkDeviceSize m_size;
// Extracts external-memory handle-type flags from an image create-info's
// pNext chain; returns 0 when no VkExternalMemoryImageCreateInfo is chained.
327 VkExternalMemoryHandleTypeFlags getExternalTypesHandle (const VkImageCreateInfo* pCreateInfo)
329 const VkExternalMemoryImageCreateInfo* const externalInfo = findStructure<VkExternalMemoryImageCreateInfo> (pCreateInfo->pNext);
331 return externalInfo ? externalInfo->handleTypes : 0u;
// Image stub: captures the create-info fields needed later for memory
// requirements and Android hardware-buffer export; no storage is allocated.
337 Image (VkDevice, const VkImageCreateInfo* pCreateInfo)
338 : m_imageType (pCreateInfo->imageType)
339 , m_format (pCreateInfo->format)
340 , m_extent (pCreateInfo->extent)
341 , m_arrayLayers (pCreateInfo->arrayLayers)
342 , m_samples (pCreateInfo->samples)
343 , m_usage (pCreateInfo->usage)
344 , m_flags (pCreateInfo->flags)
345 , m_externalHandleTypes (getExternalTypesHandle(pCreateInfo))
349 VkImageType getImageType (void) const { return m_imageType; }
350 VkFormat getFormat (void) const { return m_format; }
351 VkExtent3D getExtent (void) const { return m_extent; }
352 deUint32 getArrayLayers (void) const { return m_arrayLayers; }
353 VkSampleCountFlagBits getSamples (void) const { return m_samples; }
354 VkImageUsageFlags getUsage (void) const { return m_usage; }
355 VkImageCreateFlags getFlags (void) const { return m_flags; }
356 VkExternalMemoryHandleTypeFlags getExternalHandleTypes (void) const { return m_externalHandleTypes; }
// All state is immutable after construction.
359 const VkImageType m_imageType;
360 const VkFormat m_format;
361 const VkExtent3D m_extent;
362 const deUint32 m_arrayLayers;
363 const VkSampleCountFlagBits m_samples;
364 const VkImageUsageFlags m_usage;
365 const VkImageCreateFlags m_flags;
366 const VkExternalMemoryHandleTypeFlags m_externalHandleTypes;
// Allocates host heap storage backing a VkDeviceMemory allocation.
// Throws std::bad_alloc when deMalloc fails. Zero-size handling is only
// partially visible here (the allocationSize > 0 branch).
369 void* allocateHeap (const VkMemoryAllocateInfo* pAllocInfo)
371 // \todo [2015-12-03 pyry] Alignment requirements?
372 // \todo [2015-12-03 pyry] Empty allocations okay?
373 if (pAllocInfo->allocationSize > 0)
// allocationSize is VkDeviceSize (64-bit); truncated to size_t for deMalloc.
375 void* const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
377 throw std::bad_alloc();
// Releases storage obtained from allocateHeap().
384 void freeHeap (void* ptr)
// Abstract device-memory interface: concrete backings provide host map/unmap.
392 virtual ~DeviceMemory (void) {}
393 virtual void* map (void) = 0;
394 virtual void unmap (void) = 0;
// Host-heap-backed DeviceMemory: map() simply returns the heap pointer;
// unmap() is a no-op since the memory is always host-accessible.
397 class PrivateDeviceMemory : public DeviceMemory
400 PrivateDeviceMemory (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
401 : m_memory(allocateHeap(pAllocInfo))
403 // \todo [2016-08-03 pyry] In some cases leaving data unintialized would help valgrind analysis,
404 // but currently it mostly hinders it.
// Fill with 0xcd so use of "uninitialized" device memory is recognizable.
406 deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
408 virtual ~PrivateDeviceMemory (void)
413 virtual void* map (void) /*override*/ { return m_memory; }
414 virtual void unmap (void) /*override*/ {}
417 void* const m_memory;
420 #ifndef CTS_USES_VULKANSC
422 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
// Resolves the AHardwareBuffer for a VkDeviceMemory allocation: acquires an
// imported buffer when VkImportAndroidHardwareBufferInfoANDROID is chained,
// otherwise allocates a new one for export. The returned buffer carries a
// reference owned by the caller.
423 AHardwareBuffer* findOrCreateHwBuffer (const VkMemoryAllocateInfo* pAllocInfo)
425 const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocInfo->pNext);
426 const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocInfo->pNext);
// A dedicated-allocation image determines the buffer's format/extent/usage.
427 const VkMemoryDedicatedAllocateInfo* const dedicatedInfo = findStructure<VkMemoryDedicatedAllocateInfo>(pAllocInfo->pNext);
428 const Image* const image = dedicatedInfo && !!dedicatedInfo->image ? reinterpret_cast<const Image*>(dedicatedInfo->image.getInternal()) : DE_NULL;
429 AHardwareBuffer* hwbuffer = DE_NULL;
431 // Import and export aren't mutually exclusive; we can have both simultaneously.
432 DE_ASSERT((importInfo && importInfo->buffer.internal) ||
433 (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0));
435 if (importInfo && importInfo->buffer.internal)
437 hwbuffer = (AHardwareBuffer*)importInfo->buffer.internal;
// Take a reference on the imported buffer; released in the destructor
// of the owning device-memory object.
438 AHardwareBuffer_acquire(hwbuffer);
440 else if (exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
442 AHardwareBuffer_Desc hwbufferDesc;
443 deMemset(&hwbufferDesc, 0, sizeof(hwbufferDesc));
// Dedicated image allocation: derive buffer geometry/format from the image.
447 hwbufferDesc.width = image->getExtent().width;
448 hwbufferDesc.height = image->getExtent().height;
449 hwbufferDesc.layers = image->getArrayLayers();
// Map the small set of Vulkan formats supported for AHB export.
450 switch (image->getFormat())
452 case VK_FORMAT_R8G8B8A8_UNORM:
453 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
455 case VK_FORMAT_R8G8B8_UNORM:
456 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
458 case VK_FORMAT_R5G6B5_UNORM_PACK16:
459 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
461 case VK_FORMAT_R16G16B16A16_SFLOAT:
462 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
464 case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
465 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
468 DE_FATAL("Unsupported image format for Android hardware buffer export");
// Translate Vulkan image usage to AHB GPU usage flags.
471 if ((image->getUsage() & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
472 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
473 if ((image->getUsage() & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0)
474 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
475 // if ((image->getFlags() & VK_IMAGE_CREATE_PROTECTED_BIT) != 0)
476 // hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
478 // Make sure we have at least one AHB GPU usage, even if the image doesn't have any
479 // Vulkan usages with corresponding to AHB GPU usages.
480 if ((image->getUsage() & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) == 0)
481 hwbufferDesc.usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
// Non-dedicated allocation: export as a 1-D BLOB buffer of allocationSize.
// NOTE(review): trailing commas below chain the assignments into a single
// comma-operator statement — legal, but easy to misread as separate ones.
485 hwbufferDesc.width = static_cast<deUint32>(pAllocInfo->allocationSize);
486 hwbufferDesc.height = 1,
487 hwbufferDesc.layers = 1,
488 hwbufferDesc.format = AHARDWAREBUFFER_FORMAT_BLOB,
489 hwbufferDesc.usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
492 AHardwareBuffer_allocate(&hwbufferDesc, &hwbuffer);
// DeviceMemory backed by an AHardwareBuffer (imported or freshly exported).
// map()/unmap() use AHardwareBuffer_lock/_unlock for CPU access.
498 class ExternalDeviceMemoryAndroid : public DeviceMemory
501 ExternalDeviceMemoryAndroid (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
502 : m_hwbuffer(findOrCreateHwBuffer(pAllocInfo))
504 virtual ~ExternalDeviceMemoryAndroid (void)
// Drop the reference taken (or created) by findOrCreateHwBuffer().
507 AHardwareBuffer_release(m_hwbuffer);
510 virtual void* map (void) /*override*/
// -1 fence fd => lock immediately without waiting on a sync fence.
513 AHardwareBuffer_lock(m_hwbuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, NULL, &p);
517 virtual void unmap (void) /*override*/ { AHardwareBuffer_unlock(m_hwbuffer, NULL); }
519 AHardwareBuffer* getHwBuffer (void) { return m_hwbuffer; }
522 AHardwareBuffer* const m_hwbuffer;
524 #endif // defined(USE_ANDROID_O_HARDWARE_BUFFER)
526 #endif // CTS_USES_VULKANSC
// DeferredOperationKHR stub: no actual deferral is performed.
528 class DeferredOperationKHR
531 DeferredOperationKHR (VkDevice)
// CommandBuffer stub: recording is a no-op; parameters are ignored.
538 CommandBuffer (VkDevice, VkCommandPool, VkCommandBufferLevel)
// CommandPool stub: owns the CommandBuffer objects it allocates and tracks
// them in m_buffers so they can be freed individually or with the pool.
545 CommandPool (VkDevice device, const VkCommandPoolCreateInfo*)
548 #ifndef CTS_USES_VULKANSC
550 #endif // CTS_USES_VULKANSC
552 VkCommandBuffer allocate (VkCommandBufferLevel level);
553 void free (VkCommandBuffer buffer);
556 const VkDevice m_device;
// All command buffers currently owned by this pool.
558 vector<CommandBuffer*> m_buffers;
561 #ifndef CTS_USES_VULKANSC
// Destroying the pool frees every command buffer still owned by it.
563 CommandPool::~CommandPool (void)
565 for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
566 delete m_buffers[ndx];
569 #endif // CTS_USES_VULKANSC
// Creates a CommandBuffer owned by this pool, registers it for cleanup, and
// returns it as a dispatchable handle.
571 VkCommandBuffer CommandPool::allocate (VkCommandBufferLevel level)
// The pool pointer doubles as the VkCommandPool handle value.
573 CommandBuffer* const impl = new CommandBuffer(m_device, VkCommandPool(reinterpret_cast<deUintptr>(this)), level);
577 m_buffers.push_back(impl);
585 return reinterpret_cast<VkCommandBuffer>(impl);
// Frees one command buffer: swap-and-pop removes it from the ownership list
// in O(1); freeing a buffer this pool does not own is a fatal error.
588 void CommandPool::free (VkCommandBuffer buffer)
590 CommandBuffer* const impl = reinterpret_cast<CommandBuffer*>(buffer);
592 for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
594 if (m_buffers[ndx] == impl)
596 std::swap(m_buffers[ndx], m_buffers.back());
597 m_buffers.pop_back();
603 DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
// DescriptorSet stub: stateless; all constructor arguments are ignored.
609 DescriptorSet (VkDevice, VkDescriptorPool, VkDescriptorSetLayout) {}
// DescriptorPool stub: owns the DescriptorSet objects it allocates and tracks
// them in m_managedSets; m_flags gates per-set free (see DescriptorPool::free).
615 DescriptorPool (VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo)
617 , m_flags (pCreateInfo->flags)
619 ~DescriptorPool (void)
624 VkDescriptorSet allocate (VkDescriptorSetLayout setLayout);
625 void free (VkDescriptorSet set);
630 const VkDevice m_device;
631 const VkDescriptorPoolCreateFlags m_flags;
// All descriptor sets currently owned by this pool.
633 vector<DescriptorSet*> m_managedSets;
// Creates a DescriptorSet owned by this pool, registers it for cleanup, and
// returns it as a non-dispatchable handle.
636 VkDescriptorSet DescriptorPool::allocate (VkDescriptorSetLayout setLayout)
// The pool pointer doubles as the VkDescriptorPool handle value.
638 DescriptorSet* const impl = new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<deUintptr>(this)), setLayout);
642 m_managedSets.push_back(impl);
650 return VkDescriptorSet(reinterpret_cast<deUintptr>(impl));
// Frees one descriptor set; only valid when the pool was created with
// FREE_DESCRIPTOR_SET_BIT. Swap-and-pop removes it from the list in O(1);
// freeing a set this pool does not own is a fatal error.
653 void DescriptorPool::free (VkDescriptorSet set)
655 DescriptorSet* const impl = reinterpret_cast<DescriptorSet*>((deUintptr)set.getInternal())
657 DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
660 for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
662 if (m_managedSets[ndx] == impl)
664 std::swap(m_managedSets[ndx], m_managedSets.back());
665 m_managedSets.pop_back();
671 DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
// Destroys every descriptor set owned by the pool (vkResetDescriptorPool).
674 void DescriptorPool::reset (void)
676 for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
677 delete m_managedSets[ndx];
678 m_managedSets.clear();
681 // API implementation
// vkGetDeviceProcAddr: look up the entry point in the Device's function table.
686 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr (VkDevice device, const char* pName)
688 return reinterpret_cast<Device*>(device)->getProcAddr(pName);
// vkCreateGraphicsPipelines: creates `count` pipelines; on any failure the
// pipelines created so far are destroyed, making the output all-or-nothing.
// The pipeline cache is ignored by the null driver.
691 VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
696 for (allocNdx = 0; allocNdx < count; allocNdx++)
697 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
// Roll back partial work on allocation failure, then report OOM.
701 catch (const std::bad_alloc&)
703 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
704 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
706 return VK_ERROR_OUT_OF_HOST_MEMORY;
// Roll back on any other thrown VkResult as well.
710 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
711 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
// vkCreateComputePipelines: same all-or-nothing batch creation pattern as
// createGraphicsPipelines; the pipeline cache is ignored.
717 VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
722 for (allocNdx = 0; allocNdx < count; allocNdx++)
723 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
// Roll back partial work on allocation failure, then report OOM.
727 catch (const std::bad_alloc&)
729 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
730 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
732 return VK_ERROR_OUT_OF_HOST_MEMORY;
// Roll back on any other thrown VkResult as well.
736 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
737 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
743 #ifndef CTS_USES_VULKANSC
// vkCreateRayTracingPipelinesNV: all-or-nothing batch creation, matching the
// graphics/compute variants.
// NOTE(review): this NV entry point takes VkRayTracingPipelineCreateInfoKHR*,
// but the Vulkan spec declares vkCreateRayTracingPipelinesNV with
// VkRayTracingPipelineCreateInfoNV — confirm this mismatch is intentional.
745 VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesNV (VkDevice device, VkPipelineCache, deUint32 count, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
750 for (allocNdx = 0; allocNdx < count; allocNdx++)
751 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
// Roll back partial work on allocation failure, then report OOM.
755 catch (const std::bad_alloc&)
757 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
758 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
760 return VK_ERROR_OUT_OF_HOST_MEMORY;
// Roll back on any other thrown VkResult as well.
764 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
765 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
// vkCreateRayTracingPipelinesKHR: all-or-nothing batch creation, matching the
// graphics/compute variants; deferred operation and cache are ignored.
771 VKAPI_ATTR VkResult VKAPI_CALL createRayTracingPipelinesKHR (VkDevice device, VkPipelineCache, deUint32 count, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
776 for (allocNdx = 0; allocNdx < count; allocNdx++)
777 pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
// Roll back partial work on allocation failure, then report OOM.
781 catch (const std::bad_alloc&)
783 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
784 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
786 return VK_ERROR_OUT_OF_HOST_MEMORY;
// Roll back on any other thrown VkResult as well.
790 for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
791 freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
797 #endif // CTS_USES_VULKANSC
// vkEnumeratePhysicalDevices: the null driver reports exactly one physical
// device whose handle is the integer value 1.
799 VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices (VkInstance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pDevices)
801 if (pDevices && *pPhysicalDeviceCount >= 1u)
802 *pDevices = reinterpret_cast<VkPhysicalDevice>((void*)(deUintptr)1u);
804 *pPhysicalDeviceCount = 1;
// Shared helper implementing the Vulkan two-call count/array idiom for
// extension enumeration: with pProperties == NULL it only reports the count;
// otherwise it copies up to *pPropertyCount entries and returns VK_INCOMPLETE
// when the destination array was too small.
809 VkResult enumerateExtensions (deUint32 numExtensions, const VkExtensionProperties* extensions, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
811 const deUint32 dstSize = pPropertyCount ? *pPropertyCount : 0;
814 *pPropertyCount = numExtensions;
818 for (deUint32 ndx = 0; ndx < de::min(numExtensions, dstSize); ++ndx)
819 pProperties[ndx] = extensions[ndx];
821 if (dstSize < numExtensions)
822 return VK_INCOMPLETE;
// vkEnumerateInstanceExtensionProperties: fixed list of instance extensions
// the null driver advertises; layer queries fall through to an empty list.
828 VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties (const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
830 static const VkExtensionProperties s_extensions[] =
832 { "VK_KHR_get_physical_device_properties2", 1u },
833 { "VK_KHR_external_memory_capabilities", 1u },
837 return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
839 return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
// vkEnumerateDeviceExtensionProperties: fixed list of device extensions the
// null driver advertises (the AHB extension only on Android O+ builds);
// layer queries fall through to an empty list.
842 VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties (VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
844 DE_UNREF(physicalDevice);
846 static const VkExtensionProperties s_extensions[] =
848 { "VK_KHR_bind_memory2", 1u },
849 { "VK_KHR_external_memory", 1u },
850 { "VK_KHR_get_memory_requirements2", 1u },
851 { "VK_KHR_maintenance1", 1u },
852 { "VK_KHR_sampler_ycbcr_conversion", 1u },
853 #if defined(USE_ANDROID_O_HARDWARE_BUFFER)
854 { "VK_ANDROID_external_memory_android_hardware_buffer", 1u },
859 return enumerateExtensions((deUint32)DE_LENGTH_OF_ARRAY(s_extensions), s_extensions, pPropertyCount, pProperties);
861 return enumerateExtensions(0, DE_NULL, pPropertyCount, pProperties);
// vkGetPhysicalDeviceFeatures: the null driver claims support for every core
// feature so the maximum number of CTS tests can be exercised against it.
864 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
866 DE_UNREF(physicalDevice);
868 // Enable all features allow as many tests to run as possible
869 pFeatures->robustBufferAccess = VK_TRUE;
870 pFeatures->fullDrawIndexUint32 = VK_TRUE;
871 pFeatures->imageCubeArray = VK_TRUE;
872 pFeatures->independentBlend = VK_TRUE;
873 pFeatures->geometryShader = VK_TRUE;
874 pFeatures->tessellationShader = VK_TRUE;
875 pFeatures->sampleRateShading = VK_TRUE;
876 pFeatures->dualSrcBlend = VK_TRUE;
877 pFeatures->logicOp = VK_TRUE;
878 pFeatures->multiDrawIndirect = VK_TRUE;
879 pFeatures->drawIndirectFirstInstance = VK_TRUE;
880 pFeatures->depthClamp = VK_TRUE;
881 pFeatures->depthBiasClamp = VK_TRUE;
882 pFeatures->fillModeNonSolid = VK_TRUE;
883 pFeatures->depthBounds = VK_TRUE;
884 pFeatures->wideLines = VK_TRUE;
885 pFeatures->largePoints = VK_TRUE;
886 pFeatures->alphaToOne = VK_TRUE;
887 pFeatures->multiViewport = VK_TRUE;
888 pFeatures->samplerAnisotropy = VK_TRUE;
889 pFeatures->textureCompressionETC2 = VK_TRUE;
890 pFeatures->textureCompressionASTC_LDR = VK_TRUE;
891 pFeatures->textureCompressionBC = VK_TRUE;
892 pFeatures->occlusionQueryPrecise = VK_TRUE;
893 pFeatures->pipelineStatisticsQuery = VK_TRUE;
894 pFeatures->vertexPipelineStoresAndAtomics = VK_TRUE;
895 pFeatures->fragmentStoresAndAtomics = VK_TRUE;
896 pFeatures->shaderTessellationAndGeometryPointSize = VK_TRUE;
897 pFeatures->shaderImageGatherExtended = VK_TRUE;
898 pFeatures->shaderStorageImageExtendedFormats = VK_TRUE;
899 pFeatures->shaderStorageImageMultisample = VK_TRUE;
900 pFeatures->shaderStorageImageReadWithoutFormat = VK_TRUE;
901 pFeatures->shaderStorageImageWriteWithoutFormat = VK_TRUE;
902 pFeatures->shaderUniformBufferArrayDynamicIndexing = VK_TRUE;
903 pFeatures->shaderSampledImageArrayDynamicIndexing = VK_TRUE;
904 pFeatures->shaderStorageBufferArrayDynamicIndexing = VK_TRUE;
905 pFeatures->shaderStorageImageArrayDynamicIndexing = VK_TRUE;
906 pFeatures->shaderClipDistance = VK_TRUE;
907 pFeatures->shaderCullDistance = VK_TRUE;
908 pFeatures->shaderFloat64 = VK_TRUE;
909 pFeatures->shaderInt64 = VK_TRUE;
910 pFeatures->shaderInt16 = VK_TRUE;
911 pFeatures->shaderResourceResidency = VK_TRUE;
912 pFeatures->shaderResourceMinLod = VK_TRUE;
913 pFeatures->sparseBinding = VK_TRUE;
914 pFeatures->sparseResidencyBuffer = VK_TRUE;
915 pFeatures->sparseResidencyImage2D = VK_TRUE;
916 pFeatures->sparseResidencyImage3D = VK_TRUE;
917 pFeatures->sparseResidency2Samples = VK_TRUE;
918 pFeatures->sparseResidency4Samples = VK_TRUE;
919 pFeatures->sparseResidency8Samples = VK_TRUE;
920 pFeatures->sparseResidency16Samples = VK_TRUE;
921 pFeatures->sparseResidencyAliased = VK_TRUE;
922 pFeatures->variableMultisampleRate = VK_TRUE;
923 pFeatures->inheritedQueries = VK_TRUE;
// vkGetPhysicalDeviceProperties: zero-fills the struct, then reports fixed
// identity values (Vulkan 1.1, device "null", type OTHER) and a hard-coded
// set of limits for the null driver.
926 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties (VkPhysicalDevice, VkPhysicalDeviceProperties* props)
928 deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));
930 props->apiVersion = VK_API_VERSION_1_1;
931 props->driverVersion = 1u;
932 props->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
// 5 bytes copies "null" including its terminating NUL.
934 deMemcpy(props->deviceName, "null", 5);
937 props->limits.maxImageDimension1D = 4096;
938 props->limits.maxImageDimension2D = 4096;
939 props->limits.maxImageDimension3D = 256;
940 props->limits.maxImageDimensionCube = 4096;
941 props->limits.maxImageArrayLayers = 256;
942 props->limits.maxTexelBufferElements = 65536;
943 props->limits.maxUniformBufferRange = 16384;
944 props->limits.maxStorageBufferRange = 1u<<27;
945 props->limits.maxPushConstantsSize = 128;
946 props->limits.maxMemoryAllocationCount = 4096;
947 props->limits.maxSamplerAllocationCount = 4000;
948 props->limits.bufferImageGranularity = 131072;
949 props->limits.sparseAddressSpaceSize = 1u<<31;
950 props->limits.maxBoundDescriptorSets = 4;
951 props->limits.maxPerStageDescriptorSamplers = 16;
952 props->limits.maxPerStageDescriptorUniformBuffers = 12;
953 props->limits.maxPerStageDescriptorStorageBuffers = 4;
954 props->limits.maxPerStageDescriptorSampledImages = 16;
955 props->limits.maxPerStageDescriptorStorageImages = 4;
956 props->limits.maxPerStageDescriptorInputAttachments = 4;
957 props->limits.maxPerStageResources = 128;
958 props->limits.maxDescriptorSetSamplers = 96;
959 props->limits.maxDescriptorSetUniformBuffers = 72;
960 props->limits.maxDescriptorSetUniformBuffersDynamic = 8;
961 props->limits.maxDescriptorSetStorageBuffers = 24;
962 props->limits.maxDescriptorSetStorageBuffersDynamic = 4;
963 props->limits.maxDescriptorSetSampledImages = 96;
964 props->limits.maxDescriptorSetStorageImages = 24;
965 props->limits.maxDescriptorSetInputAttachments = 4;
966 props->limits.maxVertexInputAttributes = 16;
967 props->limits.maxVertexInputBindings = 16;
968 props->limits.maxVertexInputAttributeOffset = 2047;
969 props->limits.maxVertexInputBindingStride = 2048;
970 props->limits.maxVertexOutputComponents = 64;
971 props->limits.maxTessellationGenerationLevel = 64;
972 props->limits.maxTessellationPatchSize = 32;
973 props->limits.maxTessellationControlPerVertexInputComponents = 64;
974 props->limits.maxTessellationControlPerVertexOutputComponents = 64;
975 props->limits.maxTessellationControlPerPatchOutputComponents = 120;
976 props->limits.maxTessellationControlTotalOutputComponents = 2048;
977 props->limits.maxTessellationEvaluationInputComponents = 64;
978 props->limits.maxTessellationEvaluationOutputComponents = 64;
979 props->limits.maxGeometryShaderInvocations = 32;
980 props->limits.maxGeometryInputComponents = 64;
981 props->limits.maxGeometryOutputComponents = 64;
982 props->limits.maxGeometryOutputVertices = 256;
983 props->limits.maxGeometryTotalOutputComponents = 1024;
984 props->limits.maxFragmentInputComponents = 64;
985 props->limits.maxFragmentOutputAttachments = 4;
986 props->limits.maxFragmentDualSrcAttachments = 1;
987 props->limits.maxFragmentCombinedOutputResources = 4;
988 props->limits.maxComputeSharedMemorySize = 16384;
989 props->limits.maxComputeWorkGroupCount[0] = 65535;
990 props->limits.maxComputeWorkGroupCount[1] = 65535;
991 props->limits.maxComputeWorkGroupCount[2] = 65535;
992 props->limits.maxComputeWorkGroupInvocations = 128;
993 props->limits.maxComputeWorkGroupSize[0] = 128;
994 props->limits.maxComputeWorkGroupSize[1] = 128;
995 props->limits.maxComputeWorkGroupSize[2] = 128;
996 props->limits.subPixelPrecisionBits = 4;
997 props->limits.subTexelPrecisionBits = 4;
998 props->limits.mipmapPrecisionBits = 4;
999 props->limits.maxDrawIndexedIndexValue = 0xffffffffu;
1000 props->limits.maxDrawIndirectCount = (1u<<16) - 1u;
1001 props->limits.maxSamplerLodBias = 2.0f;
1002 props->limits.maxSamplerAnisotropy = 16.0f;
1003 props->limits.maxViewports = 16;
1004 props->limits.maxViewportDimensions[0] = 4096;
1005 props->limits.maxViewportDimensions[1] = 4096;
1006 props->limits.viewportBoundsRange[0] = -8192.f;
1007 props->limits.viewportBoundsRange[1] = 8191.f;
1008 props->limits.viewportSubPixelBits = 0;
1009 props->limits.minMemoryMapAlignment = 64;
1010 props->limits.minTexelBufferOffsetAlignment = 256;
1011 props->limits.minUniformBufferOffsetAlignment = 256;
1012 props->limits.minStorageBufferOffsetAlignment = 256;
1013 props->limits.minTexelOffset = -8;
1014 props->limits.maxTexelOffset = 7;
1015 props->limits.minTexelGatherOffset = -8;
1016 props->limits.maxTexelGatherOffset = 7;
1017 props->limits.minInterpolationOffset = -0.5f;
1018 props->limits.maxInterpolationOffset = 0.5f; // -1ulp
1019 props->limits.subPixelInterpolationOffsetBits = 4;
1020 props->limits.maxFramebufferWidth = 4096;
1021 props->limits.maxFramebufferHeight = 4096;
1022 props->limits.maxFramebufferLayers = 256;
1023 props->limits.framebufferColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1024 props->limits.framebufferDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1025 props->limits.framebufferStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1026 props->limits.framebufferNoAttachmentsSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1027 props->limits.maxColorAttachments = 4;
1028 props->limits.sampledImageColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1029 props->limits.sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT;
1030 props->limits.sampledImageDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1031 props->limits.sampledImageStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1032 props->limits.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1033 props->limits.maxSampleMaskWords = 1;
1034 props->limits.timestampComputeAndGraphics = VK_TRUE;
1035 props->limits.timestampPeriod = 1.0f;
1036 props->limits.maxClipDistances = 8;
1037 props->limits.maxCullDistances = 8;
1038 props->limits.maxCombinedClipAndCullDistances = 8;
1039 props->limits.discreteQueuePriorities = 2;
1040 props->limits.pointSizeRange[0] = 1.0f;
1041 props->limits.pointSizeRange[1] = 64.0f; // -1ulp
1042 props->limits.lineWidthRange[0] = 1.0f;
1043 props->limits.lineWidthRange[1] = 8.0f; // -1ulp
1044 props->limits.pointSizeGranularity = 1.0f;
1045 props->limits.lineWidthGranularity = 1.0f;
1046 props->limits.strictLines = 0;
1047 props->limits.standardSampleLocations = VK_TRUE;
1048 props->limits.optimalBufferCopyOffsetAlignment = 256;
1049 props->limits.optimalBufferCopyRowPitchAlignment = 256;
1050 props->limits.nonCoherentAtomSize = 128;
1053 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice, deUint32* count, VkQueueFamilyProperties* props)
1055 if (props && *count >= 1u)
1057 deMemset(props, 0, sizeof(VkQueueFamilyProperties));
1059 props->queueCount = 4u;
1060 props->queueFlags = VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT;
1061 props->timestampValidBits = 64;
1067 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties (VkPhysicalDevice, VkPhysicalDeviceMemoryProperties* props)
1069 deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));
1071 props->memoryTypeCount = 1u;
1072 props->memoryTypes[0].heapIndex = 0u;
1073 props->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
1074 | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
1075 | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1077 props->memoryHeapCount = 1u;
1078 props->memoryHeaps[0].size = 1ull << 31;
1079 props->memoryHeaps[0].flags = 0u;
1082 VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties (VkPhysicalDevice, VkFormat format, VkFormatProperties* pFormatProperties)
1084 const VkFormatFeatureFlags allFeatures = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
1085 | VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT
1086 | VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT
1087 | VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT
1088 | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT
1089 | VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT
1090 | VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT
1091 | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
1092 | VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT
1093 | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT
1094 | VK_FORMAT_FEATURE_BLIT_SRC_BIT
1095 | VK_FORMAT_FEATURE_BLIT_DST_BIT
1096 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT
1097 | VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
1098 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT
1099 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT
1100 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT
1101 | VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT
1102 | VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT;
1104 pFormatProperties->linearTilingFeatures = allFeatures;
1105 pFormatProperties->optimalTilingFeatures = allFeatures;
1106 pFormatProperties->bufferFeatures = allFeatures;
1108 if (isYCbCrFormat(format) && getPlaneCount(format) > 1)
1109 pFormatProperties->optimalTilingFeatures |= VK_FORMAT_FEATURE_DISJOINT_BIT;
1112 VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
1114 DE_UNREF(physicalDevice);
1121 pImageFormatProperties->maxArrayLayers = 8;
1122 pImageFormatProperties->maxExtent.width = 4096;
1123 pImageFormatProperties->maxExtent.height = 4096;
1124 pImageFormatProperties->maxExtent.depth = 4096;
1125 pImageFormatProperties->maxMipLevels = deLog2Ceil32(4096) + 1;
1126 pImageFormatProperties->maxResourceSize = 64u * 1024u * 1024u;
1127 pImageFormatProperties->sampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
1132 VKAPI_ATTR void VKAPI_CALL getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue)
1135 DE_UNREF(queueFamilyIndex);
1138 *pQueue = reinterpret_cast<VkQueue>((deUint64)queueIndex + 1);
1141 VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements (VkDevice, VkBuffer bufferHandle, VkMemoryRequirements* requirements)
1143 const Buffer* buffer = reinterpret_cast<const Buffer*>(bufferHandle.getInternal());
1145 requirements->memoryTypeBits = 1u;
1146 requirements->size = buffer->getSize();
1147 requirements->alignment = (VkDeviceSize)1u;
1150 VkDeviceSize getPackedImageDataSize (VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
1152 return (VkDeviceSize)getPixelSize(mapVkFormat(format))
1153 * (VkDeviceSize)extent.width
1154 * (VkDeviceSize)extent.height
1155 * (VkDeviceSize)extent.depth
1156 * (VkDeviceSize)samples;
1159 VkDeviceSize getCompressedImageDataSize (VkFormat format, VkExtent3D extent)
1163 const tcu::CompressedTexFormat tcuFormat = mapVkCompressedFormat(format);
1164 const size_t blockSize = tcu::getBlockSize(tcuFormat);
1165 const tcu::IVec3 blockPixelSize = tcu::getBlockPixelSize(tcuFormat);
1166 const int numBlocksX = deDivRoundUp32((int)extent.width, blockPixelSize.x());
1167 const int numBlocksY = deDivRoundUp32((int)extent.height, blockPixelSize.y());
1168 const int numBlocksZ = deDivRoundUp32((int)extent.depth, blockPixelSize.z());
1170 return blockSize*numBlocksX*numBlocksY*numBlocksZ;
1174 return 0; // Unsupported compressed format
1178 VkDeviceSize getYCbCrImageDataSize (VkFormat format, VkExtent3D extent)
1180 const PlanarFormatDescription desc = getPlanarFormatDescription(format);
1181 VkDeviceSize totalSize = 0;
1183 DE_ASSERT(extent.depth == 1);
1185 for (deUint32 planeNdx = 0; planeNdx < desc.numPlanes; ++planeNdx)
1187 const deUint32 elementSize = desc.planes[planeNdx].elementSizeBytes;
1189 totalSize = (VkDeviceSize)deAlign64((deInt64)totalSize, elementSize);
1190 totalSize += getPlaneSizeInBytes(desc, extent, planeNdx, 0, BUFFER_IMAGE_COPY_OFFSET_GRANULARITY);
1196 VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements (VkDevice, VkImage imageHandle, VkMemoryRequirements* requirements)
1198 const Image* image = reinterpret_cast<const Image*>(imageHandle.getInternal());
1200 requirements->memoryTypeBits = 1u;
1201 requirements->alignment = 16u;
1203 if (isCompressedFormat(image->getFormat()))
1204 requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
1205 else if (isYCbCrFormat(image->getFormat()))
1206 requirements->size = getYCbCrImageDataSize(image->getFormat(), image->getExtent());
1208 requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
// Allocates a device memory object. On non-SC builds, allocations that
// export to or import from an Android hardware buffer (AHB) are backed by
// an AHB-aware memory implementation; everything else uses plain host
// memory. Vulkan SC builds always use plain host memory.
VKAPI_ATTR VkResult VKAPI_CALL allocateMemory (VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory)
#ifndef CTS_USES_VULKANSC
// Scan the pNext chain for AHB export/import requests.
const VkExportMemoryAllocateInfo* const exportInfo = findStructure<VkExportMemoryAllocateInfo>(pAllocateInfo->pNext);
const VkImportAndroidHardwareBufferInfoANDROID* const importInfo = findStructure<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo->pNext);
// AHB path: exporting the AHB handle type, or importing an existing buffer.
if ((exportInfo && (exportInfo->handleTypes & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) != 0)
|| (importInfo && importInfo->buffer.internal))
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
// AHB-capable build: wrap an AHardwareBuffer in the memory object.
VK_NULL_RETURN((*pMemory = allocateNonDispHandle<ExternalDeviceMemoryAndroid, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
// AHB support is not compiled in on this platform.
return VK_ERROR_INVALID_EXTERNAL_HANDLE;
// Default path: plain host-memory-backed allocation.
VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
#else // CTS_USES_VULKANSC
VK_NULL_RETURN((*pMemory = allocateNonDispHandle<PrivateDeviceMemory, DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
#endif // CTS_USES_VULKANSC
1235 VKAPI_ATTR VkResult VKAPI_CALL mapMemory (VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
1237 DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
1242 *ppData = (deUint8*)memory->map() + offset;
1247 VKAPI_ATTR void VKAPI_CALL unmapMemory (VkDevice device, VkDeviceMemory memHandle)
1249 DeviceMemory* const memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
#ifndef CTS_USES_VULKANSC

// Exports the AHardwareBuffer backing an AHB-typed memory allocation. The
// caller receives its own reference (acquire) and must release it.
VKAPI_ATTR VkResult VKAPI_CALL getMemoryAndroidHardwareBufferANDROID (VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, pt::AndroidHardwareBufferPtr* pBuffer)
#if defined(USE_ANDROID_O_HARDWARE_BUFFER)
// The handle is known to reference an ExternalDeviceMemoryAndroid: only
// that type is ever created for the AHB handle type (see allocateMemory).
DeviceMemory* const memory = reinterpret_cast<ExternalDeviceMemoryAndroid*>(pInfo->memory.getInternal());
ExternalDeviceMemoryAndroid* const androidMemory = static_cast<ExternalDeviceMemoryAndroid*>(memory);

// Take an extra reference so the buffer can outlive the memory object.
AHardwareBuffer* hwbuffer = androidMemory->getHwBuffer();
AHardwareBuffer_acquire(hwbuffer);
pBuffer->internal = hwbuffer;
#endif // CTS_USES_VULKANSC
// Allocates descriptor sets from the pool. If any allocation fails, every
// set already created by this call is destroyed before the error is
// returned, leaving the pool unchanged.
VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets (VkDevice, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)pAllocateInfo->descriptorPool.getInternal());

for (deUint32 ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
catch (const std::bad_alloc&)
// Roll back the sets allocated earlier in this call.
for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());

return VK_ERROR_OUT_OF_HOST_MEMORY;
catch (VkResult res)
// Roll back, then propagate the VkResult thrown by the allocator.
for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());
1308 VKAPI_ATTR void VKAPI_CALL freeDescriptorSets (VkDevice, VkDescriptorPool descriptorPool, deUint32 count, const VkDescriptorSet* pDescriptorSets)
1310 DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
1312 for (deUint32 ndx = 0; ndx < count; ++ndx)
1313 poolImpl->free(pDescriptorSets[ndx]);
1316 VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool (VkDevice, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags)
1318 DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
1325 VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
1329 if (pAllocateInfo && pCommandBuffers)
1331 CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)pAllocateInfo->commandPool.getInternal());
1333 for (deUint32 ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
1334 pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);
1340 VKAPI_ATTR void VKAPI_CALL freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
1342 CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)commandPool.getInternal());
1346 for (deUint32 ndx = 0; ndx < commandBufferCount; ++ndx)
1347 poolImpl->free(pCommandBuffers[ndx]);
// Creates a display mode object owned by the given display. The allocator
// is forwarded to the shared non-dispatchable handle allocator.
VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR (VkPhysicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode)
DE_UNREF(pAllocator);
VK_NULL_RETURN((*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));
1357 VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR (VkDevice device, deUint32 swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains)
1359 for (deUint32 ndx = 0; ndx < swapchainCount; ++ndx)
1361 pSwapchains[ndx] = allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos+ndx, pAllocator);
// Reports external-memory capabilities for buffers. Nothing is supported by
// default; on non-SC builds the Android hardware buffer handle type is
// advertised as both importable and exportable.
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceExternalBufferPropertiesKHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties)
DE_UNREF(physicalDevice);
DE_UNREF(pExternalBufferInfo);

// Default answer: no external-memory features for the queried handle type.
pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0;
pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0;

#ifndef CTS_USES_VULKANSC
if (pExternalBufferInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
// AHB memory can be exported, and re-exported after an import.
pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
#endif // CTS_USES_VULKANSC
// Extended image-format query. Runs the basic
// getPhysicalDeviceImageFormatProperties check first, then validates the
// Android hardware buffer (AHB) restrictions and fills the external-memory
// properties when the caller chained the corresponding structs on pNext.
// SC builds forward directly to the basic query.
VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties2KHR (VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties)
#ifndef CTS_USES_VULKANSC
const VkPhysicalDeviceExternalImageFormatInfo* const externalInfo = findStructure<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
VkExternalImageFormatProperties* const externalProperties = findStructure<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);

// The base query must succeed before external-memory specifics apply.
result = getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
if (result != VK_SUCCESS)

if (externalInfo && externalInfo->handleType != 0)
// Only the AHB external handle type is supported by the null driver.
if (externalInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
return VK_ERROR_FORMAT_NOT_SUPPORTED;

// AHB images are limited to formats with a defined AHardwareBuffer
// equivalent.
if (!(pImageFormatInfo->format == VK_FORMAT_R8G8B8A8_UNORM
|| pImageFormatInfo->format == VK_FORMAT_R8G8B8_UNORM
|| pImageFormatInfo->format == VK_FORMAT_R5G6B5_UNORM_PACK16
|| pImageFormatInfo->format == VK_FORMAT_R16G16B16A16_SFLOAT
|| pImageFormatInfo->format == VK_FORMAT_A2R10G10B10_UNORM_PACK32))
return VK_ERROR_FORMAT_NOT_SUPPORTED;

// AHB-backed images must be 2D.
if (pImageFormatInfo->type != VK_IMAGE_TYPE_2D)
return VK_ERROR_FORMAT_NOT_SUPPORTED;

// Only transfer/sampled/color-attachment usage is allowed for AHB images.
if ((pImageFormatInfo->usage & ~(VK_IMAGE_USAGE_TRANSFER_SRC_BIT
| VK_IMAGE_USAGE_TRANSFER_DST_BIT
| VK_IMAGE_USAGE_SAMPLED_BIT
| VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
return VK_ERROR_FORMAT_NOT_SUPPORTED;

// Only the mutable-format create flag is allowed for AHB images.
if ((pImageFormatInfo->flags & ~(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT
/*| VK_IMAGE_CREATE_PROTECTED_BIT*/
/*| VK_IMAGE_CREATE_EXTENDED_USAGE_BIT*/))
return VK_ERROR_FORMAT_NOT_SUPPORTED;

if (externalProperties)
// AHB allocations are dedicated-only, and both import and export work.
externalProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT
| VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT
| VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
externalProperties->externalMemoryProperties.exportFromImportedHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
externalProperties->externalMemoryProperties.compatibleHandleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
#else // CTS_USES_VULKANSC
return getPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
#endif // CTS_USES_VULKANSC
1447 // \note getInstanceProcAddr is a little bit special:
1448 // vkNullDriverImpl.inl needs it to define s_platformFunctions but
1449 // getInstanceProcAddr() implementation needs other entry points from
1450 // vkNullDriverImpl.inl.
1451 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName);
1453 #include "vkNullDriverImpl.inl"
1455 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName)
1459 return reinterpret_cast<Instance*>(instance)->getProcAddr(pName);
1463 const std::string name = pName;
1465 if (name == "vkCreateInstance")
1466 return (PFN_vkVoidFunction)createInstance;
1467 else if (name == "vkEnumerateInstanceExtensionProperties")
1468 return (PFN_vkVoidFunction)enumerateInstanceExtensionProperties;
1469 else if (name == "vkEnumerateInstanceLayerProperties")
1470 return (PFN_vkVoidFunction)enumerateInstanceLayerProperties;
1472 return (PFN_vkVoidFunction)DE_NULL;
// Builds the instance-level entry-point table; the create info is accepted
// but otherwise ignored by the null driver.
Instance::Instance (const VkInstanceCreateInfo*)
: m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
// Builds the device-level entry-point table; the physical device and create
// info are accepted but otherwise ignored by the null driver.
Device::Device (VkPhysicalDevice, const VkDeviceCreateInfo*)
: m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
// Library implementation that exposes the null driver as an in-process
// platform driver: the entry points come from a static function table
// (s_platformFunctions) instead of a dynamically loaded shared object.
class NullDriverLibrary : public Library
NullDriverLibrary (void)
: m_library (s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
, m_driver (m_library)

const PlatformInterface& getPlatformInterface (void) const { return m_driver; }
const tcu::FunctionLibrary& getFunctionLibrary (void) const { return m_library; }

// Declaration order matters: m_driver is constructed from m_library.
const tcu::StaticFunctionLibrary m_library;
const PlatformDriver m_driver;
// Factory for the null driver library; the caller takes ownership of the
// returned pointer.
Library* createNullDriver (void)
return new NullDriverLibrary();