1 /*-------------------------------------------------------------------------
5 * Copyright (c) 2015 Google Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
21 * \brief Null (dummy) Vulkan implementation.
22 *//*--------------------------------------------------------------------*/
24 #include "vkNullDriver.hpp"
25 #include "vkPlatform.hpp"
26 #include "vkImageUtil.hpp"
27 #include "tcuFunctionLibrary.hpp"
// Allocates storage for one object of type T through the user-supplied
// Vulkan allocation callbacks. Throws std::bad_alloc if the callback
// returns null (the check is elsewhere in the original body).
// NOTE(review): alignment is fixed at sizeof(void*) — assumed sufficient
// for the null-driver object types; confirm against the object set.
void* allocateSystemMem (const VkAllocationCallbacks* pAllocator, VkSystemAllocationScope scope)
	void* ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void*), scope);
		throw std::bad_alloc();

// Returns memory obtained from allocateSystemMem() via the matching
// pfnFree callback.
void freeSystemMem (const VkAllocationCallbacks* pAllocator, void* mem)
	pAllocator->pfnFree(pAllocator->pUserData, mem);
// Creates a dispatchable object owned by 'parent'. When allocation
// callbacks are provided the object is placement-new'd into storage from
// allocateSystemMem(); otherwise plain operator new is used.
template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
	Object* obj = DE_NULL;
		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
			// Construct in place; the handle must later be released through
			// the same callbacks (see freeHandle()).
			obj = new (mem) Object(parent, pCreateInfo);
			DE_ASSERT(obj == mem);
			// NOTE(review): presumably the ctor-threw cleanup path — the raw
			// storage is returned before the exception propagates. Confirm
			// against the elided try/catch.
			pAllocator->pfnFree(pAllocator->pUserData, mem);
		// No allocator supplied: default heap allocation.
		obj = new Object(parent, pCreateInfo);
	return reinterpret_cast<Handle>(obj);
// Parent-less overload of allocateHandle() for objects constructed from a
// CreateInfo alone; otherwise identical to the parented variant above.
template<typename Object, typename Handle, typename CreateInfo>
Handle allocateHandle (const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
	Object* obj = DE_NULL;
		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
			obj = new (mem) Object(pCreateInfo);
			DE_ASSERT(obj == mem);
			// NOTE(review): presumably cleanup when the constructor throws —
			// confirm against the elided try/catch.
			pAllocator->pfnFree(pAllocator->pUserData, mem);
		// No allocator supplied: default heap allocation.
		obj = new Object(pCreateInfo);
	return reinterpret_cast<Handle>(obj);
// Destroys a dispatchable object. With an allocator the object was
// placement-new'd, so its storage is returned through freeSystemMem();
// without one it is deleted normally (that branch is elided here).
template<typename Object, typename Handle>
void freeHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
	Object* obj = reinterpret_cast<Object*>(handle);
		// Return the callback-allocated storage for the object.
		freeSystemMem(pAllocator, reinterpret_cast<void*>(obj));
// Non-dispatchable handles pack the object pointer into a 64-bit handle
// value rather than reinterpret_casting the pointer type directly.
template<typename Object, typename Handle, typename Parent, typename CreateInfo>
Handle allocateNonDispHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
	Object* const obj = allocateHandle<Object, Object*>(parent, pCreateInfo, pAllocator);
	return Handle((deUint64)(deUintptr)obj);

// Unpacks the pointer stored in a non-dispatchable handle and frees the
// underlying object with freeHandle().
template<typename Object, typename Handle>
void freeNonDispHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
	freeHandle<Object>(reinterpret_cast<Object*>((deUintptr)handle.getInternal()), pAllocator);
134 // Object definitions
// VK_NULL_RETURN executes an object-creation statement, translating
// std::bad_alloc into VK_ERROR_OUT_OF_HOST_MEMORY and a thrown VkResult
// into that result code.
#define VK_NULL_RETURN(STMT) \
	} catch (const std::bad_alloc&) { \
		return VK_ERROR_OUT_OF_HOST_MEMORY; \
	} catch (VkResult res) { \
	} while (deGetFalse())
// \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
// Table entry mapping an entry-point name string to its implementation.
#define VK_NULL_FUNC_ENTRY(NAME, FUNC) { #NAME, (deFunctionPtr)FUNC } // NOLINT(FUNC)
// Declares a trivial device-owned object type: constructible from its
// Vulkan CreateInfo, carrying no state of its own.
#define VK_NULL_DEFINE_DEVICE_OBJ(NAME) \
	NAME (VkDevice, const Vk##NAME##CreateInfo*) {} \
// Stateless device objects that need no custom behavior in the null driver.
VK_NULL_DEFINE_DEVICE_OBJ(Fence);
VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
VK_NULL_DEFINE_DEVICE_OBJ(Event);
VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
VK_NULL_DEFINE_DEVICE_OBJ(RenderPass);
VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
// Instance: dispatchable object exposing instance-level entry points
// through a static function table (see Instance::Instance below).
	Instance (const VkInstanceCreateInfo* instanceInfo);
	PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }
	const tcu::StaticFunctionLibrary m_functions;

// SurfaceKHR: accepts every platform's surface CreateInfo so a single
// type backs all WSI surface extensions.
	SurfaceKHR (VkInstance, const VkXlibSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkXcbSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkWaylandSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkMirSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkAndroidSurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkWin32SurfaceCreateInfoKHR*) {}
	SurfaceKHR (VkInstance, const VkDisplaySurfaceCreateInfoKHR*) {}
	~SurfaceKHR (void) {}

// DisplayModeKHR: stateless stand-in for a display mode object.
	DisplayModeKHR (VkDisplayKHR, const VkDisplayModeCreateInfoKHR*) {}
	~DisplayModeKHR (void) {}

// DebugReportCallbackEXT: registered callbacks are never invoked by the
// null driver.
class DebugReportCallbackEXT
	DebugReportCallbackEXT (VkInstance, const VkDebugReportCallbackCreateInfoEXT*) {}
	~DebugReportCallbackEXT (void) {}

// Device: dispatchable object exposing device-level entry points through
// a static function table (see Device::Device below).
	Device (VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* deviceInfo);
	PFN_vkVoidFunction getProcAddr (const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }
	const tcu::StaticFunctionLibrary m_functions;

// Pipeline: one type backs both graphics and compute pipelines.
	Pipeline (VkDevice, const VkGraphicsPipelineCreateInfo*) {}
	Pipeline (VkDevice, const VkComputePipelineCreateInfo*) {}

// SwapchainKHR: stateless stand-in; no images are actually presented.
	SwapchainKHR (VkDevice, const VkSwapchainCreateInfoKHR*) {}
	~SwapchainKHR (void) {}

// NVX device-generated-commands objects: stateless stand-ins.
class IndirectCommandsLayoutNVX
	IndirectCommandsLayoutNVX (VkDevice, const VkIndirectCommandsLayoutCreateInfoNVX*) {}
	ObjectTableNVX (VkDevice, const VkObjectTableCreateInfoNVX*) {}
// Allocates host-side backing storage for a VkDeviceMemory object with
// deMalloc(); throws std::bad_alloc when the allocation fails.
void* allocateHeap (const VkMemoryAllocateInfo* pAllocInfo)
	// \todo [2015-12-03 pyry] Alignment requirements?
	// \todo [2015-12-03 pyry] Empty allocations okay?
	if (pAllocInfo->allocationSize > 0)
		void* const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
			throw std::bad_alloc();

// Releases storage obtained from allocateHeap().
void freeHeap (void* ptr)

// DeviceMemory: owns a host heap block standing in for a real device
// allocation; mapMemory() hands out pointers into it.
	DeviceMemory (VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
		: m_memory(allocateHeap(pAllocInfo))
	// \todo [2016-08-03 pyry] In some cases leaving data unintialized would help valgrind analysis,
	// but currently it mostly hinders it.
		// Fill with a recognizable pattern so reads of "uninitialized"
		// device memory are easy to spot.
		deMemset(m_memory, 0xcd, (size_t)pAllocInfo->allocationSize);
	void* getPtr (void) const { return m_memory; }
	void* const m_memory;
// Buffer: records only its size, which is all getBufferMemoryRequirements()
// needs.
	Buffer (VkDevice, const VkBufferCreateInfo* pCreateInfo)
		: m_size(pCreateInfo->size)
	VkDeviceSize getSize (void) const { return m_size; }
	const VkDeviceSize m_size;

// Image: records the CreateInfo fields needed to compute memory
// requirements (see getImageMemoryRequirements()).
	Image (VkDevice, const VkImageCreateInfo* pCreateInfo)
		: m_imageType (pCreateInfo->imageType)
		, m_format (pCreateInfo->format)
		, m_extent (pCreateInfo->extent)
		, m_samples (pCreateInfo->samples)
	VkImageType getImageType (void) const { return m_imageType; }
	VkFormat getFormat (void) const { return m_format; }
	VkExtent3D getExtent (void) const { return m_extent; }
	VkSampleCountFlagBits getSamples (void) const { return m_samples; }
	const VkImageType m_imageType;
	const VkFormat m_format;
	const VkExtent3D m_extent;
	const VkSampleCountFlagBits m_samples;
// CommandBuffer: stateless stand-in; recorded commands are ignored.
	CommandBuffer(VkDevice, VkCommandPool, VkCommandBufferLevel)

// CommandPool: owns the CommandBuffer objects allocated from it; any
// still-live buffers are destroyed with the pool (see ~CommandPool()).
	CommandPool (VkDevice device, const VkCommandPoolCreateInfo*)
	VkCommandBuffer allocate (VkCommandBufferLevel level);
	void free (VkCommandBuffer buffer);
	const VkDevice m_device;
	vector<CommandBuffer*> m_buffers;
// Destroying the pool frees every command buffer still owned by it, per
// Vulkan's pool-owns-buffers lifetime rule.
CommandPool::~CommandPool (void)
	for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
		delete m_buffers[ndx];

// Creates a CommandBuffer, registers it in m_buffers for pool-lifetime
// cleanup, and returns it as a dispatchable handle.
VkCommandBuffer CommandPool::allocate (VkCommandBufferLevel level)
	CommandBuffer* const impl = new CommandBuffer(m_device, VkCommandPool(reinterpret_cast<deUintptr>(this)), level);
		m_buffers.push_back(impl);
	return reinterpret_cast<VkCommandBuffer>(impl);

// Removes 'buffer' from the pool's ownership list (swap-with-back erase)
// and asserts if the buffer was not allocated from this pool.
void CommandPool::free (VkCommandBuffer buffer)
	CommandBuffer* const impl = reinterpret_cast<CommandBuffer*>(buffer);
	for (size_t ndx = 0; ndx < m_buffers.size(); ++ndx)
		if (m_buffers[ndx] == impl)
			// O(1) unordered removal; buffer order within a pool is irrelevant.
			std::swap(m_buffers[ndx], m_buffers.back());
			m_buffers.pop_back();
	DE_FATAL("VkCommandBuffer not owned by VkCommandPool");
// DescriptorSet: stateless stand-in for an allocated descriptor set.
	DescriptorSet (VkDevice, VkDescriptorPool, VkDescriptorSetLayout) {}

// DescriptorPool: owns the DescriptorSet objects allocated from it and
// remembers its create flags so free() can validate
// VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT usage.
	DescriptorPool (VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo)
		, m_flags (pCreateInfo->flags)
	~DescriptorPool (void)
	VkDescriptorSet allocate (VkDescriptorSetLayout setLayout);
	void free (VkDescriptorSet set);
	const VkDevice m_device;
	const VkDescriptorPoolCreateFlags m_flags;
	vector<DescriptorSet*> m_managedSets;
// Creates a DescriptorSet, registers it for pool-lifetime cleanup, and
// returns it packed into a non-dispatchable handle.
VkDescriptorSet DescriptorPool::allocate (VkDescriptorSetLayout setLayout)
	DescriptorSet* const impl = new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<deUintptr>(this)), setLayout);
		m_managedSets.push_back(impl);
	return VkDescriptorSet(reinterpret_cast<deUintptr>(impl));

// Removes 'set' from the pool's ownership list; only legal when the pool
// was created with the FREE_DESCRIPTOR_SET bit (asserted below).
void DescriptorPool::free (VkDescriptorSet set)
	DescriptorSet* const impl = reinterpret_cast<DescriptorSet*>((deUintptr)set.getInternal());
	DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
	for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
		if (m_managedSets[ndx] == impl)
			// O(1) unordered removal; set order within a pool is irrelevant.
			std::swap(m_managedSets[ndx], m_managedSets.back());
			m_managedSets.pop_back();
	DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");

// Destroys all sets still owned by the pool (vkResetDescriptorPool
// semantics: sets become invalid without individual frees).
void DescriptorPool::reset (void)
	for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
		delete m_managedSets[ndx];
	m_managedSets.clear();
465 // API implementation
// vkGetInstanceProcAddr: resolved via the Instance's static function table.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName)
	return reinterpret_cast<Instance*>(instance)->getProcAddr(pName);

// vkGetDeviceProcAddr: resolved via the Device's static function table.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr (VkDevice device, const char* pName)
	return reinterpret_cast<Device*>(device)->getProcAddr(pName);
// vkCreateGraphicsPipelines: allocates 'count' pipelines all-or-nothing.
// On bad_alloc (or a thrown VkResult, per the second cleanup loop) every
// pipeline created so far is freed before the error is returned.
VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
	for (allocNdx = 0; allocNdx < count; allocNdx++)
		pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
	catch (const std::bad_alloc&)
		// Roll back partially-created pipelines before reporting OOM.
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
		return VK_ERROR_OUT_OF_HOST_MEMORY;
	// NOTE(review): presumably the catch(VkResult) rollback path — confirm
	// against the elided handler.
	for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
		freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);

// vkCreateComputePipelines: identical all-or-nothing strategy as the
// graphics variant above.
VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
	for (allocNdx = 0; allocNdx < count; allocNdx++)
		pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
	catch (const std::bad_alloc&)
		// Roll back partially-created pipelines before reporting OOM.
		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
		return VK_ERROR_OUT_OF_HOST_MEMORY;
	for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
		freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
// vkEnumeratePhysicalDevices: the null driver exposes exactly one
// physical device, represented by the non-null sentinel handle value 1.
VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices (VkInstance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pDevices)
	if (pDevices && *pPhysicalDeviceCount >= 1u)
		*pDevices = reinterpret_cast<VkPhysicalDevice>((void*)(deUintptr)1u)
	*pPhysicalDeviceCount = 1;
// vkGetPhysicalDeviceFeatures: reports every core feature as supported so
// that as many tests as possible will attempt to run on the null driver.
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
	DE_UNREF(physicalDevice);
	// Enable all features allow as many tests to run as possible
	pFeatures->robustBufferAccess = VK_TRUE;
	pFeatures->fullDrawIndexUint32 = VK_TRUE;
	pFeatures->imageCubeArray = VK_TRUE;
	pFeatures->independentBlend = VK_TRUE;
	pFeatures->geometryShader = VK_TRUE;
	pFeatures->tessellationShader = VK_TRUE;
	pFeatures->sampleRateShading = VK_TRUE;
	pFeatures->dualSrcBlend = VK_TRUE;
	pFeatures->logicOp = VK_TRUE;
	pFeatures->multiDrawIndirect = VK_TRUE;
	pFeatures->drawIndirectFirstInstance = VK_TRUE;
	pFeatures->depthClamp = VK_TRUE;
	pFeatures->depthBiasClamp = VK_TRUE;
	pFeatures->fillModeNonSolid = VK_TRUE;
	pFeatures->depthBounds = VK_TRUE;
	pFeatures->wideLines = VK_TRUE;
	pFeatures->largePoints = VK_TRUE;
	pFeatures->alphaToOne = VK_TRUE;
	pFeatures->multiViewport = VK_TRUE;
	pFeatures->samplerAnisotropy = VK_TRUE;
	pFeatures->textureCompressionETC2 = VK_TRUE;
	pFeatures->textureCompressionASTC_LDR = VK_TRUE;
	pFeatures->textureCompressionBC = VK_TRUE;
	pFeatures->occlusionQueryPrecise = VK_TRUE;
	pFeatures->pipelineStatisticsQuery = VK_TRUE;
	pFeatures->vertexPipelineStoresAndAtomics = VK_TRUE;
	pFeatures->fragmentStoresAndAtomics = VK_TRUE;
	pFeatures->shaderTessellationAndGeometryPointSize = VK_TRUE;
	pFeatures->shaderImageGatherExtended = VK_TRUE;
	pFeatures->shaderStorageImageExtendedFormats = VK_TRUE;
	pFeatures->shaderStorageImageMultisample = VK_TRUE;
	pFeatures->shaderStorageImageReadWithoutFormat = VK_TRUE;
	pFeatures->shaderStorageImageWriteWithoutFormat = VK_TRUE;
	pFeatures->shaderUniformBufferArrayDynamicIndexing = VK_TRUE;
	pFeatures->shaderSampledImageArrayDynamicIndexing = VK_TRUE;
	pFeatures->shaderStorageBufferArrayDynamicIndexing = VK_TRUE;
	pFeatures->shaderStorageImageArrayDynamicIndexing = VK_TRUE;
	pFeatures->shaderClipDistance = VK_TRUE;
	pFeatures->shaderCullDistance = VK_TRUE;
	pFeatures->shaderFloat64 = VK_TRUE;
	pFeatures->shaderInt64 = VK_TRUE;
	pFeatures->shaderInt16 = VK_TRUE;
	pFeatures->shaderResourceResidency = VK_TRUE;
	pFeatures->shaderResourceMinLod = VK_TRUE;
	pFeatures->sparseBinding = VK_TRUE;
	pFeatures->sparseResidencyBuffer = VK_TRUE;
	pFeatures->sparseResidencyImage2D = VK_TRUE;
	pFeatures->sparseResidencyImage3D = VK_TRUE;
	pFeatures->sparseResidency2Samples = VK_TRUE;
	pFeatures->sparseResidency4Samples = VK_TRUE;
	pFeatures->sparseResidency8Samples = VK_TRUE;
	pFeatures->sparseResidency16Samples = VK_TRUE;
	pFeatures->sparseResidencyAliased = VK_TRUE;
	pFeatures->variableMultisampleRate = VK_TRUE;
	pFeatures->inheritedQueries = VK_TRUE;
// vkGetPhysicalDeviceProperties: zero-initializes the structure, then
// fills in limits that match the Vulkan 1.0 minimum-capability profile so
// capability checks in tests behave like a baseline conformant device.
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties (VkPhysicalDevice, VkPhysicalDeviceProperties* props)
	deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));
	props->apiVersion = VK_API_VERSION;
	props->driverVersion = 1u;
	props->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
	// 5 bytes: "null" plus the terminating NUL.
	deMemcpy(props->deviceName, "null", 5);
	// Limits below follow the spec's required minimums (Table: Required Limits).
	props->limits.maxImageDimension1D = 4096;
	props->limits.maxImageDimension2D = 4096;
	props->limits.maxImageDimension3D = 256;
	props->limits.maxImageDimensionCube = 4096;
	props->limits.maxImageArrayLayers = 256;
	props->limits.maxTexelBufferElements = 65536;
	props->limits.maxUniformBufferRange = 16384;
	props->limits.maxStorageBufferRange = 1u<<27;
	props->limits.maxPushConstantsSize = 128;
	props->limits.maxMemoryAllocationCount = 4096;
	props->limits.maxSamplerAllocationCount = 4000;
	props->limits.bufferImageGranularity = 131072;
	props->limits.sparseAddressSpaceSize = 1u<<31;
	props->limits.maxBoundDescriptorSets = 4;
	props->limits.maxPerStageDescriptorSamplers = 16;
	props->limits.maxPerStageDescriptorUniformBuffers = 12;
	props->limits.maxPerStageDescriptorStorageBuffers = 4;
	props->limits.maxPerStageDescriptorSampledImages = 16;
	props->limits.maxPerStageDescriptorStorageImages = 4;
	props->limits.maxPerStageDescriptorInputAttachments = 4;
	props->limits.maxPerStageResources = 128;
	props->limits.maxDescriptorSetSamplers = 96;
	props->limits.maxDescriptorSetUniformBuffers = 72;
	props->limits.maxDescriptorSetUniformBuffersDynamic = 8;
	props->limits.maxDescriptorSetStorageBuffers = 24;
	props->limits.maxDescriptorSetStorageBuffersDynamic = 4;
	props->limits.maxDescriptorSetSampledImages = 96;
	props->limits.maxDescriptorSetStorageImages = 24;
	props->limits.maxDescriptorSetInputAttachments = 4;
	props->limits.maxVertexInputAttributes = 16;
	props->limits.maxVertexInputBindings = 16;
	props->limits.maxVertexInputAttributeOffset = 2047;
	props->limits.maxVertexInputBindingStride = 2048;
	props->limits.maxVertexOutputComponents = 64;
	props->limits.maxTessellationGenerationLevel = 64;
	props->limits.maxTessellationPatchSize = 32;
	props->limits.maxTessellationControlPerVertexInputComponents = 64;
	props->limits.maxTessellationControlPerVertexOutputComponents = 64;
	props->limits.maxTessellationControlPerPatchOutputComponents = 120;
	props->limits.maxTessellationControlTotalOutputComponents = 2048;
	props->limits.maxTessellationEvaluationInputComponents = 64;
	props->limits.maxTessellationEvaluationOutputComponents = 64;
	props->limits.maxGeometryShaderInvocations = 32;
	props->limits.maxGeometryInputComponents = 64;
	props->limits.maxGeometryOutputComponents = 64;
	props->limits.maxGeometryOutputVertices = 256;
	props->limits.maxGeometryTotalOutputComponents = 1024;
	props->limits.maxFragmentInputComponents = 64;
	props->limits.maxFragmentOutputAttachments = 4;
	props->limits.maxFragmentDualSrcAttachments = 1;
	props->limits.maxFragmentCombinedOutputResources = 4;
	props->limits.maxComputeSharedMemorySize = 16384;
	props->limits.maxComputeWorkGroupCount[0] = 65535;
	props->limits.maxComputeWorkGroupCount[1] = 65535;
	props->limits.maxComputeWorkGroupCount[2] = 65535;
	props->limits.maxComputeWorkGroupInvocations = 128;
	props->limits.maxComputeWorkGroupSize[0] = 128;
	props->limits.maxComputeWorkGroupSize[1] = 128;
	props->limits.maxComputeWorkGroupSize[2] = 128;
	props->limits.subPixelPrecisionBits = 4;
	props->limits.subTexelPrecisionBits = 4;
	props->limits.mipmapPrecisionBits = 4;
	props->limits.maxDrawIndexedIndexValue = 0xffffffffu;
	props->limits.maxDrawIndirectCount = (1u<<16) - 1u;
	props->limits.maxSamplerLodBias = 2.0f;
	props->limits.maxSamplerAnisotropy = 16.0f;
	props->limits.maxViewports = 16;
	props->limits.maxViewportDimensions[0] = 4096;
	props->limits.maxViewportDimensions[1] = 4096;
	props->limits.viewportBoundsRange[0] = -8192.f;
	props->limits.viewportBoundsRange[1] = 8191.f;
	props->limits.viewportSubPixelBits = 0;
	props->limits.minMemoryMapAlignment = 64;
	props->limits.minTexelBufferOffsetAlignment = 256;
	props->limits.minUniformBufferOffsetAlignment = 256;
	props->limits.minStorageBufferOffsetAlignment = 256;
	props->limits.minTexelOffset = -8;
	props->limits.maxTexelOffset = 7;
	props->limits.minTexelGatherOffset = -8;
	props->limits.maxTexelGatherOffset = 7;
	props->limits.minInterpolationOffset = -0.5f;
	props->limits.maxInterpolationOffset = 0.5f; // -1ulp
	props->limits.subPixelInterpolationOffsetBits = 4;
	props->limits.maxFramebufferWidth = 4096;
	props->limits.maxFramebufferHeight = 4096;
	props->limits.maxFramebufferLayers = 256;
	props->limits.framebufferColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.framebufferNoAttachmentsSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.maxColorAttachments = 4;
	props->limits.sampledImageColorSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT;
	props->limits.sampledImageDepthSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.sampledImageStencilSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
	props->limits.maxSampleMaskWords = 1;
	props->limits.timestampComputeAndGraphics = VK_TRUE;
	props->limits.timestampPeriod = 1.0f;
	props->limits.maxClipDistances = 8;
	props->limits.maxCullDistances = 8;
	props->limits.maxCombinedClipAndCullDistances = 8;
	props->limits.discreteQueuePriorities = 2;
	props->limits.pointSizeRange[0] = 1.0f;
	props->limits.pointSizeRange[1] = 64.0f; // -1ulp
	props->limits.lineWidthRange[0] = 1.0f;
	props->limits.lineWidthRange[1] = 8.0f; // -1ulp
	props->limits.pointSizeGranularity = 1.0f;
	props->limits.lineWidthGranularity = 1.0f;
	props->limits.strictLines = 0;
	props->limits.standardSampleLocations = VK_TRUE;
	props->limits.optimalBufferCopyOffsetAlignment = 256;
	props->limits.optimalBufferCopyRowPitchAlignment = 256;
	props->limits.nonCoherentAtomSize = 128;
// vkGetPhysicalDeviceQueueFamilyProperties: reports a single universal
// queue family with four graphics+compute queues.
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice, deUint32* count, VkQueueFamilyProperties* props)
	if (props && *count >= 1u)
		deMemset(props, 0, sizeof(VkQueueFamilyProperties));
		props->queueCount = 4u;
		props->queueFlags = VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT;
		props->timestampValidBits = 64;
// vkGetPhysicalDeviceMemoryProperties: one 2 GiB heap with a single
// memory type that is both host-visible and device-local (the backing
// store is plain host memory, see DeviceMemory).
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties (VkPhysicalDevice, VkPhysicalDeviceMemoryProperties* props)
	deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));
	props->memoryTypeCount = 1u;
	props->memoryTypes[0].heapIndex = 0u;
	props->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT|VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
	props->memoryHeapCount = 1u;
	props->memoryHeaps[0].size = 1ull << 31;
	props->memoryHeaps[0].flags = 0u;
// vkGetPhysicalDeviceFormatProperties: claims every feature for every
// format and tiling so format-support queries never reject a test.
VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties (VkPhysicalDevice, VkFormat, VkFormatProperties* pFormatProperties)
	const VkFormatFeatureFlags allFeatures = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
		| VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT
		| VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT
		| VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT
		| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT
		| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT
		| VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT
		| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
		| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT
		| VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT
		| VK_FORMAT_FEATURE_BLIT_SRC_BIT
		| VK_FORMAT_FEATURE_BLIT_DST_BIT
		| VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
	pFormatProperties->linearTilingFeatures = allFeatures;
	pFormatProperties->optimalTilingFeatures = allFeatures;
	// NOTE(review): buffer features reuse the image feature mask — image-only
	// bits in bufferFeatures are harmless here but nonstandard; confirm intent.
	pFormatProperties->bufferFeatures = allFeatures;
// vkGetPhysicalDeviceImageFormatProperties: fixed generous capabilities
// for all formats/usages; full mip chain for a 4096-wide image.
VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
	DE_UNREF(physicalDevice);
	pImageFormatProperties->maxArrayLayers = 8;
	pImageFormatProperties->maxExtent.width = 4096;
	pImageFormatProperties->maxExtent.height = 4096;
	pImageFormatProperties->maxExtent.depth = 4096;
	// log2(4096)+1 = 13 levels: complete mip chain down to 1x1.
	pImageFormatProperties->maxMipLevels = deLog2Ceil32(4096) + 1;
	pImageFormatProperties->maxResourceSize = 64u * 1024u * 1024u;
	pImageFormatProperties->sampleCounts = VK_SAMPLE_COUNT_1_BIT|VK_SAMPLE_COUNT_4_BIT;
// vkGetDeviceQueue: queues carry no state, so the handle simply encodes
// queueIndex+1 (the +1 keeps the handle non-null).
VKAPI_ATTR void VKAPI_CALL getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue)
	DE_UNREF(queueFamilyIndex);
	*pQueue = reinterpret_cast<VkQueue>((deUint64)queueIndex + 1);

// vkGetBufferMemoryRequirements: size comes straight from the Buffer
// object; the single memory type (bit 0) always suffices.
VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements (VkDevice, VkBuffer bufferHandle, VkMemoryRequirements* requirements)
	const Buffer* buffer = reinterpret_cast<const Buffer*>(bufferHandle.getInternal());
	requirements->memoryTypeBits = 1u;
	requirements->size = buffer->getSize();
	requirements->alignment = (VkDeviceSize)1u;
817 VkDeviceSize getPackedImageDataSize (VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
819 return (VkDeviceSize)getPixelSize(mapVkFormat(format))
820 * (VkDeviceSize)extent.width
821 * (VkDeviceSize)extent.height
822 * (VkDeviceSize)extent.depth
823 * (VkDeviceSize)samples;
// Computes the storage size of a block-compressed image: block size in
// bytes times the number of blocks needed to cover the extent (each
// dimension rounded up to whole blocks). Returns 0 for formats the
// framework cannot map (presumably via the elided catch handler).
VkDeviceSize getCompressedImageDataSize (VkFormat format, VkExtent3D extent)
	const tcu::CompressedTexFormat tcuFormat = mapVkCompressedFormat(format);
	const size_t blockSize = tcu::getBlockSize(tcuFormat);
	const tcu::IVec3 blockPixelSize = tcu::getBlockPixelSize(tcuFormat);
	const int numBlocksX = deDivRoundUp32((int)extent.width, blockPixelSize.x());
	const int numBlocksY = deDivRoundUp32((int)extent.height, blockPixelSize.y());
	const int numBlocksZ = deDivRoundUp32((int)extent.depth, blockPixelSize.z());
	return blockSize*numBlocksX*numBlocksY*numBlocksZ;
	return 0; // Unsupported compressed format
// vkGetImageMemoryRequirements: size is computed from the recorded image
// parameters, dispatching on compressed vs. packed layout.
VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements (VkDevice, VkImage imageHandle, VkMemoryRequirements* requirements)
	const Image* image = reinterpret_cast<const Image*>(imageHandle.getInternal());
	requirements->memoryTypeBits = 1u;
	requirements->alignment = 16u;
	if (isCompressedFormat(image->getFormat()))
		requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
		requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());

// vkMapMemory: returns a pointer into the host heap block owned by the
// DeviceMemory object, offset as requested.
// NOTE(review): 'size' and 'flags' validation is not visible here — the
// mapping presumably ignores them; confirm against the elided lines.
VKAPI_ATTR VkResult VKAPI_CALL mapMemory (VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
	const DeviceMemory* memory = reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
	*ppData = (deUint8*)memory->getPtr() + offset;
// vkAllocateDescriptorSets: all-or-nothing allocation; on failure every
// set created so far is destroyed before the error is returned.
VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets (VkDevice, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
	DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)pAllocateInfo->descriptorPool.getInternal());
	for (deUint32 ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
			pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
		catch (const std::bad_alloc&)
			// Roll back partially-allocated sets before reporting OOM.
			for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
				delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());
			return VK_ERROR_OUT_OF_HOST_MEMORY;
		// NOTE(review): presumably the catch(VkResult) rollback path — confirm
		// against the elided handler.
		for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
			delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());

// vkFreeDescriptorSets: returns each set to its owning pool.
VKAPI_ATTR void VKAPI_CALL freeDescriptorSets (VkDevice, VkDescriptorPool descriptorPool, deUint32 count, const VkDescriptorSet* pDescriptorSets)
	DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
	for (deUint32 ndx = 0; ndx < count; ++ndx)
		poolImpl->free(pDescriptorSets[ndx]);

// vkResetDescriptorPool: destroys every set owned by the pool.
VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool (VkDevice, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags)
	DescriptorPool* const poolImpl = reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
// vkAllocateCommandBuffers: allocates commandBufferCount buffers from the
// pool named in pAllocateInfo.
VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
	if (pAllocateInfo && pCommandBuffers)
		CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)pAllocateInfo->commandPool.getInternal());
		for (deUint32 ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
			pCommandBuffers[ndx] = poolImpl->allocate(pAllocateInfo->level);

// vkFreeCommandBuffers: returns each buffer to its owning pool.
VKAPI_ATTR void VKAPI_CALL freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
	CommandPool* const poolImpl = reinterpret_cast<CommandPool*>((deUintptr)commandPool.getInternal());
	for (deUint32 ndx = 0; ndx < commandBufferCount; ++ndx)
		poolImpl->free(pCommandBuffers[ndx]);
// vkCreateDisplayModeKHR: plain non-dispatchable allocation wrapped in
// VK_NULL_RETURN for error translation.
VKAPI_ATTR VkResult VKAPI_CALL createDisplayModeKHR (VkPhysicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode)
	DE_UNREF(pAllocator);
	VK_NULL_RETURN((*pMode = allocateNonDispHandle<DisplayModeKHR, VkDisplayModeKHR>(display, pCreateInfo, pAllocator)));

// vkCreateSharedSwapchainsKHR: creates one swapchain per CreateInfo.
// NOTE(review): no rollback of earlier swapchains is visible on failure —
// confirm against the elided error-handling lines.
VKAPI_ATTR VkResult VKAPI_CALL createSharedSwapchainsKHR (VkDevice device, deUint32 swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains)
	for (deUint32 ndx = 0; ndx < swapchainCount; ++ndx)
		pSwapchains[ndx] = allocateNonDispHandle<SwapchainKHR, VkSwapchainKHR>(device, pCreateInfos+ndx, pAllocator);
958 #include "vkNullDriverImpl.inl"
// Instance/Device constructors bind the generated entry-point tables
// (s_instanceFunctions / s_deviceFunctions come from the included
// vkNullDriverImpl.inl).
Instance::Instance (const VkInstanceCreateInfo*)
	: m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))

Device::Device (VkPhysicalDevice, const VkDeviceCreateInfo*)
	: m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))

// Library implementation exposing the null driver through the standard
// vk::Library interface; m_driver resolves entry points out of the static
// platform-function table.
class NullDriverLibrary : public Library
	NullDriverLibrary (void)
		: m_library (s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
		, m_driver (m_library)
	const PlatformInterface& getPlatformInterface (void) const { return m_driver; }
	const tcu::StaticFunctionLibrary m_library;
	const PlatformDriver m_driver;
// Factory used by the test framework to instantiate the null driver.
// Caller owns the returned Library.
Library* createNullDriver (void)
	return new NullDriverLibrary();