3 # Copyright (c) 2015-2017 The Khronos Group Inc.
4 # Copyright (c) 2015-2017 Valve Corporation
5 # Copyright (c) 2015-2017 LunarG, Inc.
6 # Copyright (c) 2015-2017 Google Inc.
8 # Licensed under the Apache License, Version 2.0 (the "License");
9 # you may not use this file except in compliance with the License.
10 # You may obtain a copy of the License at
12 # http://www.apache.org/licenses/LICENSE-2.0
14 # Unless required by applicable law or agreed to in writing, software
15 # distributed under the License is distributed on an "AS IS" BASIS,
16 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 # See the License for the specific language governing permissions and
18 # limitations under the License.
20 # Author: Tobin Ehlis <tobine@google.com>
22 # This script generates a Mock ICD that intercepts almost all Vulkan
23 # functions. The mock ICD is not intended to be useful or even compilable
24 # in its initial state. Rather it's intended to be a starting point that
25 # can be copied and customized to assist in creation of a new ICD.
28 from generator import *
29 from common_codegen import *
34 using mutex_t = std::mutex;
35 using lock_guard_t = std::lock_guard<mutex_t>;
36 using unique_lock_t = std::unique_lock<mutex_t>;
38 static mutex_t global_lock;
39 static uint64_t global_unique_handle = 1;
40 static const uint32_t SUPPORTED_LOADER_ICD_INTERFACE_VERSION = 5;
41 static uint32_t loader_interface_version = 0;
42 static bool negotiate_loader_icd_interface_called = false;
43 static void* CreateDispObjHandle() {
44 auto handle = new VK_LOADER_DATA;
45 set_loader_magic_value(handle);
48 static void DestroyDispObjHandle(void* handle) {
49 delete reinterpret_cast<VK_LOADER_DATA*>(handle);
53 # Manual code at the top of the cpp source file
54 SOURCE_CPP_PREFIX = '''
55 using std::unordered_map;
57 static constexpr uint32_t icd_physical_device_count = 1;
58 static unordered_map<VkInstance, std::array<VkPhysicalDevice, icd_physical_device_count>> physical_device_map;
60 // Map device memory handle to any mapped allocations that we'll need to free on unmap
61 static unordered_map<VkDeviceMemory, std::vector<void*>> mapped_memory_map;
63 // Map device memory allocation handle to the size
64 static unordered_map<VkDeviceMemory, VkDeviceSize> allocated_memory_size_map;
66 static unordered_map<VkDevice, unordered_map<uint32_t, unordered_map<uint32_t, VkQueue>>> queue_map;
67 static unordered_map<VkDevice, unordered_map<VkBuffer, VkBufferCreateInfo>> buffer_map;
68 static unordered_map<VkDevice, unordered_map<VkImage, VkDeviceSize>> image_memory_size_map;
70 static constexpr uint32_t icd_swapchain_image_count = 1;
71 static unordered_map<VkSwapchainKHR, VkImage[icd_swapchain_image_count]> swapchain_image_map;
73 // TODO: Would like to codegen this but limits aren't in XML
74 static VkPhysicalDeviceLimits SetLimits(VkPhysicalDeviceLimits *limits) {
75 limits->maxImageDimension1D = 4096;
76 limits->maxImageDimension2D = 4096;
77 limits->maxImageDimension3D = 256;
78 limits->maxImageDimensionCube = 4096;
79 limits->maxImageArrayLayers = 256;
80 limits->maxTexelBufferElements = 65536;
81 limits->maxUniformBufferRange = 16384;
82 limits->maxStorageBufferRange = 134217728;
83 limits->maxPushConstantsSize = 128;
84 limits->maxMemoryAllocationCount = 4096;
85 limits->maxSamplerAllocationCount = 4000;
86 limits->bufferImageGranularity = 1;
87 limits->sparseAddressSpaceSize = 2147483648;
88 limits->maxBoundDescriptorSets = 4;
89 limits->maxPerStageDescriptorSamplers = 16;
90 limits->maxPerStageDescriptorUniformBuffers = 12;
91 limits->maxPerStageDescriptorStorageBuffers = 4;
92 limits->maxPerStageDescriptorSampledImages = 16;
93 limits->maxPerStageDescriptorStorageImages = 4;
94 limits->maxPerStageDescriptorInputAttachments = 4;
95 limits->maxPerStageResources = 128;
96 limits->maxDescriptorSetSamplers = 96;
97 limits->maxDescriptorSetUniformBuffers = 72;
98 limits->maxDescriptorSetUniformBuffersDynamic = 8;
99 limits->maxDescriptorSetStorageBuffers = 24;
100 limits->maxDescriptorSetStorageBuffersDynamic = 4;
101 limits->maxDescriptorSetSampledImages = 96;
102 limits->maxDescriptorSetStorageImages = 24;
103 limits->maxDescriptorSetInputAttachments = 4;
104 limits->maxVertexInputAttributes = 16;
105 limits->maxVertexInputBindings = 16;
106 limits->maxVertexInputAttributeOffset = 2047;
107 limits->maxVertexInputBindingStride = 2048;
108 limits->maxVertexOutputComponents = 64;
109 limits->maxTessellationGenerationLevel = 64;
110 limits->maxTessellationPatchSize = 32;
111 limits->maxTessellationControlPerVertexInputComponents = 64;
112 limits->maxTessellationControlPerVertexOutputComponents = 64;
113 limits->maxTessellationControlPerPatchOutputComponents = 120;
114 limits->maxTessellationControlTotalOutputComponents = 2048;
115 limits->maxTessellationEvaluationInputComponents = 64;
116 limits->maxTessellationEvaluationOutputComponents = 64;
117 limits->maxGeometryShaderInvocations = 32;
118 limits->maxGeometryInputComponents = 64;
119 limits->maxGeometryOutputComponents = 64;
120 limits->maxGeometryOutputVertices = 256;
121 limits->maxGeometryTotalOutputComponents = 1024;
122 limits->maxFragmentInputComponents = 64;
123 limits->maxFragmentOutputAttachments = 4;
124 limits->maxFragmentDualSrcAttachments = 1;
125 limits->maxFragmentCombinedOutputResources = 4;
126 limits->maxComputeSharedMemorySize = 16384;
127 limits->maxComputeWorkGroupCount[0] = 65535;
128 limits->maxComputeWorkGroupCount[1] = 65535;
129 limits->maxComputeWorkGroupCount[2] = 65535;
130 limits->maxComputeWorkGroupInvocations = 128;
131 limits->maxComputeWorkGroupSize[0] = 128;
132 limits->maxComputeWorkGroupSize[1] = 128;
133 limits->maxComputeWorkGroupSize[2] = 64;
134 limits->subPixelPrecisionBits = 4;
135 limits->subTexelPrecisionBits = 4;
136 limits->mipmapPrecisionBits = 4;
137 limits->maxDrawIndexedIndexValue = UINT32_MAX;
138 limits->maxDrawIndirectCount = UINT16_MAX;
139 limits->maxSamplerLodBias = 2.0f;
140 limits->maxSamplerAnisotropy = 16;
141 limits->maxViewports = 16;
142 limits->maxViewportDimensions[0] = 4096;
143 limits->maxViewportDimensions[1] = 4096;
144 limits->viewportBoundsRange[0] = -8192;
145 limits->viewportBoundsRange[1] = 8191;
146 limits->viewportSubPixelBits = 0;
147 limits->minMemoryMapAlignment = 64;
148 limits->minTexelBufferOffsetAlignment = 16;
149 limits->minUniformBufferOffsetAlignment = 16;
150 limits->minStorageBufferOffsetAlignment = 16;
151 limits->minTexelOffset = -8;
152 limits->maxTexelOffset = 7;
153 limits->minTexelGatherOffset = -8;
154 limits->maxTexelGatherOffset = 7;
155 limits->minInterpolationOffset = 0.0f;
156 limits->maxInterpolationOffset = 0.5f;
157 limits->subPixelInterpolationOffsetBits = 4;
158 limits->maxFramebufferWidth = 4096;
159 limits->maxFramebufferHeight = 4096;
160 limits->maxFramebufferLayers = 256;
161 limits->framebufferColorSampleCounts = 0x7F;
162 limits->framebufferDepthSampleCounts = 0x7F;
163 limits->framebufferStencilSampleCounts = 0x7F;
164 limits->framebufferNoAttachmentsSampleCounts = 0x7F;
165 limits->maxColorAttachments = 4;
166 limits->sampledImageColorSampleCounts = 0x7F;
167 limits->sampledImageIntegerSampleCounts = 0x7F;
168 limits->sampledImageDepthSampleCounts = 0x7F;
169 limits->sampledImageStencilSampleCounts = 0x7F;
170 limits->storageImageSampleCounts = 0x7F;
171 limits->maxSampleMaskWords = 1;
172 limits->timestampComputeAndGraphics = VK_TRUE;
173 limits->timestampPeriod = 1;
174 limits->maxClipDistances = 8;
175 limits->maxCullDistances = 8;
176 limits->maxCombinedClipAndCullDistances = 8;
177 limits->discreteQueuePriorities = 2;
178 limits->pointSizeRange[0] = 1.0f;
179 limits->pointSizeRange[1] = 64.0f;
180 limits->lineWidthRange[0] = 1.0f;
181 limits->lineWidthRange[1] = 8.0f;
182 limits->pointSizeGranularity = 1.0f;
183 limits->lineWidthGranularity = 1.0f;
184 limits->strictLines = VK_TRUE;
185 limits->standardSampleLocations = VK_TRUE;
186 limits->optimalBufferCopyOffsetAlignment = 1;
187 limits->optimalBufferCopyRowPitchAlignment = 1;
188 limits->nonCoherentAtomSize = 256;
193 void SetBoolArrayTrue(VkBool32* bool_array, uint32_t num_bools)
195 for (uint32_t i = 0; i < num_bools; ++i) {
196 bool_array[i] = VK_TRUE;
201 # Manual code at the end of the cpp source file
202 SOURCE_CPP_POSTFIX = '''
204 static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
205 // TODO: This function should only care about physical device functions and return nullptr for other functions
206 const auto &item = name_to_funcptr_map.find(funcName);
207 if (item != name_to_funcptr_map.end()) {
208 return reinterpret_cast<PFN_vkVoidFunction>(item->second);
210 // Mock should intercept all functions so if we get here just return null
214 } // namespace vkmock
216 #if defined(__GNUC__) && __GNUC__ >= 4
217 #define EXPORT __attribute__((visibility("default")))
218 #elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
219 #define EXPORT __attribute__((visibility("default")))
226 EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName) {
227 if (!vkmock::negotiate_loader_icd_interface_called) {
228 vkmock::loader_interface_version = 1;
230 return vkmock::GetInstanceProcAddr(instance, pName);
233 EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName) {
234 return vkmock::GetPhysicalDeviceProcAddr(instance, pName);
237 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
238 vkmock::negotiate_loader_icd_interface_called = true;
239 vkmock::loader_interface_version = *pSupportedVersion;
240 if (*pSupportedVersion > vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
241 *pSupportedVersion = vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION;
247 EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
249 VkSurfaceKHR surface,
250 const VkAllocationCallbacks* pAllocator)
252 vkmock::DestroySurfaceKHR(instance, surface, pAllocator);
255 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
256 VkPhysicalDevice physicalDevice,
257 uint32_t queueFamilyIndex,
258 VkSurfaceKHR surface,
259 VkBool32* pSupported)
261 return vkmock::GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
264 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
265 VkPhysicalDevice physicalDevice,
266 VkSurfaceKHR surface,
267 VkSurfaceCapabilitiesKHR* pSurfaceCapabilities)
269 return vkmock::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
272 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
273 VkPhysicalDevice physicalDevice,
274 VkSurfaceKHR surface,
275 uint32_t* pSurfaceFormatCount,
276 VkSurfaceFormatKHR* pSurfaceFormats)
278 return vkmock::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
281 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
282 VkPhysicalDevice physicalDevice,
283 VkSurfaceKHR surface,
284 uint32_t* pPresentModeCount,
285 VkPresentModeKHR* pPresentModes)
287 return vkmock::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
290 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
292 const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
293 const VkAllocationCallbacks* pAllocator,
294 VkSurfaceKHR* pSurface)
296 return vkmock::CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
299 #ifdef VK_USE_PLATFORM_XLIB_KHR
301 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
303 const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
304 const VkAllocationCallbacks* pAllocator,
305 VkSurfaceKHR* pSurface)
307 return vkmock::CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
309 #endif /* VK_USE_PLATFORM_XLIB_KHR */
311 #ifdef VK_USE_PLATFORM_XCB_KHR
313 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
315 const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
316 const VkAllocationCallbacks* pAllocator,
317 VkSurfaceKHR* pSurface)
319 return vkmock::CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
321 #endif /* VK_USE_PLATFORM_XCB_KHR */
323 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
325 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
327 const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
328 const VkAllocationCallbacks* pAllocator,
329 VkSurfaceKHR* pSurface)
331 return vkmock::CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
333 #endif /* VK_USE_PLATFORM_WAYLAND_KHR */
335 #ifdef VK_USE_PLATFORM_ANDROID_KHR
337 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
339 const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
340 const VkAllocationCallbacks* pAllocator,
341 VkSurfaceKHR* pSurface)
343 return vkmock::CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
345 #endif /* VK_USE_PLATFORM_ANDROID_KHR */
347 #ifdef VK_USE_PLATFORM_WIN32_KHR
349 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
351 const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
352 const VkAllocationCallbacks* pAllocator,
353 VkSurfaceKHR* pSurface)
355 return vkmock::CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
357 #endif /* VK_USE_PLATFORM_WIN32_KHR */
359 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
361 VkSurfaceKHR surface,
362 VkDeviceGroupPresentModeFlagsKHR* pModes)
364 return vkmock::GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
367 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
368 VkPhysicalDevice physicalDevice,
369 VkSurfaceKHR surface,
370 uint32_t* pRectCount,
373 return vkmock::GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
376 #ifdef VK_USE_PLATFORM_VI_NN
378 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
380 const VkViSurfaceCreateInfoNN* pCreateInfo,
381 const VkAllocationCallbacks* pAllocator,
382 VkSurfaceKHR* pSurface)
384 return vkmock::CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
386 #endif /* VK_USE_PLATFORM_VI_NN */
388 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
389 VkPhysicalDevice physicalDevice,
390 VkSurfaceKHR surface,
391 VkSurfaceCapabilities2EXT* pSurfaceCapabilities)
393 return vkmock::GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
396 #ifdef VK_USE_PLATFORM_IOS_MVK
398 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
400 const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
401 const VkAllocationCallbacks* pAllocator,
402 VkSurfaceKHR* pSurface)
404 return vkmock::CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
406 #endif /* VK_USE_PLATFORM_IOS_MVK */
408 #ifdef VK_USE_PLATFORM_MACOS_MVK
410 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
412 const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
413 const VkAllocationCallbacks* pAllocator,
414 VkSurfaceKHR* pSurface)
416 return vkmock::CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
418 #endif /* VK_USE_PLATFORM_MACOS_MVK */
424 CUSTOM_C_INTERCEPTS = {
425 'vkCreateInstance': '''
426 // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
427 // apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
428 // ICD should behave as normal.
429 if (loader_interface_version <= 4) {
430 return VK_ERROR_INCOMPATIBLE_DRIVER;
432 *pInstance = (VkInstance)CreateDispObjHandle();
433 for (auto& physical_device : physical_device_map[*pInstance])
434 physical_device = (VkPhysicalDevice)CreateDispObjHandle();
435 // TODO: If emulating specific device caps, will need to add intelligence here
438 'vkDestroyInstance': '''
440 for (const auto physical_device : physical_device_map.at(instance))
441 DestroyDispObjHandle((void*)physical_device);
442 physical_device_map.erase(instance);
443 DestroyDispObjHandle((void*)instance);
446 'vkEnumeratePhysicalDevices': '''
447 VkResult result_code = VK_SUCCESS;
448 if (pPhysicalDevices) {
449 const auto return_count = (std::min)(*pPhysicalDeviceCount, icd_physical_device_count);
450 for (uint32_t i = 0; i < return_count; ++i) pPhysicalDevices[i] = physical_device_map.at(instance)[i];
451 if (return_count < icd_physical_device_count) result_code = VK_INCOMPLETE;
452 *pPhysicalDeviceCount = return_count;
454 *pPhysicalDeviceCount = icd_physical_device_count;
458 'vkCreateDevice': '''
459 *pDevice = (VkDevice)CreateDispObjHandle();
460 // TODO: If emulating specific device caps, will need to add intelligence here
463 'vkDestroyDevice': '''
464 unique_lock_t lock(global_lock);
465 // First destroy sub-device objects
467 for (auto dev_queue_map_pair : queue_map) {
468 for (auto queue_family_map_pair : queue_map[dev_queue_map_pair.first]) {
469 for (auto index_queue_pair : queue_map[dev_queue_map_pair.first][queue_family_map_pair.first]) {
470 DestroyDispObjHandle((void*)index_queue_pair.second);
475 buffer_map.erase(device);
476 image_memory_size_map.erase(device);
477 // Now destroy device
478 DestroyDispObjHandle((void*)device);
479 // TODO: If emulating specific device caps, will need to add intelligence here
481 'vkGetDeviceQueue': '''
482 unique_lock_t lock(global_lock);
483 auto queue = queue_map[device][queueFamilyIndex][queueIndex];
487 *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
489 // TODO: If emulating specific device caps, will need to add intelligence here
492 'vkGetDeviceQueue2': '''
493 GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
494 // TODO: Add further support for GetDeviceQueue2 features
496 'vkEnumerateInstanceLayerProperties': '''
499 'vkEnumerateInstanceVersion': '''
500 *pApiVersion = VK_API_VERSION_1_1;
503 'vkEnumerateDeviceLayerProperties': '''
506 'vkEnumerateInstanceExtensionProperties': '''
507 // If requesting number of extensions, return that
510 *pPropertyCount = (uint32_t)instance_extension_map.size();
513 for (const auto &name_ver_pair : instance_extension_map) {
514 if (i == *pPropertyCount) {
517 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
518 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
519 pProperties[i].specVersion = name_ver_pair.second;
522 if (i != instance_extension_map.size()) {
523 return VK_INCOMPLETE;
527 // If requesting extension properties, fill in data struct for number of extensions
530 'vkEnumerateDeviceExtensionProperties': '''
531 // If requesting number of extensions, return that
534 *pPropertyCount = (uint32_t)device_extension_map.size();
537 for (const auto &name_ver_pair : device_extension_map) {
538 if (i == *pPropertyCount) {
541 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
542 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
543 pProperties[i].specVersion = name_ver_pair.second;
546 if (i != device_extension_map.size()) {
547 return VK_INCOMPLETE;
551 // If requesting extension properties, fill in data struct for number of extensions
554 'vkGetPhysicalDeviceSurfacePresentModesKHR': '''
555 // Currently always say that all present modes are supported
556 if (!pPresentModes) {
557 *pPresentModeCount = 6;
559 // Intentionally falling through and just filling however many modes are requested
560 switch(*pPresentModeCount) {
562 pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
565 pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
568 pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
571 pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
574 pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
577 pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
583 'vkGetPhysicalDeviceSurfaceFormatsKHR': '''
584 // Currently always say that RGBA8 & BGRA8 are supported
585 if (!pSurfaceFormats) {
586 *pSurfaceFormatCount = 2;
588 // Intentionally falling through and just filling however many types are requested
589 switch(*pSurfaceFormatCount) {
591 pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
592 pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
595 pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
596 pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
602 'vkGetPhysicalDeviceSurfaceFormats2KHR': '''
603 // Currently always say that RGBA8 & BGRA8 are supported
604 if (!pSurfaceFormats) {
605 *pSurfaceFormatCount = 2;
607 // Intentionally falling through and just filling however many types are requested
608     switch(*pSurfaceFormatCount) {
610             pSurfaceFormats[1].pNext = nullptr;
611             pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
612             pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
615             pSurfaceFormats[0].pNext = nullptr;  // fix: this branch fills element 0 (was [1], leaving [0].pNext uninitialized)
616             pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
617             pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
623 'vkGetPhysicalDeviceSurfaceSupportKHR': '''
624 // Currently say that all surface/queue combos are supported
625 *pSupported = VK_TRUE;
628 'vkGetPhysicalDeviceSurfaceCapabilitiesKHR': '''
629 // In general just say max supported is available for requested surface
630 pSurfaceCapabilities->minImageCount = 1;
631 pSurfaceCapabilities->maxImageCount = 0;
632 pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
633 pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
634 pSurfaceCapabilities->minImageExtent.width = 1;
635 pSurfaceCapabilities->minImageExtent.height = 1;
636 pSurfaceCapabilities->maxImageExtent.width = 3840;
637 pSurfaceCapabilities->maxImageExtent.height = 2160;
638 pSurfaceCapabilities->maxImageArrayLayers = 128;
639 pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
640 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
641 VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
642 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
643 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
644 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
645 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
646 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
647 VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
648 pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
649 pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
650 VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
651 VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
652 VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
653 pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
654 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
655 VK_IMAGE_USAGE_SAMPLED_BIT |
656 VK_IMAGE_USAGE_STORAGE_BIT |
657 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
658 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
659 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
660 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
663 'vkGetPhysicalDeviceSurfaceCapabilities2KHR': '''
664 GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
667 'vkGetInstanceProcAddr': '''
668 if (!negotiate_loader_icd_interface_called) {
669 loader_interface_version = 0;
671 const auto &item = name_to_funcptr_map.find(pName);
672 if (item != name_to_funcptr_map.end()) {
673 return reinterpret_cast<PFN_vkVoidFunction>(item->second);
675 // Mock should intercept all functions so if we get here just return null
678 'vkGetDeviceProcAddr': '''
679 return GetInstanceProcAddr(nullptr, pName);
681 'vkGetPhysicalDeviceMemoryProperties': '''
682 pMemoryProperties->memoryTypeCount = 2;
683 pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
684 pMemoryProperties->memoryTypes[0].heapIndex = 0;
685 pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
686 pMemoryProperties->memoryTypes[1].heapIndex = 1;
687 pMemoryProperties->memoryHeapCount = 2;
688 pMemoryProperties->memoryHeaps[0].flags = 0;
689 pMemoryProperties->memoryHeaps[0].size = 8000000000;
690 pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
691 pMemoryProperties->memoryHeaps[1].size = 8000000000;
693 'vkGetPhysicalDeviceMemoryProperties2KHR': '''
694 GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
696 'vkGetPhysicalDeviceQueueFamilyProperties': '''
697 if (!pQueueFamilyProperties) {
698 *pQueueFamilyPropertyCount = 1;
700 if (*pQueueFamilyPropertyCount) {
701 pQueueFamilyProperties[0].queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;
702 pQueueFamilyProperties[0].queueCount = 1;
703 pQueueFamilyProperties[0].timestampValidBits = 0;
704 pQueueFamilyProperties[0].minImageTransferGranularity = {1,1,1};
708 'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
709 if (pQueueFamilyPropertyCount && pQueueFamilyProperties) {
710 GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, &pQueueFamilyProperties->queueFamilyProperties);
712 GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, nullptr);
715 'vkGetPhysicalDeviceFeatures': '''
716 uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
717 VkBool32 *bool_array = &pFeatures->robustBufferAccess;
718 SetBoolArrayTrue(bool_array, num_bools);
720 'vkGetPhysicalDeviceFeatures2KHR': '''
721 GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
722 uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
723 VkBool32* feat_bools = nullptr;
724 const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
725 if (desc_idx_features) {
726 const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
727 num_bools = bool_size/sizeof(VkBool32);
728 feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
729 SetBoolArrayTrue(feat_bools, num_bools);
731 const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
732 if (blendop_features) {
733 const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
734 num_bools = bool_size/sizeof(VkBool32);
735 feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
736 SetBoolArrayTrue(feat_bools, num_bools);
739 'vkGetPhysicalDeviceFormatProperties': '''
740 if (VK_FORMAT_UNDEFINED == format) {
741 *pFormatProperties = { 0x0, 0x0, 0x0 };
743 // TODO: Just returning full support for everything initially
744 *pFormatProperties = { 0x00FFFFFF, 0x00FFFFFF, 0x00FFFFFF };
747 'vkGetPhysicalDeviceFormatProperties2KHR': '''
748 GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
750 'vkGetPhysicalDeviceImageFormatProperties': '''
751 // A hardcoded unsupported format
752 if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
753 return VK_ERROR_FORMAT_NOT_SUPPORTED;
756 // TODO: Just hard-coding some values for now
757 // TODO: If tiling is linear, limit the mips, levels, & sample count
758 if (VK_IMAGE_TILING_LINEAR == tiling) {
759 *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
761 // We hard-code support for all sample counts except 64 samples.
762 *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
766 'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
767 GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
770 'vkGetPhysicalDeviceProperties': '''
771 // TODO: Just hard-coding some values for now
772 pProperties->apiVersion = VK_API_VERSION_1_1;
773 pProperties->driverVersion = 1;
774 pProperties->vendorID = 0xba5eba11;
775 pProperties->deviceID = 0xf005ba11;
776 pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
777 //std::string devName = "Vulkan Mock Device";
778 strcpy(pProperties->deviceName, "Vulkan Mock Device");
779 pProperties->pipelineCacheUUID[0] = 18;
780 pProperties->limits = SetLimits(&pProperties->limits);
781 pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
783 'vkGetPhysicalDeviceProperties2KHR': '''
784 GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
785 const auto *desc_idx_props = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingPropertiesEXT>(pProperties->pNext);
786 if (desc_idx_props) {
787 VkPhysicalDeviceDescriptorIndexingPropertiesEXT* write_props = (VkPhysicalDeviceDescriptorIndexingPropertiesEXT*)desc_idx_props;
788 write_props->maxUpdateAfterBindDescriptorsInAllPools = 500000;
789 write_props->shaderUniformBufferArrayNonUniformIndexingNative = false;
790 write_props->shaderSampledImageArrayNonUniformIndexingNative = false;
791 write_props->shaderStorageBufferArrayNonUniformIndexingNative = false;
792 write_props->shaderStorageImageArrayNonUniformIndexingNative = false;
793 write_props->shaderInputAttachmentArrayNonUniformIndexingNative = false;
794 write_props->robustBufferAccessUpdateAfterBind = true;
795 write_props->quadDivergentImplicitLod = true;
796 write_props->maxPerStageDescriptorUpdateAfterBindSamplers = 500000;
797 write_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers = 500000;
798 write_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers = 500000;
799 write_props->maxPerStageDescriptorUpdateAfterBindSampledImages = 500000;
800 write_props->maxPerStageDescriptorUpdateAfterBindStorageImages = 500000;
801 write_props->maxPerStageDescriptorUpdateAfterBindInputAttachments = 500000;
802 write_props->maxPerStageUpdateAfterBindResources = 500000;
803 write_props->maxDescriptorSetUpdateAfterBindSamplers = 500000;
804 write_props->maxDescriptorSetUpdateAfterBindUniformBuffers = 96;
805 write_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 8;
806 write_props->maxDescriptorSetUpdateAfterBindStorageBuffers = 500000;
807 write_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 4;
808 write_props->maxDescriptorSetUpdateAfterBindSampledImages = 500000;
809 write_props->maxDescriptorSetUpdateAfterBindStorageImages = 500000;
810 write_props->maxDescriptorSetUpdateAfterBindInputAttachments = 500000;
813 const auto *push_descriptor_props = lvl_find_in_chain<VkPhysicalDevicePushDescriptorPropertiesKHR>(pProperties->pNext);
814 if (push_descriptor_props) {
815 VkPhysicalDevicePushDescriptorPropertiesKHR* write_props = (VkPhysicalDevicePushDescriptorPropertiesKHR*)push_descriptor_props;
816 write_props->maxPushDescriptors = 256;
819 const auto *depth_stencil_resolve_props = lvl_find_in_chain<VkPhysicalDeviceDepthStencilResolvePropertiesKHR>(pProperties->pNext);
820 if (depth_stencil_resolve_props) {
821 VkPhysicalDeviceDepthStencilResolvePropertiesKHR* write_props = (VkPhysicalDeviceDepthStencilResolvePropertiesKHR*)depth_stencil_resolve_props;
822 write_props->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
823 write_props->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
826 'vkGetPhysicalDeviceExternalSemaphoreProperties':'''
827 // Hard code support for all handle types and features
828 pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
829 pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
830 pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
832 'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':'''
833 GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
835 'vkGetPhysicalDeviceExternalFenceProperties':'''
836 // Hard-code support for all handle types and features
837 pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
838 pExternalFenceProperties->compatibleHandleTypes = 0xF;
839 pExternalFenceProperties->externalFenceFeatures = 0x3;
841 'vkGetPhysicalDeviceExternalFencePropertiesKHR':'''
842 GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
844 'vkGetPhysicalDeviceExternalBufferProperties':'''
845 // Hard-code support for all handle types and features
846 pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
847 pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0x1FF;
848 pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0x1FF;
850 'vkGetPhysicalDeviceExternalBufferPropertiesKHR':'''
851 GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
853 'vkGetBufferMemoryRequirements': '''
854 // TODO: Just hard-coding reqs for now
855 pMemoryRequirements->size = 4096;
856 pMemoryRequirements->alignment = 1;
857 pMemoryRequirements->memoryTypeBits = 0xFFFF;
858 // Return a better size based on the buffer size from the create info.
859 auto d_iter = buffer_map.find(device);
860 if (d_iter != buffer_map.end()) {
861 auto iter = d_iter->second.find(buffer);
862 if (iter != d_iter->second.end()) {
863 pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
867 'vkGetBufferMemoryRequirements2KHR': '''
868 GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
870 'vkGetImageMemoryRequirements': '''
871 pMemoryRequirements->size = 0;
872 pMemoryRequirements->alignment = 1;
874 auto d_iter = image_memory_size_map.find(device);
875 if(d_iter != image_memory_size_map.end()){
876 auto iter = d_iter->second.find(image);
877 if (iter != d_iter->second.end()) {
878 pMemoryRequirements->size = iter->second;
881 // Here we hard-code that the memory type at index 3 doesn't support this image.
882 pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
884 'vkGetImageMemoryRequirements2KHR': '''
885 GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
888 unique_lock_t lock(global_lock);
889 if (VK_WHOLE_SIZE == size) {
890 if (allocated_memory_size_map.count(memory) != 0)
891 size = allocated_memory_size_map[memory] - offset;
895 void* map_addr = malloc((size_t)size);
896 mapped_memory_map[memory].push_back(map_addr);
901 unique_lock_t lock(global_lock);
902 for (auto map_addr : mapped_memory_map[memory]) {
905 mapped_memory_map.erase(memory);
907 'vkGetImageSubresourceLayout': '''
908 // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure.
909 *pLayout = VkSubresourceLayout(); // Default constructor zero values.
911 'vkCreateSwapchainKHR': '''
912 unique_lock_t lock(global_lock);
913 *pSwapchain = (VkSwapchainKHR)global_unique_handle++;
914 for(uint32_t i = 0; i < icd_swapchain_image_count; ++i){
915 swapchain_image_map[*pSwapchain][i] = (VkImage)global_unique_handle++;
919 'vkDestroySwapchainKHR': '''
920 unique_lock_t lock(global_lock);
921 swapchain_image_map.clear();
923 'vkGetSwapchainImagesKHR': '''
924 if (!pSwapchainImages) {
925 *pSwapchainImageCount = icd_swapchain_image_count;
927 unique_lock_t lock(global_lock);
928 for (uint32_t img_i = 0; img_i < (std::min)(*pSwapchainImageCount, icd_swapchain_image_count); ++img_i){
929 pSwapchainImages[img_i] = swapchain_image_map.at(swapchain)[img_i];
932 if (*pSwapchainImageCount < icd_swapchain_image_count) return VK_INCOMPLETE;
933 else if (*pSwapchainImageCount > icd_swapchain_image_count) *pSwapchainImageCount = icd_swapchain_image_count;
937 'vkAcquireNextImageKHR': '''
941 'vkAcquireNextImage2KHR': '''
945 'vkCreateBuffer': '''
946 unique_lock_t lock(global_lock);
947 *pBuffer = (VkBuffer)global_unique_handle++;
948 buffer_map[device][*pBuffer] = *pCreateInfo;
951 'vkDestroyBuffer': '''
952 unique_lock_t lock(global_lock);
953 buffer_map[device].erase(buffer);
956 unique_lock_t lock(global_lock);
957 *pImage = (VkImage)global_unique_handle++;
958 // TODO: A pixel size is 32 bytes. This accounts for the largest possible pixel size of any format. It could be changed to more accurate size if need be.
959 image_memory_size_map[device][*pImage] = pCreateInfo->extent.width * pCreateInfo->extent.height * pCreateInfo->extent.depth *
960 32 * pCreateInfo->arrayLayers * (pCreateInfo->mipLevels > 1 ? 2 : 1);
962 switch (pCreateInfo->format) {
963 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
964 case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
965 case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
966 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
967 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
968 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
969 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
970 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
971 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
972 case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
973 case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
974 case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
975 image_memory_size_map[device][*pImage] *= 3;
977 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
978 case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
979 case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
980 case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
981 case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
982 case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
983 case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
984 case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
985 image_memory_size_map[device][*pImage] *= 2;
992 'vkDestroyImage': '''
993 unique_lock_t lock(global_lock);
994 image_memory_size_map[device].erase(image);
998 # MockICDGeneratorOptions - subclass of GeneratorOptions.
# Adds options used by MockICDOutputGenerator objects during Mock ICD generation.
1003 # Additional members
1004 # prefixText - list of strings to prefix generated header with
1005 # (usually a copyright statement + calling convention macros).
1006 # protectFile - True if multiple inclusion protection should be
1007 # generated (based on the filename) around the entire header.
1008 # protectFeature - True if #ifndef..#endif protection should be
1009 # generated around a feature interface in the header file.
1010 # genFuncPointers - True if function pointer typedefs should be
1012 # protectProto - If conditional protection should be generated
1013 # around prototype declarations, set to either '#ifdef'
1014 # to require opt-in (#ifdef protectProtoStr) or '#ifndef'
1015 # to require opt-out (#ifndef protectProtoStr). Otherwise
1017 # protectProtoStr - #ifdef/#ifndef symbol to use around prototype
1018 # declarations, if protectProto is set
1019 # apicall - string to use for the function declaration prefix,
1020 # such as APICALL on Windows.
1021 # apientry - string to use for the calling convention macro,
1022 # in typedefs, such as APIENTRY.
1023 # apientryp - string to use for the calling convention macro
1024 # in function pointer typedefs, such as APIENTRYP.
1025 # indentFuncProto - True if prototype declarations should put each
1026 # parameter on a separate line
1027 # indentFuncPointer - True if typedefed function pointers should put each
1028 # parameter on a separate line
1029 # alignFuncParam - if nonzero and parameters are being put on a
1030 # separate line, align parameter names at the specified column
# Options for MockICDOutputGenerator: forwards the registry-selection
# arguments (versions/extensions/sort order) to GeneratorOptions and records
# the C-header formatting and protection knobs read back while emitting the
# mock ICD header/source.
class MockICDGeneratorOptions(GeneratorOptions):
    # NOTE(review): the `def __init__(self, ...)` line and several leading
    # parameters (conventions, filename, directory, apiname, profile,
    # versions, prefixText, protectFile, apicall, apientry, apientryp,
    # alignFuncParam, ...) are elided in this chunk of the file; the keyword
    # defaults below belong to that constructor signature.
                 emitversions = '.*',
                 defaultExtensions = None,
                 addExtensions = None,
                 removeExtensions = None,
                 emitExtensions = None,
                 sortProcedure = regSortFeatures,
                 genFuncPointers = True,
                 protectFeature = True,
                 protectProto = None,
                 protectProtoStr = None,
                 indentFuncProto = True,
                 indentFuncPointer = False,
                 expandEnumerants = True,
                 helper_file_type = ''):
        # Registry selection/emission options are handled by the base class.
        GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
                                  versions, emitversions, defaultExtensions,
                                  addExtensions, removeExtensions, emitExtensions, sortProcedure)
        # Formatting/protection knobs consumed by MockICDOutputGenerator
        # while writing the output file.
        self.prefixText = prefixText
        self.genFuncPointers = genFuncPointers
        self.protectFile = protectFile
        self.protectFeature = protectFeature
        self.protectProto = protectProto
        self.protectProtoStr = protectProtoStr
        self.apicall = apicall
        self.apientry = apientry
        self.apientryp = apientryp
        self.indentFuncProto = indentFuncProto
        self.indentFuncPointer = indentFuncPointer
        self.alignFuncParam = alignFuncParam
1075 # MockICDOutputGenerator - subclass of OutputGenerator.
1076 # Generates a mock vulkan ICD.
1077 # This is intended to be a minimal replacement for a vulkan device in order
1078 # to enable Vulkan Validation testing.
1081 # MockOutputGenerator(errFile, warnFile, diagFile) - args as for
1082 # OutputGenerator. Defines additional internal state.
1083 # ---- methods overriding base class ----
1084 # beginFile(genOpts)
1086 # beginFeature(interface, emit)
1088 # genType(typeinfo,name)
1089 # genStruct(typeinfo,name)
1090 # genGroup(groupinfo,name)
1091 # genEnum(enuminfo, name)
class MockICDOutputGenerator(OutputGenerator):
    """Generate specified API interfaces in a specific style, such as a C header"""
    # This is an ordered list of sections in the header file.
    TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
                     'group', 'bitmask', 'funcpointer', 'struct']
    ALL_SECTIONS = TYPE_SECTIONS + ['command']
    # NOTE(review): the `def __init__(self, ...)` line is elided in this
    # chunk of the file; the keyword defaults below belong to that
    # constructor signature.
                 errFile = sys.stderr,
                 warnFile = sys.stderr,
                 diagFile = sys.stdout):
        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
        # Internal state - accumulators for different inner block text
        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
        # Lines of the '{"vkName", (void*)Name},' intercept table that is
        # joined into name_to_funcptr_map when the file is finished.
        self.intercepts = []
1108 # Check if the parameter passed in is a pointer to an array
1109 def paramIsArray(self, param):
1110 return param.attrib.get('len') is not None
    # Check if the parameter passed in is a pointer
    # (a '*' in a child element's tail text marks pointer punctuation in the
    # registry XML).
    def paramIsPointer(self, param):
        # NOTE(review): the loop header over param's child elements and the
        # final return statement are elided in this chunk of the file.
            if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
    # Check if an object is a non-dispatchable handle
    def isHandleTypeNonDispatchable(self, handletype):
        # Locate the handle-category <type> entry for `handletype` in the
        # registry tree.
        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
        # Non-dispatchable handles are declared via VK_DEFINE_NON_DISPATCHABLE_HANDLE.
        if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
            # NOTE(review): the return statements are elided in this chunk.
    # Check if an object is a dispatchable handle
    def isHandleTypeDispatchable(self, handletype):
        # Locate the handle-category <type> entry for `handletype` in the
        # registry tree.
        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
        # Dispatchable handles are declared via VK_DEFINE_HANDLE.
        if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
            # NOTE(review): the return statements are elided in this chunk.
    def beginFile(self, genOpts):
        """Emit the file prologue: inclusion guard (header mode), user
        copyright/prefix text, #includes, the vkmock namespace opener, and
        the instance/device extension-name -> version maps scraped from the
        registry.

        NOTE(review): several structural lines are elided in this chunk of
        the file (e.g. the header-vs-source branch around the #includes, the
        initialization of instance_exts/device_exts, and the loop's skip of
        ignored extensions), so the indentation below is reconstructed.
        """
        OutputGenerator.beginFile(self, genOpts)
        # Multiple inclusion protection & C++ namespace.
        if (genOpts.protectFile and self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
            headerSym = '__' + re.sub(r'\.h', '_h_', os.path.basename(self.genOpts.filename))
            write('#ifndef', headerSym, file=self.outFile)
            write('#define', headerSym, '1', file=self.outFile)
        # User-supplied prefix text, if any (list of strings)
        if (genOpts.prefixText):
            for s in genOpts.prefixText:
                write(s, file=self.outFile)
        write('#include <unordered_map>', file=self.outFile)
        write('#include <mutex>', file=self.outFile)
        write('#include <string>', file=self.outFile)
        write('#include <cstring>', file=self.outFile)
        write('#include "vulkan/vk_icd.h"', file=self.outFile)
        write('#include "mock_icd.h"', file=self.outFile)
        write('#include <stdlib.h>', file=self.outFile)
        write('#include <algorithm>', file=self.outFile)
        write('#include <array>', file=self.outFile)
        write('#include <vector>', file=self.outFile)
        write('#include "vk_typemap_helper.h"', file=self.outFile)
        write('namespace vkmock {', file=self.outFile)
        write(HEADER_C_CODE, file=self.outFile)
        # Include all of the extensions in ICD except specific ignored ones
        # Ignore extensions that ICDs should not implement or are not safe to report
        ignore_exts = ['VK_EXT_validation_cache']
        for ext in self.registry.tree.findall("extensions/extension"):
            if ext.attrib['supported'] != 'disabled': # Only include enabled extensions
                if (ext.attrib['name'] in ignore_exts):
                    # NOTE(review): skip-branch body elided in this chunk.
                elif (ext.attrib.get('type') and 'instance' == ext.attrib['type']):
                    instance_exts.append(' {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
                    device_exts.append(' {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
        write('// Map of instance extension name to version', file=self.outFile)
        write('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {', file=self.outFile)
        write('\n'.join(instance_exts), file=self.outFile)
        write('};', file=self.outFile)
        write('// Map of device extension name to version', file=self.outFile)
        write('static const std::unordered_map<std::string, uint32_t> device_extension_map = {', file=self.outFile)
        write('\n'.join(device_exts), file=self.outFile)
        write('};', file=self.outFile)
        write(SOURCE_CPP_PREFIX, file=self.outFile)
        # NOTE(review): the `def endFile(self)` line and the header/source
        # branch enclosing this code are elided in this chunk of the file.
        # Finish C++ namespace and multiple inclusion protection
        # record intercepted procedures
        write('// Map of all APIs to be intercepted by this layer', file=self.outFile)
        write('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
        write('\n'.join(self.intercepts), file=self.outFile)
        write('};\n', file=self.outFile)
        write('} // namespace vkmock', file=self.outFile)
        write('#endif', file=self.outFile)
        else: # Loader-layer-interface, need to implement global interface functions
            write(SOURCE_CPP_POSTFIX, file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFile(self)
1215 def beginFeature(self, interface, emit):
1216 #write('// starting beginFeature', file=self.outFile)
1217 # Start processing in superclass
1218 OutputGenerator.beginFeature(self, interface, emit)
1219 self.featureExtraProtect = GetFeatureProtect(interface)
1221 # Accumulate includes, defines, types, enums, function pointer typedefs,
1222 # end function prototypes separately for this feature. They're only
1223 # printed in endFeature().
1224 self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1225 #write('// ending beginFeature', file=self.outFile)
    def endFeature(self):
        # Actually write the interface to the output file.
        #write('// starting endFeature', file=self.outFile)
        # NOTE(review): the `if (self.emit):` guard and a couple of
        # structural lines (e.g. `if (contents):` before the section write)
        # are elided in this chunk of the file.
        if (self.genOpts.protectFeature):
            write('#ifndef', self.featureName, file=self.outFile)
        # If type declarations are needed by other features based on
        # this one, it may be necessary to suppress the ExtraProtect,
        # or move it below the 'for section...' loop.
        #write('// endFeature looking at self.featureExtraProtect', file=self.outFile)
        if (self.featureExtraProtect != None):
            write('#ifdef', self.featureExtraProtect, file=self.outFile)
        #write('#define', self.featureName, '1', file=self.outFile)
        for section in self.TYPE_SECTIONS:
            #write('// endFeature writing section'+section, file=self.outFile)
            contents = self.sections[section]
            write('\n'.join(contents), file=self.outFile)
        #write('// endFeature looking at self.sections[command]', file=self.outFile)
        if (self.sections['command']):
            write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
        if (self.featureExtraProtect != None):
            write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
        if (self.genOpts.protectFeature):
            write('#endif /*', self.featureName, '*/', file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFeature(self)
        #write('// ending endFeature', file=self.outFile)
1259 # Append a definition to the specified section
1260 def appendSection(self, section, text):
1261 # self.sections[section].append('SECTION: ' + section + '\n')
1262 self.sections[section].append(text)
    # Type generation entry point (called by the registry framework).
    def genType(self, typeinfo, name, alias):
        # NOTE(review): the method body is elided in this chunk of the file.
1268 # Struct (e.g. C "struct" type) generation.
1269 # This is a special case of the <type> tag where the contents are
1270 # interpreted as a set of <member> tags instead of freeform C
# type declarations. The <member> tags are just like <param>
1272 # tags - they are a declaration of a struct or union member.
1273 # Only simple member declarations are supported (no nested
    # Struct (e.g. C "struct" type) generation: emits a C typedef built from
    # the struct/union's <member> declarations into the 'struct' section.
    def genStruct(self, typeinfo, typeName, alias):
        # NOTE(review): one body line is elided in this chunk (between the
        # member-decl append and the closing brace), so the per-member
        # terminator handling is not fully visible here.
        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
        # 'category' is the registry's "struct" or "union" classification.
        body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
        # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
        for member in typeinfo.elem.findall('.//member'):
            body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
        body += '} ' + typeName + ';\n'
        self.appendSection('struct', body)
    # Group (e.g. C "enum" type) generation.
    # These are concatenated together with other types.
    def genGroup(self, groupinfo, groupName, alias):
        # NOTE(review): the method body is elided in this chunk of the file.
    # Enumerant generation
    # <enum> tags may specify their values in several ways, but are usually
    # just integers or floats.
    def genEnum(self, enuminfo, name, alias):
        # NOTE(review): the method body is elided in this chunk of the file.
    # Command generation
    def genCmd(self, cmdinfo, name, alias):
        """Generate the mock intercept for one Vulkan command.

        In header mode only static declarations and intercept-table entries
        are emitted. In source mode, manually-implemented functions get
        their body from CUSTOM_C_INTERCEPTS (or a declaration-only stub);
        otherwise a generic body is synthesized: Create/Allocate commands
        hand out fake handles, Destroy/Free commands release them, and
        everything else returns a success value.

        NOTE(review): several structural lines are elided in this chunk of
        the file (list initializations such as `param_names = []` and
        `lp_len = None`, `else:` branches, early `return`s, and the closing
        bracket of manual_functions), so the indentation below is
        reconstructed and should be checked against the full file.
        """
        decls = self.makeCDecls(cmdinfo.elem)
        if self.header: # In the header declare all intercepts
            self.appendSection('command', '')
            self.appendSection('command', 'static %s' % (decls[0]))
            if (self.featureExtraProtect != None):
                self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
            self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
            if (self.featureExtraProtect != None):
                self.intercepts += [ '#endif' ]
        manual_functions = [
            # Include functions here to be intercepted w/ manually implemented function bodies
            'vkGetDeviceProcAddr',
            'vkGetInstanceProcAddr',
            'vkDestroyInstance',
            #'vkCreateDebugReportCallbackEXT',
            #'vkDestroyDebugReportCallbackEXT',
            'vkEnumerateInstanceLayerProperties',
            'vkEnumerateInstanceVersion',
            'vkEnumerateInstanceExtensionProperties',
            'vkEnumerateDeviceLayerProperties',
            'vkEnumerateDeviceExtensionProperties',
        # NOTE(review): the list's closing bracket is elided in this chunk.
        if name in manual_functions:
            self.appendSection('command', '')
            if name not in CUSTOM_C_INTERCEPTS:
                # No custom body available: declare only, with a TODO stub.
                self.appendSection('command', '// declare only')
                self.appendSection('command', 'static %s' % (decls[0]))
                self.appendSection('command', '// TODO: Implement custom intercept body')
                # Custom body: strip the trailing ';' from the declaration
                # and splice the hand-written body in.
                self.appendSection('command', 'static %s' % (decls[0][:-1]))
                self.appendSection('command', '{\n%s}' % (CUSTOM_C_INTERCEPTS[name]))
            self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
        # record that the function will be intercepted
        if (self.featureExtraProtect != None):
            self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
        self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
        if (self.featureExtraProtect != None):
            self.intercepts += [ '#endif' ]
        OutputGenerator.genCmd(self, cmdinfo, name, alias)
        self.appendSection('command', '')
        self.appendSection('command', 'static %s' % (decls[0][:-1]))
        if name in CUSTOM_C_INTERCEPTS:
            self.appendSection('command', '{%s}' % (CUSTOM_C_INTERCEPTS[name]))
        # Declare result variable, if any.
        resulttype = cmdinfo.elem.find('proto/type')
        if (resulttype != None and resulttype.text == 'void'):
            # NOTE(review): void-result handling elided in this chunk.
        # if the name w/ KHR postfix is in the CUSTOM_C_INTERCEPTS
        # Call the KHR custom version instead of generating separate code
        khr_name = name + "KHR"
        if khr_name in CUSTOM_C_INTERCEPTS:
            if resulttype != None:
                return_string = 'return '
            params = cmdinfo.elem.findall('param/name')
            for param in params:
                param_names.append(param.text)
            # Forward to the KHR-suffixed custom intercept with the same args.
            self.appendSection('command', '{\n %s%s(%s);\n}' % (return_string, khr_name[2:], ", ".join(param_names)))
        self.appendSection('command', '{')
        api_function_name = cmdinfo.elem.attrib.get('name')
        # GET THE TYPE OF FUNCTION
        if True in [ftxt in api_function_name for ftxt in ['Create', 'Allocate']]:
            # Last parameter of Create/Allocate is the output handle (or
            # handle array when it carries a 'len' attribute).
            last_param = cmdinfo.elem.findall('param')[-1]
            lp_txt = last_param.find('name').text
            if ('len' in last_param.attrib):
                lp_len = last_param.attrib['len']
                lp_len = lp_len.replace('::', '->')
            lp_type = last_param.find('type').text
            handle_type = 'dispatchable'
            allocator_txt = 'CreateDispObjHandle()';
            if (self.isHandleTypeNonDispatchable(lp_type)):
                handle_type = 'non-' + handle_type
                allocator_txt = 'global_unique_handle++';
            # Need to lock in both cases
            self.appendSection('command', ' unique_lock_t lock(global_lock);')
            if (lp_len != None):
                #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len))
                self.appendSection('command', ' for (uint32_t i = 0; i < %s; ++i) {' % (lp_len))
                self.appendSection('command', ' %s[i] = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
                self.appendSection('command', ' }')
                #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type))
                if 'AllocateMemory' in api_function_name:
                    # Store allocation size in case it's mapped
                    self.appendSection('command', ' allocated_memory_size_map[(VkDeviceMemory)global_unique_handle] = pAllocateInfo->allocationSize;')
                self.appendSection('command', ' *%s = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
        elif True in [ftxt in api_function_name for ftxt in ['Destroy', 'Free']]:
            self.appendSection('command', '//Destroy object')
            if 'FreeMemory' in api_function_name:
                # Remove from allocation map
                self.appendSection('command', ' allocated_memory_size_map.erase(memory);')
            self.appendSection('command', '//Not a CREATE or DESTROY function')
        # Return result variable, if any.
        if (resulttype != None):
            if api_function_name == 'vkGetEventStatus':
                # Fake that events are always set so waiters make progress.
                self.appendSection('command', ' return VK_EVENT_SET;')
            self.appendSection('command', ' return VK_SUCCESS;')
        self.appendSection('command', '}')
1414 # override makeProtoName to drop the "vk" prefix
1415 def makeProtoName(self, name, tail):
1416 return self.genOpts.apientry + name[2:] + tail