3 # Copyright (c) 2015-2017 The Khronos Group Inc.
4 # Copyright (c) 2015-2017 Valve Corporation
5 # Copyright (c) 2015-2017 LunarG, Inc.
6 # Copyright (c) 2015-2017 Google Inc.
8 # Licensed under the Apache License, Version 2.0 (the "License");
9 # you may not use this file except in compliance with the License.
10 # You may obtain a copy of the License at
12 # http://www.apache.org/licenses/LICENSE-2.0
14 # Unless required by applicable law or agreed to in writing, software
15 # distributed under the License is distributed on an "AS IS" BASIS,
16 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 # See the License for the specific language governing permissions and
18 # limitations under the License.
20 # Author: Tobin Ehlis <tobine@google.com>
22 # This script generates a Mock ICD that intercepts almost all Vulkan
23 # functions. The mock ICD is not intended to be a useful driver in its
24 # initial state. Rather it's intended to be a starting point that
25 # can be copied and customized to assist in creation of a new ICD.
28 from generator import *
29 from common_codegen import *
34 using mutex_t = std::mutex;
35 using lock_guard_t = std::lock_guard<mutex_t>;
36 using unique_lock_t = std::unique_lock<mutex_t>;
38 static mutex_t global_lock;
39 static uint64_t global_unique_handle = 1;
40 static const uint32_t SUPPORTED_LOADER_ICD_INTERFACE_VERSION = 5;
41 static uint32_t loader_interface_version = 0;
42 static bool negotiate_loader_icd_interface_called = false;
43 static void* CreateDispObjHandle() {
44 auto handle = new VK_LOADER_DATA;
45 set_loader_magic_value(handle);
48 static void DestroyDispObjHandle(void* handle) {
49 delete reinterpret_cast<VK_LOADER_DATA*>(handle);
53 # Manual code at the top of the cpp source file
54 SOURCE_CPP_PREFIX = '''
55 using std::unordered_map;
57 static constexpr uint32_t icd_physical_device_count = 1;
58 static constexpr uint32_t kSupportedVulkanAPIVersion = VK_API_VERSION_1_1;
59 static unordered_map<VkInstance, std::array<VkPhysicalDevice, icd_physical_device_count>> physical_device_map;
61 // Map device memory handle to any mapped allocations that we'll need to free on unmap
62 static unordered_map<VkDeviceMemory, std::vector<void*>> mapped_memory_map;
64 // Map device memory allocation handle to the size
65 static unordered_map<VkDeviceMemory, VkDeviceSize> allocated_memory_size_map;
67 static unordered_map<VkDevice, unordered_map<uint32_t, unordered_map<uint32_t, VkQueue>>> queue_map;
68 static unordered_map<VkDevice, unordered_map<VkBuffer, VkBufferCreateInfo>> buffer_map;
69 static unordered_map<VkDevice, unordered_map<VkImage, VkDeviceSize>> image_memory_size_map;
71 static constexpr uint32_t icd_swapchain_image_count = 1;
72 static unordered_map<VkSwapchainKHR, VkImage[icd_swapchain_image_count]> swapchain_image_map;
74 // TODO: Would like to codegen this but limits aren't in XML
75 static VkPhysicalDeviceLimits SetLimits(VkPhysicalDeviceLimits *limits) {
76 limits->maxImageDimension1D = 4096;
77 limits->maxImageDimension2D = 4096;
78 limits->maxImageDimension3D = 256;
79 limits->maxImageDimensionCube = 4096;
80 limits->maxImageArrayLayers = 256;
81 limits->maxTexelBufferElements = 65536;
82 limits->maxUniformBufferRange = 16384;
83 limits->maxStorageBufferRange = 134217728;
84 limits->maxPushConstantsSize = 128;
85 limits->maxMemoryAllocationCount = 4096;
86 limits->maxSamplerAllocationCount = 4000;
87 limits->bufferImageGranularity = 1;
88 limits->sparseAddressSpaceSize = 2147483648;
89 limits->maxBoundDescriptorSets = 4;
90 limits->maxPerStageDescriptorSamplers = 16;
91 limits->maxPerStageDescriptorUniformBuffers = 12;
92 limits->maxPerStageDescriptorStorageBuffers = 4;
93 limits->maxPerStageDescriptorSampledImages = 16;
94 limits->maxPerStageDescriptorStorageImages = 4;
95 limits->maxPerStageDescriptorInputAttachments = 4;
96 limits->maxPerStageResources = 128;
97 limits->maxDescriptorSetSamplers = 96;
98 limits->maxDescriptorSetUniformBuffers = 72;
99 limits->maxDescriptorSetUniformBuffersDynamic = 8;
100 limits->maxDescriptorSetStorageBuffers = 24;
101 limits->maxDescriptorSetStorageBuffersDynamic = 4;
102 limits->maxDescriptorSetSampledImages = 96;
103 limits->maxDescriptorSetStorageImages = 24;
104 limits->maxDescriptorSetInputAttachments = 4;
105 limits->maxVertexInputAttributes = 16;
106 limits->maxVertexInputBindings = 16;
107 limits->maxVertexInputAttributeOffset = 2047;
108 limits->maxVertexInputBindingStride = 2048;
109 limits->maxVertexOutputComponents = 64;
110 limits->maxTessellationGenerationLevel = 64;
111 limits->maxTessellationPatchSize = 32;
112 limits->maxTessellationControlPerVertexInputComponents = 64;
113 limits->maxTessellationControlPerVertexOutputComponents = 64;
114 limits->maxTessellationControlPerPatchOutputComponents = 120;
115 limits->maxTessellationControlTotalOutputComponents = 2048;
116 limits->maxTessellationEvaluationInputComponents = 64;
117 limits->maxTessellationEvaluationOutputComponents = 64;
118 limits->maxGeometryShaderInvocations = 32;
119 limits->maxGeometryInputComponents = 64;
120 limits->maxGeometryOutputComponents = 64;
121 limits->maxGeometryOutputVertices = 256;
122 limits->maxGeometryTotalOutputComponents = 1024;
123 limits->maxFragmentInputComponents = 64;
124 limits->maxFragmentOutputAttachments = 4;
125 limits->maxFragmentDualSrcAttachments = 1;
126 limits->maxFragmentCombinedOutputResources = 4;
127 limits->maxComputeSharedMemorySize = 16384;
128 limits->maxComputeWorkGroupCount[0] = 65535;
129 limits->maxComputeWorkGroupCount[1] = 65535;
130 limits->maxComputeWorkGroupCount[2] = 65535;
131 limits->maxComputeWorkGroupInvocations = 128;
132 limits->maxComputeWorkGroupSize[0] = 128;
133 limits->maxComputeWorkGroupSize[1] = 128;
134 limits->maxComputeWorkGroupSize[2] = 64;
135 limits->subPixelPrecisionBits = 4;
136 limits->subTexelPrecisionBits = 4;
137 limits->mipmapPrecisionBits = 4;
138 limits->maxDrawIndexedIndexValue = UINT32_MAX;
139 limits->maxDrawIndirectCount = UINT16_MAX;
140 limits->maxSamplerLodBias = 2.0f;
141 limits->maxSamplerAnisotropy = 16;
142 limits->maxViewports = 16;
143 limits->maxViewportDimensions[0] = 4096;
144 limits->maxViewportDimensions[1] = 4096;
145 limits->viewportBoundsRange[0] = -8192;
146 limits->viewportBoundsRange[1] = 8191;
147 limits->viewportSubPixelBits = 0;
148 limits->minMemoryMapAlignment = 64;
149 limits->minTexelBufferOffsetAlignment = 16;
150 limits->minUniformBufferOffsetAlignment = 16;
151 limits->minStorageBufferOffsetAlignment = 16;
152 limits->minTexelOffset = -8;
153 limits->maxTexelOffset = 7;
154 limits->minTexelGatherOffset = -8;
155 limits->maxTexelGatherOffset = 7;
156 limits->minInterpolationOffset = 0.0f;
157 limits->maxInterpolationOffset = 0.5f;
158 limits->subPixelInterpolationOffsetBits = 4;
159 limits->maxFramebufferWidth = 4096;
160 limits->maxFramebufferHeight = 4096;
161 limits->maxFramebufferLayers = 256;
162 limits->framebufferColorSampleCounts = 0x7F;
163 limits->framebufferDepthSampleCounts = 0x7F;
164 limits->framebufferStencilSampleCounts = 0x7F;
165 limits->framebufferNoAttachmentsSampleCounts = 0x7F;
166 limits->maxColorAttachments = 4;
167 limits->sampledImageColorSampleCounts = 0x7F;
168 limits->sampledImageIntegerSampleCounts = 0x7F;
169 limits->sampledImageDepthSampleCounts = 0x7F;
170 limits->sampledImageStencilSampleCounts = 0x7F;
171 limits->storageImageSampleCounts = 0x7F;
172 limits->maxSampleMaskWords = 1;
173 limits->timestampComputeAndGraphics = VK_TRUE;
174 limits->timestampPeriod = 1;
175 limits->maxClipDistances = 8;
176 limits->maxCullDistances = 8;
177 limits->maxCombinedClipAndCullDistances = 8;
178 limits->discreteQueuePriorities = 2;
179 limits->pointSizeRange[0] = 1.0f;
180 limits->pointSizeRange[1] = 64.0f;
181 limits->lineWidthRange[0] = 1.0f;
182 limits->lineWidthRange[1] = 8.0f;
183 limits->pointSizeGranularity = 1.0f;
184 limits->lineWidthGranularity = 1.0f;
185 limits->strictLines = VK_TRUE;
186 limits->standardSampleLocations = VK_TRUE;
187 limits->optimalBufferCopyOffsetAlignment = 1;
188 limits->optimalBufferCopyRowPitchAlignment = 1;
189 limits->nonCoherentAtomSize = 256;
194 void SetBoolArrayTrue(VkBool32* bool_array, uint32_t num_bools)
196 for (uint32_t i = 0; i < num_bools; ++i) {
197 bool_array[i] = VK_TRUE;
202 # Manual code at the end of the cpp source file
203 SOURCE_CPP_POSTFIX = '''
205 static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
206 // TODO: This function should only care about physical device functions and return nullptr for other functions
207 const auto &item = name_to_funcptr_map.find(funcName);
208 if (item != name_to_funcptr_map.end()) {
209 return reinterpret_cast<PFN_vkVoidFunction>(item->second);
211 // Mock should intercept all functions so if we get here just return null
215 } // namespace vkmock
217 #if defined(__GNUC__) && __GNUC__ >= 4
218 #define EXPORT __attribute__((visibility("default")))
219 #elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
220 #define EXPORT __attribute__((visibility("default")))
227 EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName) {
228 if (!vkmock::negotiate_loader_icd_interface_called) {
229 vkmock::loader_interface_version = 1;
231 return vkmock::GetInstanceProcAddr(instance, pName);
234 EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName) {
235 return vkmock::GetPhysicalDeviceProcAddr(instance, pName);
238 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
239 vkmock::negotiate_loader_icd_interface_called = true;
240 vkmock::loader_interface_version = *pSupportedVersion;
241 if (*pSupportedVersion > vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
242 *pSupportedVersion = vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION;
248 EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
250 VkSurfaceKHR surface,
251 const VkAllocationCallbacks* pAllocator)
253 vkmock::DestroySurfaceKHR(instance, surface, pAllocator);
256 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
257 VkPhysicalDevice physicalDevice,
258 uint32_t queueFamilyIndex,
259 VkSurfaceKHR surface,
260 VkBool32* pSupported)
262 return vkmock::GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
265 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
266 VkPhysicalDevice physicalDevice,
267 VkSurfaceKHR surface,
268 VkSurfaceCapabilitiesKHR* pSurfaceCapabilities)
270 return vkmock::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
273 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
274 VkPhysicalDevice physicalDevice,
275 VkSurfaceKHR surface,
276 uint32_t* pSurfaceFormatCount,
277 VkSurfaceFormatKHR* pSurfaceFormats)
279 return vkmock::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
282 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
283 VkPhysicalDevice physicalDevice,
284 VkSurfaceKHR surface,
285 uint32_t* pPresentModeCount,
286 VkPresentModeKHR* pPresentModes)
288 return vkmock::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
291 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
293 const VkDisplaySurfaceCreateInfoKHR* pCreateInfo,
294 const VkAllocationCallbacks* pAllocator,
295 VkSurfaceKHR* pSurface)
297 return vkmock::CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
300 #ifdef VK_USE_PLATFORM_XLIB_KHR
302 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
304 const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
305 const VkAllocationCallbacks* pAllocator,
306 VkSurfaceKHR* pSurface)
308 return vkmock::CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
310 #endif /* VK_USE_PLATFORM_XLIB_KHR */
312 #ifdef VK_USE_PLATFORM_XCB_KHR
314 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
316 const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
317 const VkAllocationCallbacks* pAllocator,
318 VkSurfaceKHR* pSurface)
320 return vkmock::CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
322 #endif /* VK_USE_PLATFORM_XCB_KHR */
324 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
326 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
328 const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
329 const VkAllocationCallbacks* pAllocator,
330 VkSurfaceKHR* pSurface)
332 return vkmock::CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
334 #endif /* VK_USE_PLATFORM_WAYLAND_KHR */
336 #ifdef VK_USE_PLATFORM_ANDROID_KHR
338 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
340 const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
341 const VkAllocationCallbacks* pAllocator,
342 VkSurfaceKHR* pSurface)
344 return vkmock::CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
346 #endif /* VK_USE_PLATFORM_ANDROID_KHR */
348 #ifdef VK_USE_PLATFORM_WIN32_KHR
350 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
352 const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
353 const VkAllocationCallbacks* pAllocator,
354 VkSurfaceKHR* pSurface)
356 return vkmock::CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
358 #endif /* VK_USE_PLATFORM_WIN32_KHR */
360 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
362 VkSurfaceKHR surface,
363 VkDeviceGroupPresentModeFlagsKHR* pModes)
365 return vkmock::GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
368 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
369 VkPhysicalDevice physicalDevice,
370 VkSurfaceKHR surface,
371 uint32_t* pRectCount,
374 return vkmock::GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
377 #ifdef VK_USE_PLATFORM_VI_NN
379 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
381 const VkViSurfaceCreateInfoNN* pCreateInfo,
382 const VkAllocationCallbacks* pAllocator,
383 VkSurfaceKHR* pSurface)
385 return vkmock::CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
387 #endif /* VK_USE_PLATFORM_VI_NN */
389 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
390 VkPhysicalDevice physicalDevice,
391 VkSurfaceKHR surface,
392 VkSurfaceCapabilities2EXT* pSurfaceCapabilities)
394 return vkmock::GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
397 #ifdef VK_USE_PLATFORM_IOS_MVK
399 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
401 const VkIOSSurfaceCreateInfoMVK* pCreateInfo,
402 const VkAllocationCallbacks* pAllocator,
403 VkSurfaceKHR* pSurface)
405 return vkmock::CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
407 #endif /* VK_USE_PLATFORM_IOS_MVK */
409 #ifdef VK_USE_PLATFORM_MACOS_MVK
411 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
413 const VkMacOSSurfaceCreateInfoMVK* pCreateInfo,
414 const VkAllocationCallbacks* pAllocator,
415 VkSurfaceKHR* pSurface)
417 return vkmock::CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
419 #endif /* VK_USE_PLATFORM_MACOS_MVK */
425 CUSTOM_C_INTERCEPTS = {
426 'vkCreateInstance': '''
427 // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
428 // apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
429 // ICD should behave as normal.
430 if (loader_interface_version <= 4) {
431 return VK_ERROR_INCOMPATIBLE_DRIVER;
433 *pInstance = (VkInstance)CreateDispObjHandle();
434 for (auto& physical_device : physical_device_map[*pInstance])
435 physical_device = (VkPhysicalDevice)CreateDispObjHandle();
436 // TODO: If emulating specific device caps, will need to add intelligence here
439 'vkDestroyInstance': '''
441 for (const auto physical_device : physical_device_map.at(instance))
442 DestroyDispObjHandle((void*)physical_device);
443 physical_device_map.erase(instance);
444 DestroyDispObjHandle((void*)instance);
447 'vkEnumeratePhysicalDevices': '''
448 VkResult result_code = VK_SUCCESS;
449 if (pPhysicalDevices) {
450 const auto return_count = (std::min)(*pPhysicalDeviceCount, icd_physical_device_count);
451 for (uint32_t i = 0; i < return_count; ++i) pPhysicalDevices[i] = physical_device_map.at(instance)[i];
452 if (return_count < icd_physical_device_count) result_code = VK_INCOMPLETE;
453 *pPhysicalDeviceCount = return_count;
455 *pPhysicalDeviceCount = icd_physical_device_count;
459 'vkCreateDevice': '''
460 *pDevice = (VkDevice)CreateDispObjHandle();
461 // TODO: If emulating specific device caps, will need to add intelligence here
464 'vkDestroyDevice': '''
465 unique_lock_t lock(global_lock);
466 // First destroy sub-device objects
468 for (auto dev_queue_map_pair : queue_map) {
469 for (auto queue_family_map_pair : queue_map[dev_queue_map_pair.first]) {
470 for (auto index_queue_pair : queue_map[dev_queue_map_pair.first][queue_family_map_pair.first]) {
471 DestroyDispObjHandle((void*)index_queue_pair.second);
476 buffer_map.erase(device);
477 image_memory_size_map.erase(device);
478 // Now destroy device
479 DestroyDispObjHandle((void*)device);
480 // TODO: If emulating specific device caps, will need to add intelligence here
482 'vkGetDeviceQueue': '''
483 unique_lock_t lock(global_lock);
484 auto queue = queue_map[device][queueFamilyIndex][queueIndex];
488 *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
490 // TODO: If emulating specific device caps, will need to add intelligence here
493 'vkGetDeviceQueue2': '''
494 GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
495 // TODO: Add further support for GetDeviceQueue2 features
497 'vkEnumerateInstanceLayerProperties': '''
500 'vkEnumerateInstanceVersion': '''
501 *pApiVersion = kSupportedVulkanAPIVersion;
504 'vkEnumerateDeviceLayerProperties': '''
507 'vkEnumerateInstanceExtensionProperties': '''
508 // If requesting number of extensions, return that
511 *pPropertyCount = (uint32_t)instance_extension_map.size();
514 for (const auto &name_ver_pair : instance_extension_map) {
515 if (i == *pPropertyCount) {
518 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
519 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
520 pProperties[i].specVersion = name_ver_pair.second;
523 if (i != instance_extension_map.size()) {
524 return VK_INCOMPLETE;
528 // If requesting extension properties, fill in data struct for number of extensions
531 'vkEnumerateDeviceExtensionProperties': '''
532 // If requesting number of extensions, return that
535 *pPropertyCount = (uint32_t)device_extension_map.size();
538 for (const auto &name_ver_pair : device_extension_map) {
539 if (i == *pPropertyCount) {
542 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
543 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
544 pProperties[i].specVersion = name_ver_pair.second;
547 if (i != device_extension_map.size()) {
548 return VK_INCOMPLETE;
552 // If requesting extension properties, fill in data struct for number of extensions
555 'vkGetPhysicalDeviceSurfacePresentModesKHR': '''
556 // Currently always say that all present modes are supported
557 if (!pPresentModes) {
558 *pPresentModeCount = 6;
560 // Intentionally falling through and just filling however many modes are requested
561 switch(*pPresentModeCount) {
563 pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
566 pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
569 pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
572 pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
575 pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
578 pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
584 'vkGetPhysicalDeviceSurfaceFormatsKHR': '''
585 // Currently always say that RGBA8 & BGRA8 are supported
586 if (!pSurfaceFormats) {
587 *pSurfaceFormatCount = 2;
589 // Intentionally falling through and just filling however many types are requested
590 switch(*pSurfaceFormatCount) {
592 pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
593 pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
596 pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
597 pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
603 'vkGetPhysicalDeviceSurfaceFormats2KHR': '''
604 // Currently always say that RGBA8 & BGRA8 are supported
605 if (!pSurfaceFormats) {
606 *pSurfaceFormatCount = 2;
608 // Intentionally falling through and just filling however many types are requested
609 switch(*pSurfaceFormatCount) {
611 pSurfaceFormats[1].pNext = nullptr;
612 pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
613 pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
616 pSurfaceFormats[1].pNext = nullptr;
617 pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
618 pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
624 'vkGetPhysicalDeviceSurfaceSupportKHR': '''
625 // Currently say that all surface/queue combos are supported
626 *pSupported = VK_TRUE;
629 'vkGetPhysicalDeviceSurfaceCapabilitiesKHR': '''
630 // In general just say max supported is available for requested surface
631 pSurfaceCapabilities->minImageCount = 1;
632 pSurfaceCapabilities->maxImageCount = 0;
633 pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
634 pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
635 pSurfaceCapabilities->minImageExtent.width = 1;
636 pSurfaceCapabilities->minImageExtent.height = 1;
637 pSurfaceCapabilities->maxImageExtent.width = 0xFFFF;
638 pSurfaceCapabilities->maxImageExtent.height = 0xFFFF;
639 pSurfaceCapabilities->maxImageArrayLayers = 128;
640 pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
641 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
642 VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
643 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
644 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
645 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
646 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
647 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
648 VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
649 pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
650 pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
651 VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
652 VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
653 VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
654 pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
655 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
656 VK_IMAGE_USAGE_SAMPLED_BIT |
657 VK_IMAGE_USAGE_STORAGE_BIT |
658 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
659 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
660 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
661 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
664 'vkGetPhysicalDeviceSurfaceCapabilities2KHR': '''
665 GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
668 'vkGetInstanceProcAddr': '''
669 if (!negotiate_loader_icd_interface_called) {
670 loader_interface_version = 0;
672 const auto &item = name_to_funcptr_map.find(pName);
673 if (item != name_to_funcptr_map.end()) {
674 return reinterpret_cast<PFN_vkVoidFunction>(item->second);
676 // Mock should intercept all functions so if we get here just return null
679 'vkGetDeviceProcAddr': '''
680 return GetInstanceProcAddr(nullptr, pName);
682 'vkGetPhysicalDeviceMemoryProperties': '''
683 pMemoryProperties->memoryTypeCount = 2;
684 pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
685 pMemoryProperties->memoryTypes[0].heapIndex = 0;
686 pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
687 pMemoryProperties->memoryTypes[1].heapIndex = 1;
688 pMemoryProperties->memoryHeapCount = 2;
689 pMemoryProperties->memoryHeaps[0].flags = 0;
690 pMemoryProperties->memoryHeaps[0].size = 8000000000;
691 pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
692 pMemoryProperties->memoryHeaps[1].size = 8000000000;
694 'vkGetPhysicalDeviceMemoryProperties2KHR': '''
695 GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
697 'vkGetPhysicalDeviceQueueFamilyProperties': '''
698 if (!pQueueFamilyProperties) {
699 *pQueueFamilyPropertyCount = 1;
701 if (*pQueueFamilyPropertyCount) {
702 pQueueFamilyProperties[0].queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;
703 pQueueFamilyProperties[0].queueCount = 1;
704 pQueueFamilyProperties[0].timestampValidBits = 0;
705 pQueueFamilyProperties[0].minImageTransferGranularity = {1,1,1};
709 'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
710 if (pQueueFamilyPropertyCount && pQueueFamilyProperties) {
711 GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, &pQueueFamilyProperties->queueFamilyProperties);
713 GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, nullptr);
716 'vkGetPhysicalDeviceFeatures': '''
717 uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
718 VkBool32 *bool_array = &pFeatures->robustBufferAccess;
719 SetBoolArrayTrue(bool_array, num_bools);
721 'vkGetPhysicalDeviceFeatures2KHR': '''
722 GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
723 uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
724 VkBool32* feat_bools = nullptr;
725 const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
726 if (desc_idx_features) {
727 const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
728 num_bools = bool_size/sizeof(VkBool32);
729 feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
730 SetBoolArrayTrue(feat_bools, num_bools);
732 const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
733 if (blendop_features) {
734 const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
735 num_bools = bool_size/sizeof(VkBool32);
736 feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
737 SetBoolArrayTrue(feat_bools, num_bools);
740 'vkGetPhysicalDeviceFormatProperties': '''
741 if (VK_FORMAT_UNDEFINED == format) {
742 *pFormatProperties = { 0x0, 0x0, 0x0 };
744 // TODO: Just returning full support for everything initially
745 *pFormatProperties = { 0x00FFFFFF, 0x00FFFFFF, 0x00FFFFFF };
748 'vkGetPhysicalDeviceFormatProperties2KHR': '''
749 GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
751 'vkGetPhysicalDeviceImageFormatProperties': '''
752 // A hardcoded unsupported format
753 if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
754 return VK_ERROR_FORMAT_NOT_SUPPORTED;
757 // TODO: Just hard-coding some values for now
758 // TODO: If tiling is linear, limit the mips, levels, & sample count
759 if (VK_IMAGE_TILING_LINEAR == tiling) {
760 *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
762 // We hard-code support for all sample counts except 64 bits.
763 *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
767 'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
768 GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
771 'vkGetPhysicalDeviceProperties': '''
772 // TODO: Just hard-coding some values for now
773 pProperties->apiVersion = kSupportedVulkanAPIVersion;
774 pProperties->driverVersion = 1;
775 pProperties->vendorID = 0xba5eba11;
776 pProperties->deviceID = 0xf005ba11;
777 pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
778 //std::string devName = "Vulkan Mock Device";
779 strcpy(pProperties->deviceName, "Vulkan Mock Device");
780 pProperties->pipelineCacheUUID[0] = 18;
781 pProperties->limits = SetLimits(&pProperties->limits);
782 pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
784 'vkGetPhysicalDeviceProperties2KHR': '''
785 GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
786 const auto *desc_idx_props = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingPropertiesEXT>(pProperties->pNext);
787 if (desc_idx_props) {
788 VkPhysicalDeviceDescriptorIndexingPropertiesEXT* write_props = (VkPhysicalDeviceDescriptorIndexingPropertiesEXT*)desc_idx_props;
789 write_props->maxUpdateAfterBindDescriptorsInAllPools = 500000;
790 write_props->shaderUniformBufferArrayNonUniformIndexingNative = false;
791 write_props->shaderSampledImageArrayNonUniformIndexingNative = false;
792 write_props->shaderStorageBufferArrayNonUniformIndexingNative = false;
793 write_props->shaderStorageImageArrayNonUniformIndexingNative = false;
794 write_props->shaderInputAttachmentArrayNonUniformIndexingNative = false;
795 write_props->robustBufferAccessUpdateAfterBind = true;
796 write_props->quadDivergentImplicitLod = true;
797 write_props->maxPerStageDescriptorUpdateAfterBindSamplers = 500000;
798 write_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers = 500000;
799 write_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers = 500000;
800 write_props->maxPerStageDescriptorUpdateAfterBindSampledImages = 500000;
801 write_props->maxPerStageDescriptorUpdateAfterBindStorageImages = 500000;
802 write_props->maxPerStageDescriptorUpdateAfterBindInputAttachments = 500000;
803 write_props->maxPerStageUpdateAfterBindResources = 500000;
804 write_props->maxDescriptorSetUpdateAfterBindSamplers = 500000;
805 write_props->maxDescriptorSetUpdateAfterBindUniformBuffers = 96;
806 write_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 8;
807 write_props->maxDescriptorSetUpdateAfterBindStorageBuffers = 500000;
808 write_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 4;
809 write_props->maxDescriptorSetUpdateAfterBindSampledImages = 500000;
810 write_props->maxDescriptorSetUpdateAfterBindStorageImages = 500000;
811 write_props->maxDescriptorSetUpdateAfterBindInputAttachments = 500000;
814 const auto *push_descriptor_props = lvl_find_in_chain<VkPhysicalDevicePushDescriptorPropertiesKHR>(pProperties->pNext);
815 if (push_descriptor_props) {
816 VkPhysicalDevicePushDescriptorPropertiesKHR* write_props = (VkPhysicalDevicePushDescriptorPropertiesKHR*)push_descriptor_props;
817 write_props->maxPushDescriptors = 256;
820 const auto *depth_stencil_resolve_props = lvl_find_in_chain<VkPhysicalDeviceDepthStencilResolvePropertiesKHR>(pProperties->pNext);
821 if (depth_stencil_resolve_props) {
822 VkPhysicalDeviceDepthStencilResolvePropertiesKHR* write_props = (VkPhysicalDeviceDepthStencilResolvePropertiesKHR*)depth_stencil_resolve_props;
823 write_props->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
824 write_props->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
827 'vkGetPhysicalDeviceExternalSemaphoreProperties':'''
828 // Hard code support for all handle types and features
829 pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
830 pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
831 pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
833 'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':'''
834 GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
836 'vkGetPhysicalDeviceExternalFenceProperties':'''
837 // Hard-code support for all handle types and features
838 pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
839 pExternalFenceProperties->compatibleHandleTypes = 0xF;
840 pExternalFenceProperties->externalFenceFeatures = 0x3;
842 'vkGetPhysicalDeviceExternalFencePropertiesKHR':'''
843 GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
845 'vkGetPhysicalDeviceExternalBufferProperties':'''
846 // Hard-code support for all handle types and features
847 pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
848 pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0x1FF;
849 pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0x1FF;
851 'vkGetPhysicalDeviceExternalBufferPropertiesKHR':'''
852 GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
854 'vkGetBufferMemoryRequirements': '''
855 // TODO: Just hard-coding reqs for now
856 pMemoryRequirements->size = 4096;
857 pMemoryRequirements->alignment = 1;
858 pMemoryRequirements->memoryTypeBits = 0xFFFF;
859 // Return a better size based on the buffer size from the create info.
860 auto d_iter = buffer_map.find(device);
861 if (d_iter != buffer_map.end()) {
862 auto iter = d_iter->second.find(buffer);
863 if (iter != d_iter->second.end()) {
864 pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
868 'vkGetBufferMemoryRequirements2KHR': '''
869 GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
871 'vkGetImageMemoryRequirements': '''
872 pMemoryRequirements->size = 0;
873 pMemoryRequirements->alignment = 1;
875 auto d_iter = image_memory_size_map.find(device);
876 if(d_iter != image_memory_size_map.end()){
877 auto iter = d_iter->second.find(image);
878 if (iter != d_iter->second.end()) {
879 pMemoryRequirements->size = iter->second;
882 // Here we hard-code that the memory type at index 3 doesn't support this image.
883 pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
885 'vkGetImageMemoryRequirements2KHR': '''
886 GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
889 unique_lock_t lock(global_lock);
890 if (VK_WHOLE_SIZE == size) {
891 if (allocated_memory_size_map.count(memory) != 0)
892 size = allocated_memory_size_map[memory] - offset;
896 void* map_addr = malloc((size_t)size);
897 mapped_memory_map[memory].push_back(map_addr);
902 unique_lock_t lock(global_lock);
903 for (auto map_addr : mapped_memory_map[memory]) {
906 mapped_memory_map.erase(memory);
908 'vkGetImageSubresourceLayout': '''
909 // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure.
910 *pLayout = VkSubresourceLayout(); // Default constructor zero values.
912 'vkCreateSwapchainKHR': '''
913 unique_lock_t lock(global_lock);
914 *pSwapchain = (VkSwapchainKHR)global_unique_handle++;
915 for(uint32_t i = 0; i < icd_swapchain_image_count; ++i){
916 swapchain_image_map[*pSwapchain][i] = (VkImage)global_unique_handle++;
920 'vkDestroySwapchainKHR': '''
921 unique_lock_t lock(global_lock);
922 swapchain_image_map.clear();
924 'vkGetSwapchainImagesKHR': '''
925 if (!pSwapchainImages) {
926 *pSwapchainImageCount = icd_swapchain_image_count;
928 unique_lock_t lock(global_lock);
929 for (uint32_t img_i = 0; img_i < (std::min)(*pSwapchainImageCount, icd_swapchain_image_count); ++img_i){
930 pSwapchainImages[img_i] = swapchain_image_map.at(swapchain)[img_i];
933 if (*pSwapchainImageCount < icd_swapchain_image_count) return VK_INCOMPLETE;
934 else if (*pSwapchainImageCount > icd_swapchain_image_count) *pSwapchainImageCount = icd_swapchain_image_count;
938 'vkAcquireNextImageKHR': '''
942 'vkAcquireNextImage2KHR': '''
946 'vkCreateBuffer': '''
947 unique_lock_t lock(global_lock);
948 *pBuffer = (VkBuffer)global_unique_handle++;
949 buffer_map[device][*pBuffer] = *pCreateInfo;
952 'vkDestroyBuffer': '''
953 unique_lock_t lock(global_lock);
954 buffer_map[device].erase(buffer);
957 unique_lock_t lock(global_lock);
958 *pImage = (VkImage)global_unique_handle++;
959 // TODO: A pixel size is 32 bytes. This accounts for the largest possible pixel size of any format. It could be changed to more accurate size if need be.
960 image_memory_size_map[device][*pImage] = pCreateInfo->extent.width * pCreateInfo->extent.height * pCreateInfo->extent.depth *
961 32 * pCreateInfo->arrayLayers * (pCreateInfo->mipLevels > 1 ? 2 : 1);
963 switch (pCreateInfo->format) {
964 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
965 case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
966 case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
967 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
968 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
969 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
970 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
971 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
972 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
973 case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
974 case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
975 case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
976 image_memory_size_map[device][*pImage] *= 3;
978 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
979 case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
980 case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
981 case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
982 case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
983 case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
984 case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
985 case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
986 image_memory_size_map[device][*pImage] *= 2;
993 'vkDestroyImage': '''
994 unique_lock_t lock(global_lock);
995 image_memory_size_map[device].erase(image);
999 # MockICDGeneratorOptions - subclass of GeneratorOptions.
1001 # Adds options used by MockICDOutputGenerator objects during Mock
1004 # Additional members
1005 # prefixText - list of strings to prefix generated header with
1006 # (usually a copyright statement + calling convention macros).
1007 # protectFile - True if multiple inclusion protection should be
1008 # generated (based on the filename) around the entire header.
1009 # protectFeature - True if #ifndef..#endif protection should be
1010 # generated around a feature interface in the header file.
1011 # genFuncPointers - True if function pointer typedefs should be
1013 # protectProto - If conditional protection should be generated
1014 # around prototype declarations, set to either '#ifdef'
1015 # to require opt-in (#ifdef protectProtoStr) or '#ifndef'
1016 # to require opt-out (#ifndef protectProtoStr). Otherwise
1018 # protectProtoStr - #ifdef/#ifndef symbol to use around prototype
1019 # declarations, if protectProto is set
1020 # apicall - string to use for the function declaration prefix,
1021 # such as APICALL on Windows.
1022 # apientry - string to use for the calling convention macro,
1023 # in typedefs, such as APIENTRY.
1024 # apientryp - string to use for the calling convention macro
1025 # in function pointer typedefs, such as APIENTRYP.
1026 # indentFuncProto - True if prototype declarations should put each
1027 # parameter on a separate line
1028 # indentFuncPointer - True if typedefed function pointers should put each
1029 # parameter on a separate line
1030 # alignFuncParam - if nonzero and parameters are being put on a
1031 # separate line, align parameter names at the specified column
class MockICDGeneratorOptions(GeneratorOptions):
    # Options controlling generation of the mock ICD; the comment block above
    # this class documents the meaning of each keyword argument.
    # NOTE(review): the 'def __init__(self, conventions = None, ...)' header
    # and several leading keyword defaults are missing from this extract -
    # TODO: restore from the upstream script before running.
                 emitversions = '.*',
                 defaultExtensions = None,
                 addExtensions = None,
                 removeExtensions = None,
                 emitExtensions = None,
                 sortProcedure = regSortFeatures,
                 genFuncPointers = True,
                 protectFeature = True,
                 protectProto = None,
                 protectProtoStr = None,
                 indentFuncProto = True,
                 indentFuncPointer = False,
                 expandEnumerants = True,
                 helper_file_type = ''):
        # Registry-selection options are forwarded to the base GeneratorOptions.
        GeneratorOptions.__init__(self,
                                  conventions = conventions,
                                  filename = filename,
                                  directory = directory,
                                  versions = versions,
                                  emitversions = emitversions,
                                  defaultExtensions = defaultExtensions,
                                  addExtensions = addExtensions,
                                  removeExtensions = removeExtensions,
                                  emitExtensions = emitExtensions,
                                  sortProcedure = sortProcedure)
        # Mock-ICD-specific formatting/protection options are stored directly
        # on the instance for MockICDOutputGenerator to consume.
        self.prefixText = prefixText
        self.genFuncPointers = genFuncPointers
        self.protectFile = protectFile
        self.protectFeature = protectFeature
        self.protectProto = protectProto
        self.protectProtoStr = protectProtoStr
        self.apicall = apicall
        self.apientry = apientry
        self.apientryp = apientryp
        self.indentFuncProto = indentFuncProto
        self.indentFuncPointer = indentFuncPointer
        self.alignFuncParam = alignFuncParam
1088 # MockICDOutputGenerator - subclass of OutputGenerator.
1089 # Generates a mock vulkan ICD.
1090 # This is intended to be a minimal replacement for a vulkan device in order
1091 # to enable Vulkan Validation testing.
# MockICDOutputGenerator(errFile, warnFile, diagFile) - args as for
1095 # OutputGenerator. Defines additional internal state.
1096 # ---- methods overriding base class ----
1097 # beginFile(genOpts)
1099 # beginFeature(interface, emit)
1101 # genType(typeinfo,name)
1102 # genStruct(typeinfo,name)
1103 # genGroup(groupinfo,name)
1104 # genEnum(enuminfo, name)
class MockICDOutputGenerator(OutputGenerator):
    """Generate specified API interfaces in a specific style, such as a C header"""
    # This is an ordered list of sections in the header file.
    TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
                     'group', 'bitmask', 'funcpointer', 'struct']
    ALL_SECTIONS = TYPE_SECTIONS + ['command']
    # NOTE(review): the 'def __init__(self,' header is missing from this
    # extract - only the trailing default arguments survive below.
                 errFile = sys.stderr,
                 warnFile = sys.stderr,
                 diagFile = sys.stdout):
        OutputGenerator.__init__(self, errFile, warnFile, diagFile)
        # Internal state - accumulators for different inner block text
        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
        # C initializer-list entries for the name_to_funcptr_map table that
        # endFile() writes out.
        self.intercepts = []
1121 # Check if the parameter passed in is a pointer to an array
1122 def paramIsArray(self, param):
1123 return param.attrib.get('len') is not None
    # Check if the parameter passed in is a pointer
    def paramIsPointer(self, param):
        # A '*' appearing in a child element's tail text (i.e. after the
        # <type> tag) marks the parameter as a pointer.
        # NOTE(review): the enclosing 'for elem in param:' loop and the
        # result bookkeeping/return are missing from this extract - TODO:
        # restore from the upstream script.
            if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
    # Check if an object is a non-dispatchable handle
    def isHandleTypeNonDispatchable(self, handletype):
        # Look the handle type up in the registry XML and test which C macro
        # declares it; VK_DEFINE_NON_DISPATCHABLE_HANDLE marks the
        # non-dispatchable (opaque 64-bit) handle types.
        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
        if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
        # NOTE(review): the True/False return branches are missing from this
        # extract - TODO: restore from the upstream script.
    # Check if an object is a dispatchable handle
    def isHandleTypeDispatchable(self, handletype):
        # Same registry lookup as isHandleTypeNonDispatchable, but matching
        # VK_DEFINE_HANDLE, which marks the dispatchable handle types.
        handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
        if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
        # NOTE(review): the True/False return branches are missing from this
        # extract - TODO: restore from the upstream script.
    def beginFile(self, genOpts):
        # Emit everything that precedes the per-feature output: include
        # guard (header mode), prefix/license text, #includes, the vkmock
        # namespace, hand-written C code, and the extension-name maps.
        OutputGenerator.beginFile(self, genOpts)
        # Multiple inclusion protection & C++ namespace.
        if (genOpts.protectFile and self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
            headerSym = '__' + re.sub(r'\.h', '_h_', os.path.basename(self.genOpts.filename))
            write('#ifndef', headerSym, file=self.outFile)
            write('#define', headerSym, '1', file=self.outFile)
        # User-supplied prefix text, if any (list of strings)
        if (genOpts.prefixText):
            for s in genOpts.prefixText:
                write(s, file=self.outFile)
        write('#include <unordered_map>', file=self.outFile)
        write('#include <mutex>', file=self.outFile)
        write('#include <string>', file=self.outFile)
        write('#include <cstring>', file=self.outFile)
        write('#include "vulkan/vk_icd.h"', file=self.outFile)
        # NOTE(review): several lines are elided around here; upstream emits
        # different #include sets for header vs. source output - TODO confirm.
        write('#include "mock_icd.h"', file=self.outFile)
        write('#include <stdlib.h>', file=self.outFile)
        write('#include <algorithm>', file=self.outFile)
        write('#include <array>', file=self.outFile)
        write('#include <vector>', file=self.outFile)
        write('#include "vk_typemap_helper.h"', file=self.outFile)
        write('namespace vkmock {', file=self.outFile)
        write(HEADER_C_CODE, file=self.outFile)
        # Include all of the extensions in ICD except specific ignored ones
        # NOTE(review): the instance_exts/device_exts accumulator
        # initializations are missing from this extract.
        # Ignore extensions that ICDs should not implement or are not safe to report
        ignore_exts = ['VK_EXT_validation_cache']
        for ext in self.registry.tree.findall("extensions/extension"):
            if ext.attrib['supported'] != 'disabled': # Only include enabled extensions
                if (ext.attrib['name'] in ignore_exts):
                # NOTE(review): the branch body ('pass') and the 'else:' before
                # the device-extension append are missing from this extract.
                elif (ext.attrib.get('type') and 'instance' == ext.attrib['type']):
                    instance_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
                    device_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
        write('// Map of instance extension name to version', file=self.outFile)
        write('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {', file=self.outFile)
        write('\n'.join(instance_exts), file=self.outFile)
        write('};', file=self.outFile)
        write('// Map of device extension name to version', file=self.outFile)
        write('static const std::unordered_map<std::string, uint32_t> device_extension_map = {', file=self.outFile)
        write('\n'.join(device_exts), file=self.outFile)
        write('};', file=self.outFile)
        write(SOURCE_CPP_PREFIX, file=self.outFile)
        # NOTE(review): this is the tail of endFile(); the 'def endFile(self):'
        # header and the branch that distinguishes header vs. source output are
        # missing from this extract, so the indentation below is approximate.
        # Finish C++ namespace and multiple inclusion protection
        # record intercepted procedures
        write('// Map of all APIs to be intercepted by this layer', file=self.outFile)
        write('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
        write('\n'.join(self.intercepts), file=self.outFile)
        write('};\n', file=self.outFile)
        write('} // namespace vkmock', file=self.outFile)
        # Close the include guard opened in beginFile() for header output.
        write('#endif', file=self.outFile)
        else: # Loader-layer-interface, need to implement global interface functions
            write(SOURCE_CPP_POSTFIX, file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFile(self)
    def beginFeature(self, interface, emit):
        # Reset the per-feature section accumulators and record the feature's
        # platform #ifdef guard (if any) before genType/genCmd run.
        #write('// starting beginFeature', file=self.outFile)
        # Start processing in superclass
        OutputGenerator.beginFeature(self, interface, emit)
        self.featureExtraProtect = GetFeatureProtect(interface)
        # Accumulate includes, defines, types, enums, function pointer typedefs,
        # and function prototypes separately for this feature. They're only
        # printed in endFeature().
        self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
        #write('// ending beginFeature', file=self.outFile)
    def endFeature(self):
        # Flush the section accumulators built up since beginFeature() to the
        # output file, wrapped in feature/platform #ifdef guards.
        # NOTE(review): a few guard lines appear to be elided from this
        # extract (e.g. an emit check and an empty-contents check) - TODO
        # confirm against the upstream script.
        # Actually write the interface to the output file.
        #write('// starting endFeature', file=self.outFile)
        if (self.genOpts.protectFeature):
            write('#ifndef', self.featureName, file=self.outFile)
        # If type declarations are needed by other features based on
        # this one, it may be necessary to suppress the ExtraProtect,
        # or move it below the 'for section...' loop.
        #write('// endFeature looking at self.featureExtraProtect', file=self.outFile)
        if (self.featureExtraProtect != None):
            write('#ifdef', self.featureExtraProtect, file=self.outFile)
        #write('#define', self.featureName, '1', file=self.outFile)
        for section in self.TYPE_SECTIONS:
            #write('// endFeature writing section'+section, file=self.outFile)
            contents = self.sections[section]
                write('\n'.join(contents), file=self.outFile)
        #write('// endFeature looking at self.sections[command]', file=self.outFile)
        if (self.sections['command']):
            write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
        if (self.featureExtraProtect != None):
            write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
        if (self.genOpts.protectFeature):
            write('#endif /*', self.featureName, '*/', file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFeature(self)
        #write('// ending endFeature', file=self.outFile)
1272 # Append a definition to the specified section
1273 def appendSection(self, section, text):
1274 # self.sections[section].append('SECTION: ' + section + '\n')
1275 self.sections[section].append(text)
    def genType(self, typeinfo, name, alias):
        # NOTE(review): the body of genType is missing from this extract -
        # TODO: restore from the upstream script.
    # Struct (e.g. C "struct" type) generation.
    # This is a special case of the <type> tag where the contents are
    # interpreted as a set of <member> tags instead of freeform C
    # C type declarations. The <member> tags are just like <param>
    # tags - they are a declaration of a struct or union member.
    # Only simple member declarations are supported (no nested
    def genStruct(self, typeinfo, typeName, alias):
        # Emit a C 'typedef struct/union' built from the registry <member>
        # declarations, into the 'struct' section accumulator.
        OutputGenerator.genStruct(self, typeinfo, typeName, alias)
        body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
        # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
        for member in typeinfo.elem.findall('.//member'):
            body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
            # NOTE(review): one line (the per-member newline append) appears
            # to be elided here - TODO confirm against upstream.
        body += '} ' + typeName + ';\n'
        self.appendSection('struct', body)
    # Group (e.g. C "enum" type) generation.
    # These are concatenated together with other types.
    def genGroup(self, groupinfo, groupName, alias):
        # NOTE(review): body missing from this extract - TODO restore.
    # Enumerant generation
    # <enum> tags may specify their values in several ways, but are usually
    def genEnum(self, enuminfo, name, alias):
        # NOTE(review): body missing from this extract - TODO restore.
    # Command generation
    def genCmd(self, cmdinfo, name, alias):
        # Emit one intercept for a Vulkan entry point. In header mode only a
        # declaration is produced; in source mode the body comes from
        # CUSTOM_C_INTERCEPTS when present, otherwise a generic body is
        # synthesized (handle allocation for Create/Allocate, cleanup for
        # Destroy/Free, VK_SUCCESS otherwise). Every intercepted name is also
        # recorded in self.intercepts for the name_to_funcptr_map table.
        # NOTE(review): a number of 'else:'/'return'/initializer lines are
        # elided throughout this extract; indentation below is approximate -
        # TODO restore from the upstream script before running.
        decls = self.makeCDecls(cmdinfo.elem)
        if self.header: # In the header declare all intercepts
            self.appendSection('command', '')
            self.appendSection('command', 'static %s' % (decls[0]))
            if (self.featureExtraProtect != None):
                self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
            self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
            if (self.featureExtraProtect != None):
                self.intercepts += [ '#endif' ]
        manual_functions = [
            # Include functions here to be intercepted w/ manually implemented function bodies
            'vkGetDeviceProcAddr',
            'vkGetInstanceProcAddr',
            'vkDestroyInstance',
            #'vkCreateDebugReportCallbackEXT',
            #'vkDestroyDebugReportCallbackEXT',
            'vkEnumerateInstanceLayerProperties',
            'vkEnumerateInstanceVersion',
            'vkEnumerateInstanceExtensionProperties',
            'vkEnumerateDeviceLayerProperties',
            'vkEnumerateDeviceExtensionProperties',
        # NOTE(review): the list terminator ']' is elided here.
        if name in manual_functions:
            # Manually-implemented intercepts: emit a declaration plus the
            # hand-written body from CUSTOM_C_INTERCEPTS when one exists.
            self.appendSection('command', '')
            if name not in CUSTOM_C_INTERCEPTS:
                self.appendSection('command', '// declare only')
                self.appendSection('command', 'static %s' % (decls[0]))
                self.appendSection('command', '// TODO: Implement custom intercept body')
                self.appendSection('command', 'static %s' % (decls[0][:-1]))
                self.appendSection('command', '{\n%s}' % (CUSTOM_C_INTERCEPTS[name]))
            self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
        # record that the function will be intercepted
        if (self.featureExtraProtect != None):
            self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
        self.intercepts += [ ' {"%s", (void*)%s},' % (name,name[2:]) ]
        if (self.featureExtraProtect != None):
            self.intercepts += [ '#endif' ]
        OutputGenerator.genCmd(self, cmdinfo, name, alias)
        self.appendSection('command', '')
        self.appendSection('command', 'static %s' % (decls[0][:-1]))
        if name in CUSTOM_C_INTERCEPTS:
            self.appendSection('command', '{%s}' % (CUSTOM_C_INTERCEPTS[name]))
        # Declare result variable, if any.
        resulttype = cmdinfo.elem.find('proto/type')
        if (resulttype != None and resulttype.text == 'void'):
        # if the name w/ KHR postfix is in the CUSTOM_C_INTERCEPTS
        # Call the KHR custom version instead of generating separate code
        khr_name = name + "KHR"
        if khr_name in CUSTOM_C_INTERCEPTS:
            if resulttype != None:
                return_string = 'return '
            params = cmdinfo.elem.findall('param/name')
            for param in params:
                param_names.append(param.text)
            self.appendSection('command', '{\n %s%s(%s);\n}' % (return_string, khr_name[2:], ", ".join(param_names)))
        self.appendSection('command', '{')
        api_function_name = cmdinfo.elem.attrib.get('name')
        # GET THE TYPE OF FUNCTION
        if True in [ftxt in api_function_name for ftxt in ['Create', 'Allocate']]:
            # Create/Allocate: write the created handle(s) through the last
            # (output) parameter, using a dispatchable or non-dispatchable
            # allocator depending on the handle type.
            last_param = cmdinfo.elem.findall('param')[-1]
            lp_txt = last_param.find('name').text
            if ('len' in last_param.attrib):
                lp_len = last_param.attrib['len']
                lp_len = lp_len.replace('::', '->')
            lp_type = last_param.find('type').text
            handle_type = 'dispatchable'
            allocator_txt = 'CreateDispObjHandle()';
            if (self.isHandleTypeNonDispatchable(lp_type)):
                handle_type = 'non-' + handle_type
                allocator_txt = 'global_unique_handle++';
            # Need to lock in both cases
            self.appendSection('command', ' unique_lock_t lock(global_lock);')
            if (lp_len != None):
                #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len))
                self.appendSection('command', ' for (uint32_t i = 0; i < %s; ++i) {' % (lp_len))
                self.appendSection('command', ' %s[i] = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
                self.appendSection('command', ' }')
            #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type))
            if 'AllocateMemory' in api_function_name:
                # Store allocation size in case it's mapped
                self.appendSection('command', ' allocated_memory_size_map[(VkDeviceMemory)global_unique_handle] = pAllocateInfo->allocationSize;')
            self.appendSection('command', ' *%s = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
        elif True in [ftxt in api_function_name for ftxt in ['Destroy', 'Free']]:
            self.appendSection('command', '//Destroy object')
            if 'FreeMemory' in api_function_name:
                # Remove from allocation map
                self.appendSection('command', ' allocated_memory_size_map.erase(memory);')
        self.appendSection('command', '//Not a CREATE or DESTROY function')
        # Return result variable, if any.
        if (resulttype != None):
            # vkGetEventStatus is special-cased so events read back as set.
            if api_function_name == 'vkGetEventStatus':
                self.appendSection('command', ' return VK_EVENT_SET;')
            self.appendSection('command', ' return VK_SUCCESS;')
        self.appendSection('command', '}')
1427 # override makeProtoName to drop the "vk" prefix
1428 def makeProtoName(self, name, tail):
1429 return self.genOpts.apientry + name[2:] + tail