Fix FreeCommandBuffers leak
[platform/upstream/Vulkan-Tools.git] / scripts / mock_icd_generator.py
1 #!/usr/bin/python3 -i
2 #
3 # Copyright (c) 2015-2021 The Khronos Group Inc.
4 # Copyright (c) 2015-2021 Valve Corporation
5 # Copyright (c) 2015-2021 LunarG, Inc.
6 # Copyright (c) 2015-2021 Google Inc.
7 #
8 # Licensed under the Apache License, Version 2.0 (the "License");
9 # you may not use this file except in compliance with the License.
10 # You may obtain a copy of the License at
11 #
12 #     http://www.apache.org/licenses/LICENSE-2.0
13 #
14 # Unless required by applicable law or agreed to in writing, software
15 # distributed under the License is distributed on an "AS IS" BASIS,
16 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 # See the License for the specific language governing permissions and
18 # limitations under the License.
19 #
20 # Author: Tobin Ehlis <tobine@google.com>
21 #
22 # This script generates a Mock ICD that intercepts almost all Vulkan
23 #  functions. The resulting ICD is not a real driver: it returns hard-coded
24 #  mock data and acts as a minimal device replacement so that the loader and
25 #  the validation layers can be exercised without real hardware.
26
27 import os, re, sys
28 from generator import *
29 from common_codegen import *
30
31
32 # Mock header code
33 HEADER_C_CODE = '''
34 using mutex_t = std::mutex;
35 using lock_guard_t = std::lock_guard<mutex_t>;
36 using unique_lock_t = std::unique_lock<mutex_t>;
37
38 static mutex_t global_lock;
39 static uint64_t global_unique_handle = 1;
40 static const uint32_t SUPPORTED_LOADER_ICD_INTERFACE_VERSION = 5;
41 static uint32_t loader_interface_version = 0;
42 static bool negotiate_loader_icd_interface_called = false;
43 static void* CreateDispObjHandle() {
44     auto handle = new VK_LOADER_DATA;
45     set_loader_magic_value(handle);
46     return handle;
47 }
48 static void DestroyDispObjHandle(void* handle) {
49     delete reinterpret_cast<VK_LOADER_DATA*>(handle);
50 }
51 '''
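# Note on the two helpers above: dispatchable Vulkan handles (VkInstance,
# VkPhysicalDevice, VkDevice, VkQueue, VkCommandBuffer) handed out by the mock
# must point at a VK_LOADER_DATA carrying the loader magic value, which is what
# CreateDispObjHandle() sets up. Every handle created that way must eventually be
# released with DestroyDispObjHandle(); the vkFreeCommandBuffers intercept below
# does exactly that, which is the leak this change fixes.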
52
53 # Manual code at the top of the cpp source file
54 SOURCE_CPP_PREFIX = '''
55 using std::unordered_map;
56
57 static constexpr uint32_t icd_physical_device_count = 1;
58 static constexpr uint32_t kSupportedVulkanAPIVersion = VK_API_VERSION_1_1;
59 static unordered_map<VkInstance, std::array<VkPhysicalDevice, icd_physical_device_count>> physical_device_map;
60
61 // Map device memory handle to any mapped allocations that we'll need to free on unmap
62 static unordered_map<VkDeviceMemory, std::vector<void*>> mapped_memory_map;
63
64 // Map device memory allocation handle to the size
65 static unordered_map<VkDeviceMemory, VkDeviceSize> allocated_memory_size_map;
66
67 static unordered_map<VkDevice, unordered_map<uint32_t, unordered_map<uint32_t, VkQueue>>> queue_map;
68 static unordered_map<VkDevice, unordered_map<VkBuffer, VkBufferCreateInfo>> buffer_map;
69 static unordered_map<VkDevice, unordered_map<VkImage, VkDeviceSize>> image_memory_size_map;
70
71 static constexpr uint32_t icd_swapchain_image_count = 1;
72 static unordered_map<VkSwapchainKHR, VkImage[icd_swapchain_image_count]> swapchain_image_map;
73
74 // TODO: Would like to codegen this but limits aren't in XML
75 static VkPhysicalDeviceLimits SetLimits(VkPhysicalDeviceLimits *limits) {
76     limits->maxImageDimension1D = 4096;
77     limits->maxImageDimension2D = 4096;
78     limits->maxImageDimension3D = 256;
79     limits->maxImageDimensionCube = 4096;
80     limits->maxImageArrayLayers = 256;
81     limits->maxTexelBufferElements = 65536;
82     limits->maxUniformBufferRange = 16384;
83     limits->maxStorageBufferRange = 134217728;
84     limits->maxPushConstantsSize = 128;
85     limits->maxMemoryAllocationCount = 4096;
86     limits->maxSamplerAllocationCount = 4000;
87     limits->bufferImageGranularity = 1;
88     limits->sparseAddressSpaceSize = 2147483648;
89     limits->maxBoundDescriptorSets = 4;
90     limits->maxPerStageDescriptorSamplers = 16;
91     limits->maxPerStageDescriptorUniformBuffers = 12;
92     limits->maxPerStageDescriptorStorageBuffers = 4;
93     limits->maxPerStageDescriptorSampledImages = 16;
94     limits->maxPerStageDescriptorStorageImages = 4;
95     limits->maxPerStageDescriptorInputAttachments = 4;
96     limits->maxPerStageResources = 128;
97     limits->maxDescriptorSetSamplers = 96;
98     limits->maxDescriptorSetUniformBuffers = 72;
99     limits->maxDescriptorSetUniformBuffersDynamic = 8;
100     limits->maxDescriptorSetStorageBuffers = 24;
101     limits->maxDescriptorSetStorageBuffersDynamic = 4;
102     limits->maxDescriptorSetSampledImages = 96;
103     limits->maxDescriptorSetStorageImages = 24;
104     limits->maxDescriptorSetInputAttachments = 4;
105     limits->maxVertexInputAttributes = 16;
106     limits->maxVertexInputBindings = 16;
107     limits->maxVertexInputAttributeOffset = 2047;
108     limits->maxVertexInputBindingStride = 2048;
109     limits->maxVertexOutputComponents = 64;
110     limits->maxTessellationGenerationLevel = 64;
111     limits->maxTessellationPatchSize = 32;
112     limits->maxTessellationControlPerVertexInputComponents = 64;
113     limits->maxTessellationControlPerVertexOutputComponents = 64;
114     limits->maxTessellationControlPerPatchOutputComponents = 120;
115     limits->maxTessellationControlTotalOutputComponents = 2048;
116     limits->maxTessellationEvaluationInputComponents = 64;
117     limits->maxTessellationEvaluationOutputComponents = 64;
118     limits->maxGeometryShaderInvocations = 32;
119     limits->maxGeometryInputComponents = 64;
120     limits->maxGeometryOutputComponents = 64;
121     limits->maxGeometryOutputVertices = 256;
122     limits->maxGeometryTotalOutputComponents = 1024;
123     limits->maxFragmentInputComponents = 64;
124     limits->maxFragmentOutputAttachments = 4;
125     limits->maxFragmentDualSrcAttachments = 1;
126     limits->maxFragmentCombinedOutputResources = 4;
127     limits->maxComputeSharedMemorySize = 16384;
128     limits->maxComputeWorkGroupCount[0] = 65535;
129     limits->maxComputeWorkGroupCount[1] = 65535;
130     limits->maxComputeWorkGroupCount[2] = 65535;
131     limits->maxComputeWorkGroupInvocations = 128;
132     limits->maxComputeWorkGroupSize[0] = 128;
133     limits->maxComputeWorkGroupSize[1] = 128;
134     limits->maxComputeWorkGroupSize[2] = 64;
135     limits->subPixelPrecisionBits = 4;
136     limits->subTexelPrecisionBits = 4;
137     limits->mipmapPrecisionBits = 4;
138     limits->maxDrawIndexedIndexValue = UINT32_MAX;
139     limits->maxDrawIndirectCount = UINT16_MAX;
140     limits->maxSamplerLodBias = 2.0f;
141     limits->maxSamplerAnisotropy = 16;
142     limits->maxViewports = 16;
143     limits->maxViewportDimensions[0] = 4096;
144     limits->maxViewportDimensions[1] = 4096;
145     limits->viewportBoundsRange[0] = -8192;
146     limits->viewportBoundsRange[1] = 8191;
147     limits->viewportSubPixelBits = 0;
148     limits->minMemoryMapAlignment = 64;
149     limits->minTexelBufferOffsetAlignment = 16;
150     limits->minUniformBufferOffsetAlignment = 16;
151     limits->minStorageBufferOffsetAlignment = 16;
152     limits->minTexelOffset = -8;
153     limits->maxTexelOffset = 7;
154     limits->minTexelGatherOffset = -8;
155     limits->maxTexelGatherOffset = 7;
156     limits->minInterpolationOffset = 0.0f;
157     limits->maxInterpolationOffset = 0.5f;
158     limits->subPixelInterpolationOffsetBits = 4;
159     limits->maxFramebufferWidth = 4096;
160     limits->maxFramebufferHeight = 4096;
161     limits->maxFramebufferLayers = 256;
162     limits->framebufferColorSampleCounts = 0x7F;
163     limits->framebufferDepthSampleCounts = 0x7F;
164     limits->framebufferStencilSampleCounts = 0x7F;
165     limits->framebufferNoAttachmentsSampleCounts = 0x7F;
166     limits->maxColorAttachments = 4;
167     limits->sampledImageColorSampleCounts = 0x7F;
168     limits->sampledImageIntegerSampleCounts = 0x7F;
169     limits->sampledImageDepthSampleCounts = 0x7F;
170     limits->sampledImageStencilSampleCounts = 0x7F;
171     limits->storageImageSampleCounts = 0x7F;
172     limits->maxSampleMaskWords = 1;
173     limits->timestampComputeAndGraphics = VK_TRUE;
174     limits->timestampPeriod = 1;
175     limits->maxClipDistances = 8;
176     limits->maxCullDistances = 8;
177     limits->maxCombinedClipAndCullDistances = 8;
178     limits->discreteQueuePriorities = 2;
179     limits->pointSizeRange[0] = 1.0f;
180     limits->pointSizeRange[1] = 64.0f;
181     limits->lineWidthRange[0] = 1.0f;
182     limits->lineWidthRange[1] = 8.0f;
183     limits->pointSizeGranularity = 1.0f;
184     limits->lineWidthGranularity = 1.0f;
185     limits->strictLines = VK_TRUE;
186     limits->standardSampleLocations = VK_TRUE;
187     limits->optimalBufferCopyOffsetAlignment = 1;
188     limits->optimalBufferCopyRowPitchAlignment = 1;
189     limits->nonCoherentAtomSize = 256;
190
191     return *limits;
192 }
193
194 void SetBoolArrayTrue(VkBool32* bool_array, uint32_t num_bools)
195 {
196     for (uint32_t i = 0; i < num_bools; ++i) {
197         bool_array[i] = VK_TRUE;
198     }
199 }
200 '''
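# The maps above are the mock ICD's only bookkeeping: buffer_map lets
# vkGetBufferMemoryRequirements derive a size from the original VkBufferCreateInfo,
# image_memory_size_map plays the same role for images, mapped_memory_map and
# allocated_memory_size_map back the malloc-based vkMapMemory/vkUnmapMemory pair,
# queue_map caches the VkQueue handles returned by vkGetDeviceQueue, and
# swapchain_image_map stores the fake images returned by vkGetSwapchainImagesKHR.
# Each map is keyed by the owning handle so the matching destroy intercepts can
# erase just their own entries.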
201
202 # Manual code at the end of the cpp source file
203 SOURCE_CPP_POSTFIX = '''
204
205 static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
206     // TODO: This function should only care about physical device functions and return nullptr for other functions
207     const auto &item = name_to_funcptr_map.find(funcName);
208     if (item != name_to_funcptr_map.end()) {
209         return reinterpret_cast<PFN_vkVoidFunction>(item->second);
210     }
211     // Mock should intercept all functions so if we get here just return null
212     return nullptr;
213 }
214
215 } // namespace vkmock
216
217 #if defined(__GNUC__) && __GNUC__ >= 4
218 #define EXPORT __attribute__((visibility("default")))
219 #elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
220 #define EXPORT __attribute__((visibility("default")))
221 #else
222 #define EXPORT
223 #endif
224
225 extern "C" {
226
227 EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName) {
228     if (!vkmock::negotiate_loader_icd_interface_called) {
229         vkmock::loader_interface_version = 1;
230     }
231     return vkmock::GetInstanceProcAddr(instance, pName);
232 }
233
234 EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName) {
235     return vkmock::GetPhysicalDeviceProcAddr(instance, pName);
236 }
237
238 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
239     vkmock::negotiate_loader_icd_interface_called = true;
240     vkmock::loader_interface_version = *pSupportedVersion;
241     if (*pSupportedVersion > vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
242         *pSupportedVersion = vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION;
243     }
244     return VK_SUCCESS;
245 }
246
247
248 EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
249     VkInstance                                  instance,
250     VkSurfaceKHR                                surface,
251     const VkAllocationCallbacks*                pAllocator)
252 {
253     vkmock::DestroySurfaceKHR(instance, surface, pAllocator);
254 }
255
256 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
257     VkPhysicalDevice                            physicalDevice,
258     uint32_t                                    queueFamilyIndex,
259     VkSurfaceKHR                                surface,
260     VkBool32*                                   pSupported)
261 {
262     return vkmock::GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
263 }
264
265 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
266     VkPhysicalDevice                            physicalDevice,
267     VkSurfaceKHR                                surface,
268     VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities)
269 {
270     return vkmock::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
271 }
272
273 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
274     VkPhysicalDevice                            physicalDevice,
275     VkSurfaceKHR                                surface,
276     uint32_t*                                   pSurfaceFormatCount,
277     VkSurfaceFormatKHR*                         pSurfaceFormats)
278 {
279     return vkmock::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
280 }
281
282 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
283     VkPhysicalDevice                            physicalDevice,
284     VkSurfaceKHR                                surface,
285     uint32_t*                                   pPresentModeCount,
286     VkPresentModeKHR*                           pPresentModes)
287 {
288     return vkmock::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
289 }
290
291 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
292     VkInstance                                  instance,
293     const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
294     const VkAllocationCallbacks*                pAllocator,
295     VkSurfaceKHR*                               pSurface)
296 {
297     return vkmock::CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
298 }
299
300 #ifdef VK_USE_PLATFORM_XLIB_KHR
301
302 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
303     VkInstance                                  instance,
304     const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
305     const VkAllocationCallbacks*                pAllocator,
306     VkSurfaceKHR*                               pSurface)
307 {
308     return vkmock::CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
309 }
310 #endif /* VK_USE_PLATFORM_XLIB_KHR */
311
312 #ifdef VK_USE_PLATFORM_XCB_KHR
313
314 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
315     VkInstance                                  instance,
316     const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
317     const VkAllocationCallbacks*                pAllocator,
318     VkSurfaceKHR*                               pSurface)
319 {
320     return vkmock::CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
321 }
322 #endif /* VK_USE_PLATFORM_XCB_KHR */
323
324 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
325
326 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
327     VkInstance                                  instance,
328     const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
329     const VkAllocationCallbacks*                pAllocator,
330     VkSurfaceKHR*                               pSurface)
331 {
332     return vkmock::CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
333 }
334 #endif /* VK_USE_PLATFORM_WAYLAND_KHR */
335
336 #ifdef VK_USE_PLATFORM_ANDROID_KHR
337
338 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
339     VkInstance                                  instance,
340     const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
341     const VkAllocationCallbacks*                pAllocator,
342     VkSurfaceKHR*                               pSurface)
343 {
344     return vkmock::CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
345 }
346 #endif /* VK_USE_PLATFORM_ANDROID_KHR */
347
348 #ifdef VK_USE_PLATFORM_WIN32_KHR
349
350 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
351     VkInstance                                  instance,
352     const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
353     const VkAllocationCallbacks*                pAllocator,
354     VkSurfaceKHR*                               pSurface)
355 {
356     return vkmock::CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
357 }
358 #endif /* VK_USE_PLATFORM_WIN32_KHR */
359
360 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
361     VkDevice                                    device,
362     VkSurfaceKHR                                surface,
363     VkDeviceGroupPresentModeFlagsKHR*           pModes)
364 {
365     return vkmock::GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
366 }
367
368 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
369     VkPhysicalDevice                            physicalDevice,
370     VkSurfaceKHR                                surface,
371     uint32_t*                                   pRectCount,
372     VkRect2D*                                   pRects)
373 {
374     return vkmock::GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
375 }
376
377 #ifdef VK_USE_PLATFORM_VI_NN
378
379 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
380     VkInstance                                  instance,
381     const VkViSurfaceCreateInfoNN*              pCreateInfo,
382     const VkAllocationCallbacks*                pAllocator,
383     VkSurfaceKHR*                               pSurface)
384 {
385     return vkmock::CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
386 }
387 #endif /* VK_USE_PLATFORM_VI_NN */
388
389 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
390     VkPhysicalDevice                            physicalDevice,
391     VkSurfaceKHR                                surface,
392     VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities)
393 {
394     return vkmock::GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
395 }
396
397 #ifdef VK_USE_PLATFORM_IOS_MVK
398
399 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
400     VkInstance                                  instance,
401     const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
402     const VkAllocationCallbacks*                pAllocator,
403     VkSurfaceKHR*                               pSurface)
404 {
405     return vkmock::CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
406 }
407 #endif /* VK_USE_PLATFORM_IOS_MVK */
408
409 #ifdef VK_USE_PLATFORM_MACOS_MVK
410
411 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
412     VkInstance                                  instance,
413     const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
414     const VkAllocationCallbacks*                pAllocator,
415     VkSurfaceKHR*                               pSurface)
416 {
417     return vkmock::CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
418 }
419 #endif /* VK_USE_PLATFORM_MACOS_MVK */
420
421 } // end extern "C"
422
423 '''
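# The block above is appended to the end of the generated .cpp (per the comment
# above). It provides the C-linkage symbols the Vulkan loader resolves directly
# from the ICD shared library: the loader is expected to call
# vk_icdNegotiateLoaderICDInterfaceVersion first (which the mock clamps to
# SUPPORTED_LOADER_ICD_INTERFACE_VERSION) and then route everything else through
# vk_icdGetInstanceProcAddr / vk_icdGetPhysicalDeviceProcAddr. The surface
# creation exports are only compiled when the matching VK_USE_PLATFORM_* macro
# is defined.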
424
425 CUSTOM_C_INTERCEPTS = {
426 'vkCreateInstance': '''
427     // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
428     //  apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
429     //  ICD should behave as normal.
430     if (loader_interface_version <= 4) {
431         return VK_ERROR_INCOMPATIBLE_DRIVER;
432     }
433     *pInstance = (VkInstance)CreateDispObjHandle();
434     for (auto& physical_device : physical_device_map[*pInstance])
435         physical_device = (VkPhysicalDevice)CreateDispObjHandle();
436     // TODO: If emulating specific device caps, will need to add intelligence here
437     return VK_SUCCESS;
438 ''',
439 'vkDestroyInstance': '''
440     if (instance) {
441         for (const auto physical_device : physical_device_map.at(instance))
442             DestroyDispObjHandle((void*)physical_device);
443         physical_device_map.erase(instance);
444         DestroyDispObjHandle((void*)instance);
445     }
446 ''',
447 'vkFreeCommandBuffers': '''
448     for (auto i = 0u; i < commandBufferCount; ++i)
449         if (pCommandBuffers[i])
450             DestroyDispObjHandle((void*) pCommandBuffers[i]);
451 ''',
452 'vkEnumeratePhysicalDevices': '''
453     VkResult result_code = VK_SUCCESS;
454     if (pPhysicalDevices) {
455         const auto return_count = (std::min)(*pPhysicalDeviceCount, icd_physical_device_count);
456         for (uint32_t i = 0; i < return_count; ++i) pPhysicalDevices[i] = physical_device_map.at(instance)[i];
457         if (return_count < icd_physical_device_count) result_code = VK_INCOMPLETE;
458         *pPhysicalDeviceCount = return_count;
459     } else {
460         *pPhysicalDeviceCount = icd_physical_device_count;
461     }
462     return result_code;
463 ''',
464 'vkCreateDevice': '''
465     *pDevice = (VkDevice)CreateDispObjHandle();
466     // TODO: If emulating specific device caps, will need to add intelligence here
467     return VK_SUCCESS;
468 ''',
469 'vkDestroyDevice': '''
470     unique_lock_t lock(global_lock);
471     // First destroy sub-device objects owned by this device
472     // Destroy Queues
473     for (auto &queue_family_map_pair : queue_map[device]) {
474         for (auto &index_queue_pair : queue_family_map_pair.second) {
475             DestroyDispObjHandle((void*)index_queue_pair.second);
476         }
477     }
478     queue_map.erase(device);
479     // Remove this device's bookkeeping entries
480     buffer_map.erase(device);
481     image_memory_size_map.erase(device);
483     // Now destroy device
484     DestroyDispObjHandle((void*)device);
485     // TODO: If emulating specific device caps, will need to add intelligence here
486 ''',
487 'vkGetDeviceQueue': '''
488     unique_lock_t lock(global_lock);
489     auto queue = queue_map[device][queueFamilyIndex][queueIndex];
490     if (queue) {
491         *pQueue = queue;
492     } else {
493         *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
494     }
495     // TODO: If emulating specific device caps, will need to add intelligence here
496     return;
497 ''',
498 'vkGetDeviceQueue2': '''
499     GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
500     // TODO: Add further support for GetDeviceQueue2 features
501 ''',
502 'vkEnumerateInstanceLayerProperties': '''
503     return VK_SUCCESS;
504 ''',
505 'vkEnumerateInstanceVersion': '''
506     *pApiVersion = kSupportedVulkanAPIVersion;
507     return VK_SUCCESS;
508 ''',
509 'vkEnumerateDeviceLayerProperties': '''
510     return VK_SUCCESS;
511 ''',
512 'vkEnumerateInstanceExtensionProperties': '''
513     // If requesting number of extensions, return that
514     if (!pLayerName) {
515         if (!pProperties) {
516             *pPropertyCount = (uint32_t)instance_extension_map.size();
517         } else {
518             uint32_t i = 0;
519             for (const auto &name_ver_pair : instance_extension_map) {
520                 if (i == *pPropertyCount) {
521                     break;
522                 }
523                 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
524                 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
525                 pProperties[i].specVersion = name_ver_pair.second;
526                 ++i;
527             }
528             if (i != instance_extension_map.size()) {
529                 return VK_INCOMPLETE;
530             }
531         }
532     }
533     // If requesting extension properties, fill in data struct for number of extensions
534     return VK_SUCCESS;
535 ''',
536 'vkEnumerateDeviceExtensionProperties': '''
537     // If requesting number of extensions, return that
538     if (!pLayerName) {
539         if (!pProperties) {
540             *pPropertyCount = (uint32_t)device_extension_map.size();
541         } else {
542             uint32_t i = 0;
543             for (const auto &name_ver_pair : device_extension_map) {
544                 if (i == *pPropertyCount) {
545                     break;
546                 }
547                 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
548                 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
549                 pProperties[i].specVersion = name_ver_pair.second;
550                 ++i;
551             }
552             if (i != device_extension_map.size()) {
553                 return VK_INCOMPLETE;
554             }
555         }
556     }
557     // If requesting extension properties, fill in data struct for number of extensions
558     return VK_SUCCESS;
559 ''',
560 'vkGetPhysicalDeviceSurfacePresentModesKHR': '''
561     // Currently always say that all present modes are supported
562     if (!pPresentModes) {
563         *pPresentModeCount = 6;
564     } else {
565         // Intentionally falling through and just filling however many modes are requested
566         switch(*pPresentModeCount) {
567         case 6:
568             pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
569             // fall through
570         case 5:
571             pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
572             // fall through
573         case 4:
574             pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
575             // fall through
576         case 3:
577             pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
578             // fall through
579         case 2:
580             pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
581             // fall through
582         default:
583             pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
584             break;
585         }
586     }
587     return VK_SUCCESS;
588 ''',
589 'vkGetPhysicalDeviceSurfaceFormatsKHR': '''
590     // Currently always say that RGBA8 & BGRA8 are supported
591     if (!pSurfaceFormats) {
592         *pSurfaceFormatCount = 2;
593     } else {
594         // Intentionally falling through and just filling however many types are requested
595         switch(*pSurfaceFormatCount) {
596         case 2:
597             pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
598             pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
599             // fall through
600         default:
601             pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
602             pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
603             break;
604         }
605     }
606     return VK_SUCCESS;
607 ''',
608 'vkGetPhysicalDeviceSurfaceFormats2KHR': '''
609     // Currently always say that RGBA8 & BGRA8 are supported
610     if (!pSurfaceFormats) {
611         *pSurfaceFormatCount = 2;
612     } else {
613         // Intentionally falling through and just filling however many types are requested
614         switch(*pSurfaceFormatCount) {
615         case 2:
616             pSurfaceFormats[1].pNext = nullptr;
617             pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
618             pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
619             // fall through
620         default:
621             pSurfaceFormats[0].pNext = nullptr;
622             pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
623             pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
624             break;
625         }
626     }
627     return VK_SUCCESS;
628 ''',
629 'vkGetPhysicalDeviceSurfaceSupportKHR': '''
630     // Currently say that all surface/queue combos are supported
631     *pSupported = VK_TRUE;
632     return VK_SUCCESS;
633 ''',
634 'vkGetPhysicalDeviceSurfaceCapabilitiesKHR': '''
635     // In general just say max supported is available for requested surface
636     pSurfaceCapabilities->minImageCount = 1;
637     pSurfaceCapabilities->maxImageCount = 0;
638     pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
639     pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
640     pSurfaceCapabilities->minImageExtent.width = 1;
641     pSurfaceCapabilities->minImageExtent.height = 1;
642     pSurfaceCapabilities->maxImageExtent.width = 0xFFFF;
643     pSurfaceCapabilities->maxImageExtent.height = 0xFFFF;
644     pSurfaceCapabilities->maxImageArrayLayers = 128;
645     pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
646                                                 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
647                                                 VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
648                                                 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
649                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
650                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
651                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
652                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
653                                                 VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
654     pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
655     pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
656                                                     VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
657                                                     VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
658                                                     VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
659     pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
660                                                 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
661                                                 VK_IMAGE_USAGE_SAMPLED_BIT |
662                                                 VK_IMAGE_USAGE_STORAGE_BIT |
663                                                 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
664                                                 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
665                                                 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
666                                                 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
667     return VK_SUCCESS;
668 ''',
669 'vkGetPhysicalDeviceSurfaceCapabilities2KHR': '''
670     GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
671     return VK_SUCCESS;
672 ''',
673 'vkGetInstanceProcAddr': '''
674     if (!negotiate_loader_icd_interface_called) {
675         loader_interface_version = 0;
676     }
677     const auto &item = name_to_funcptr_map.find(pName);
678     if (item != name_to_funcptr_map.end()) {
679         return reinterpret_cast<PFN_vkVoidFunction>(item->second);
680     }
681     // Mock should intercept all functions so if we get here just return null
682     return nullptr;
683 ''',
684 'vkGetDeviceProcAddr': '''
685     return GetInstanceProcAddr(nullptr, pName);
686 ''',
687 'vkGetPhysicalDeviceMemoryProperties': '''
688     pMemoryProperties->memoryTypeCount = 2;
689     pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
690     pMemoryProperties->memoryTypes[0].heapIndex = 0;
691     pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
692     pMemoryProperties->memoryTypes[1].heapIndex = 1;
693     pMemoryProperties->memoryHeapCount = 2;
694     pMemoryProperties->memoryHeaps[0].flags = 0;
695     pMemoryProperties->memoryHeaps[0].size = 8000000000;
696     pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
697     pMemoryProperties->memoryHeaps[1].size = 8000000000;
698 ''',
699 'vkGetPhysicalDeviceMemoryProperties2KHR': '''
700     GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
701 ''',
702 'vkGetPhysicalDeviceQueueFamilyProperties': '''
703     if (!pQueueFamilyProperties) {
704         *pQueueFamilyPropertyCount = 1;
705     } else {
706         if (*pQueueFamilyPropertyCount) {
707             pQueueFamilyProperties[0].queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;
708             pQueueFamilyProperties[0].queueCount = 1;
709             pQueueFamilyProperties[0].timestampValidBits = 0;
710             pQueueFamilyProperties[0].minImageTransferGranularity = {1,1,1};
711         }
712     }
713 ''',
714 'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
715     if (pQueueFamilyPropertyCount && pQueueFamilyProperties) {
716         GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, &pQueueFamilyProperties->queueFamilyProperties);
717     } else {
718         GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, nullptr);
719     }
720 ''',
721 'vkGetPhysicalDeviceFeatures': '''
722     uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
723     VkBool32 *bool_array = &pFeatures->robustBufferAccess;
724     SetBoolArrayTrue(bool_array, num_bools);
725 ''',
726 'vkGetPhysicalDeviceFeatures2KHR': '''
727     GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
728     uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
729     VkBool32* feat_bools = nullptr;
730     const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
731     if (desc_idx_features) {
732         const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
733         num_bools = bool_size/sizeof(VkBool32);
734         feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
735         SetBoolArrayTrue(feat_bools, num_bools);
736     }
737     const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
738     if (blendop_features) {
739         const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
740         num_bools = bool_size/sizeof(VkBool32);
741         feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
742         SetBoolArrayTrue(feat_bools, num_bools);
743     }
744 ''',
745 'vkGetPhysicalDeviceFormatProperties': '''
746     if (VK_FORMAT_UNDEFINED == format) {
747         *pFormatProperties = { 0x0, 0x0, 0x0 };
748     } else {
749         // Default to a color format, skip DS bit
750         *pFormatProperties = { 0x00FFFDFF, 0x00FFFDFF, 0x00FFFDFF };
751         switch (format) {
752             case VK_FORMAT_D16_UNORM:
753             case VK_FORMAT_X8_D24_UNORM_PACK32:
754             case VK_FORMAT_D32_SFLOAT:
755             case VK_FORMAT_S8_UINT:
756             case VK_FORMAT_D16_UNORM_S8_UINT:
757             case VK_FORMAT_D24_UNORM_S8_UINT:
758             case VK_FORMAT_D32_SFLOAT_S8_UINT:
759                 // Don't set color bits for DS formats
760                 *pFormatProperties = { 0x00FFFE7F, 0x00FFFE7F, 0x00FFFE7F };
761                 break;
762             default:
763                 break;
764         }
765     }
766 ''',
767 'vkGetPhysicalDeviceFormatProperties2KHR': '''
768     GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
769     VkFormatProperties3KHR *props_3 = lvl_find_mod_in_chain<VkFormatProperties3KHR>(pFormatProperties->pNext);
770     if (props_3) {
771         props_3->linearTilingFeatures = pFormatProperties->formatProperties.linearTilingFeatures;
772         props_3->optimalTilingFeatures = pFormatProperties->formatProperties.optimalTilingFeatures;
773         props_3->bufferFeatures = pFormatProperties->formatProperties.bufferFeatures;
774     }
775 ''',
776 'vkGetPhysicalDeviceImageFormatProperties': '''
777     // A hardcoded unsupported format
778     if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
779         return VK_ERROR_FORMAT_NOT_SUPPORTED;
780     }
781
782     // TODO: Just hard-coding some values for now
783     // TODO: If tiling is linear, limit the mips, levels, & sample count
784     if (VK_IMAGE_TILING_LINEAR == tiling) {
785         *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
786     } else {
787         // We hard-code support for all sample counts except 64 bits.
788         *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
789     }
790     return VK_SUCCESS;
791 ''',
792 'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
793     // Forward the result so the hard-coded unsupported format is also rejected via this path
794     return GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
795 ''',
796 'vkGetPhysicalDeviceProperties': '''
797     // TODO: Just hard-coding some values for now
798     pProperties->apiVersion = kSupportedVulkanAPIVersion;
799     pProperties->driverVersion = 1;
800     pProperties->vendorID = 0xba5eba11;
801     pProperties->deviceID = 0xf005ba11;
802     pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
803     //std::string devName = "Vulkan Mock Device";
804     strcpy(pProperties->deviceName, "Vulkan Mock Device");
805     pProperties->pipelineCacheUUID[0] = 18;
806     pProperties->limits = SetLimits(&pProperties->limits);
807     pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
808 ''',
809 'vkGetPhysicalDeviceProperties2KHR': '''
810     GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
811     const auto *desc_idx_props = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingPropertiesEXT>(pProperties->pNext);
812     if (desc_idx_props) {
813         VkPhysicalDeviceDescriptorIndexingPropertiesEXT* write_props = (VkPhysicalDeviceDescriptorIndexingPropertiesEXT*)desc_idx_props;
814         write_props->maxUpdateAfterBindDescriptorsInAllPools = 500000;
815         write_props->shaderUniformBufferArrayNonUniformIndexingNative = false;
816         write_props->shaderSampledImageArrayNonUniformIndexingNative = false;
817         write_props->shaderStorageBufferArrayNonUniformIndexingNative = false;
818         write_props->shaderStorageImageArrayNonUniformIndexingNative = false;
819         write_props->shaderInputAttachmentArrayNonUniformIndexingNative = false;
820         write_props->robustBufferAccessUpdateAfterBind = true;
821         write_props->quadDivergentImplicitLod = true;
822         write_props->maxPerStageDescriptorUpdateAfterBindSamplers = 500000;
823         write_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers = 500000;
824         write_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers = 500000;
825         write_props->maxPerStageDescriptorUpdateAfterBindSampledImages = 500000;
826         write_props->maxPerStageDescriptorUpdateAfterBindStorageImages = 500000;
827         write_props->maxPerStageDescriptorUpdateAfterBindInputAttachments = 500000;
828         write_props->maxPerStageUpdateAfterBindResources = 500000;
829         write_props->maxDescriptorSetUpdateAfterBindSamplers = 500000;
830         write_props->maxDescriptorSetUpdateAfterBindUniformBuffers = 96;
831         write_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 8;
832         write_props->maxDescriptorSetUpdateAfterBindStorageBuffers = 500000;
833         write_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 4;
834         write_props->maxDescriptorSetUpdateAfterBindSampledImages = 500000;
835         write_props->maxDescriptorSetUpdateAfterBindStorageImages = 500000;
836         write_props->maxDescriptorSetUpdateAfterBindInputAttachments = 500000;
837     }
838
839     const auto *push_descriptor_props = lvl_find_in_chain<VkPhysicalDevicePushDescriptorPropertiesKHR>(pProperties->pNext);
840     if (push_descriptor_props) {
841         VkPhysicalDevicePushDescriptorPropertiesKHR* write_props = (VkPhysicalDevicePushDescriptorPropertiesKHR*)push_descriptor_props;
842         write_props->maxPushDescriptors = 256;
843     }
844
845     const auto *depth_stencil_resolve_props = lvl_find_in_chain<VkPhysicalDeviceDepthStencilResolvePropertiesKHR>(pProperties->pNext);
846     if (depth_stencil_resolve_props) {
847         VkPhysicalDeviceDepthStencilResolvePropertiesKHR* write_props = (VkPhysicalDeviceDepthStencilResolvePropertiesKHR*)depth_stencil_resolve_props;
848         write_props->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
849         write_props->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
850     }
851
852     const auto *fragment_density_map2_props = lvl_find_in_chain<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT>(pProperties->pNext);
853     if (fragment_density_map2_props) {
854         VkPhysicalDeviceFragmentDensityMap2PropertiesEXT* write_props = (VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*)fragment_density_map2_props;
855         write_props->subsampledLoads = VK_FALSE;
856         write_props->subsampledCoarseReconstructionEarlyAccess = VK_FALSE;
857         write_props->maxSubsampledArrayLayers = 2;
858         write_props->maxDescriptorSetSubsampledSamplers = 1;
859     }
860 ''',
861 'vkGetPhysicalDeviceExternalSemaphoreProperties':'''
862     // Hard code support for all handle types and features
863     pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
864     pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
865     pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
866 ''',
867 'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':'''
868     GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
869 ''',
870 'vkGetPhysicalDeviceExternalFenceProperties':'''
871     // Hard-code support for all handle types and features
872     pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
873     pExternalFenceProperties->compatibleHandleTypes = 0xF;
874     pExternalFenceProperties->externalFenceFeatures = 0x3;
875 ''',
876 'vkGetPhysicalDeviceExternalFencePropertiesKHR':'''
877     GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
878 ''',
879 'vkGetPhysicalDeviceExternalBufferProperties':'''
880     // Hard-code support for all handle types and features
881     pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
882     pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0x1FF;
883     pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0x1FF;
884 ''',
885 'vkGetPhysicalDeviceExternalBufferPropertiesKHR':'''
886     GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
887 ''',
888 'vkGetBufferMemoryRequirements': '''
889     // TODO: Just hard-coding reqs for now
890     pMemoryRequirements->size = 4096;
891     pMemoryRequirements->alignment = 1;
892     pMemoryRequirements->memoryTypeBits = 0xFFFF;
893     // Return a better size based on the buffer size from the create info.
894     auto d_iter = buffer_map.find(device);
895     if (d_iter != buffer_map.end()) {
896         auto iter = d_iter->second.find(buffer);
897         if (iter != d_iter->second.end()) {
898             pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
899         }
900     }
901 ''',
902 'vkGetBufferMemoryRequirements2KHR': '''
903     GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
904 ''',
905 'vkGetImageMemoryRequirements': '''
906     pMemoryRequirements->size = 0;
907     pMemoryRequirements->alignment = 1;
908
909     auto d_iter = image_memory_size_map.find(device);
910     if (d_iter != image_memory_size_map.end()) {
911         auto iter = d_iter->second.find(image);
912         if (iter != d_iter->second.end()) {
913             pMemoryRequirements->size = iter->second;
914         }
915     }
916     // Here we hard-code that the memory type at index 3 doesn't support this image.
917     pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
918 ''',
919 'vkGetImageMemoryRequirements2KHR': '''
920     GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
921 ''',
922 'vkMapMemory': '''
923     unique_lock_t lock(global_lock);
924     if (VK_WHOLE_SIZE == size) {
925         if (allocated_memory_size_map.count(memory) != 0)
926             size = allocated_memory_size_map[memory] - offset;
927         else
928             size = 0x10000;
929     }
930     void* map_addr = malloc((size_t)size);
931     mapped_memory_map[memory].push_back(map_addr);
932     *ppData = map_addr;
933     return VK_SUCCESS;
934 ''',
935 'vkUnmapMemory': '''
936     unique_lock_t lock(global_lock);
937     for (auto map_addr : mapped_memory_map[memory]) {
938         free(map_addr);
939     }
940     mapped_memory_map.erase(memory);
941 ''',
942 'vkGetImageSubresourceLayout': '''
943     // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure.
944     *pLayout = VkSubresourceLayout(); // Default constructor zero values.
945 ''',
946 'vkCreateSwapchainKHR': '''
947     unique_lock_t lock(global_lock);
948     *pSwapchain = (VkSwapchainKHR)global_unique_handle++;
949     for (uint32_t i = 0; i < icd_swapchain_image_count; ++i) {
950         swapchain_image_map[*pSwapchain][i] = (VkImage)global_unique_handle++;
951     }
952     return VK_SUCCESS;
953 ''',
954 'vkDestroySwapchainKHR': '''
955     unique_lock_t lock(global_lock);
956     swapchain_image_map.erase(swapchain);
957 ''',
958 'vkGetSwapchainImagesKHR': '''
959     if (!pSwapchainImages) {
960         *pSwapchainImageCount = icd_swapchain_image_count;
961     } else {
962         unique_lock_t lock(global_lock);
963         for (uint32_t img_i = 0; img_i < (std::min)(*pSwapchainImageCount, icd_swapchain_image_count); ++img_i) {
964             pSwapchainImages[img_i] = swapchain_image_map.at(swapchain)[img_i];
965         }
966
967         if (*pSwapchainImageCount < icd_swapchain_image_count) return VK_INCOMPLETE;
968         else if (*pSwapchainImageCount > icd_swapchain_image_count) *pSwapchainImageCount = icd_swapchain_image_count;
969     }
970     return VK_SUCCESS;
971 ''',
972 'vkAcquireNextImageKHR': '''
973     *pImageIndex = 0;
974     return VK_SUCCESS;
975 ''',
976 'vkAcquireNextImage2KHR': '''
977     *pImageIndex = 0;
978     return VK_SUCCESS;
979 ''',
980 'vkCreateBuffer': '''
981     unique_lock_t lock(global_lock);
982     *pBuffer = (VkBuffer)global_unique_handle++;
983     buffer_map[device][*pBuffer] = *pCreateInfo;
984     return VK_SUCCESS;
985 ''',
986 'vkDestroyBuffer': '''
987     unique_lock_t lock(global_lock);
988     buffer_map[device].erase(buffer);
989 ''',
990 'vkCreateImage': '''
991     unique_lock_t lock(global_lock);
992     *pImage = (VkImage)global_unique_handle++;
993     // TODO: A pixel size of 32 bytes is assumed here; this covers the largest possible pixel size of any format. It could be changed to a more accurate per-format size if need be.
994     image_memory_size_map[device][*pImage] = pCreateInfo->extent.width * pCreateInfo->extent.height * pCreateInfo->extent.depth *
995                                              32 * pCreateInfo->arrayLayers * (pCreateInfo->mipLevels > 1 ? 2 : 1);
996     // plane count
997     switch (pCreateInfo->format) {
998         case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
999         case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
1000         case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
1001         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
1002         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
1003         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
1004         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
1005         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
1006         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
1007         case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
1008         case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
1009         case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
1010             image_memory_size_map[device][*pImage] *= 3;
1011             break;
1012         case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
1013         case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
1014         case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
1015         case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
1016         case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
1017         case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
1018         case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
1019         case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
1020             image_memory_size_map[device][*pImage] *= 2;
1021             break;
1022         default:
1023             break;
1024     }
1025     return VK_SUCCESS;
1026 ''',
1027 'vkDestroyImage': '''
1028     unique_lock_t lock(global_lock);
1029     image_memory_size_map[device].erase(image);
1030 ''',
1031 }
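# CUSTOM_C_INTERCEPTS maps a Vulkan entry point name to a hand-written C body; any
# command without an entry here gets the generator's default stub instead. A minimal
# sketch of that lookup, assuming the emission step (genCmd, outside this excerpt)
# consults the table by command name:
#
#     body = CUSTOM_C_INTERCEPTS.get('vkFreeCommandBuffers')
#     if body is not None:
#         pass  # emit the custom body instead of the default stub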
1032
1033 # MockICDGeneratorOptions - subclass of GeneratorOptions.
1034 #
1035 # Adds options used by MockICDOutputGenerator objects during Mock
1036 # ICD generation.
1037 #
1038 # Additional members
1039 #   prefixText - list of strings to prefix generated header with
1040 #     (usually a copyright statement + calling convention macros).
1041 #   protectFile - True if multiple inclusion protection should be
1042 #     generated (based on the filename) around the entire header.
1043 #   protectFeature - True if #ifndef..#endif protection should be
1044 #     generated around a feature interface in the header file.
1045 #   genFuncPointers - True if function pointer typedefs should be
1046 #     generated
1047 #   protectProto - If conditional protection should be generated
1048 #     around prototype declarations, set to either '#ifdef'
1049 #     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
1050 #     to require opt-out (#ifndef protectProtoStr). Otherwise
1051 #     set to None.
1052 #   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
1053 #     declarations, if protectProto is set
1054 #   apicall - string to use for the function declaration prefix,
1055 #     such as APICALL on Windows.
1056 #   apientry - string to use for the calling convention macro,
1057 #     in typedefs, such as APIENTRY.
1058 #   apientryp - string to use for the calling convention macro
1059 #     in function pointer typedefs, such as APIENTRYP.
1060 #   indentFuncProto - True if prototype declarations should put each
1061 #     parameter on a separate line
1062 #   indentFuncPointer - True if typedefed function pointers should put each
1063 #     parameter on a separate line
1064 #   alignFuncParam - if nonzero and parameters are being put on a
1065 #     separate line, align parameter names at the specified column
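# Example construction (illustrative only: the parameter names come from the class
# below, but the concrete values and the genvk-style harness that normally builds
# these options are assumptions):
#
#     opts = MockICDGeneratorOptions(
#         filename          = 'mock_icd.cpp',
#         directory         = 'generated',
#         apiname           = 'vulkan',
#         versions          = '.*',
#         emitversions      = '.*',
#         defaultExtensions = 'vulkan',
#         protectFile       = False,
#         alignFuncParam    = 48)
#     generator = MockICDOutputGenerator()
#     # A reg.Registry loaded from vk.xml would then drive `generator` using `opts`.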
1066 class MockICDGeneratorOptions(GeneratorOptions):
1067     def __init__(self,
1068                  conventions = None,
1069                  filename = None,
1070                  directory = '.',
1071                  genpath = None,
1072                  apiname = None,
1073                  profile = None,
1074                  versions = '.*',
1075                  emitversions = '.*',
1076                  defaultExtensions = None,
1077                  addExtensions = None,
1078                  removeExtensions = None,
1079                  emitExtensions = None,
1080                  sortProcedure = regSortFeatures,
1081                  prefixText = "",
1082                  genFuncPointers = True,
1083                  protectFile = True,
1084                  protectFeature = True,
1085                  protectProto = None,
1086                  protectProtoStr = None,
1087                  apicall = '',
1088                  apientry = '',
1089                  apientryp = '',
1090                  indentFuncProto = True,
1091                  indentFuncPointer = False,
1092                  alignFuncParam = 0,
1093                  expandEnumerants = True,
1094                  helper_file_type = ''):
1095         GeneratorOptions.__init__(self,
1096                  conventions = conventions,
1097                  filename = filename,
1098                  directory = directory,
1099                  genpath = genpath,
1100                  apiname = apiname,
1101                  profile = profile,
1102                  versions = versions,
1103                  emitversions = emitversions,
1104                  defaultExtensions = defaultExtensions,
1105                  addExtensions = addExtensions,
1106                  removeExtensions = removeExtensions,
1107                  emitExtensions = emitExtensions,
1108                  sortProcedure = sortProcedure)
1109         self.prefixText      = prefixText
1110         self.genFuncPointers = genFuncPointers
1111         self.protectFile     = protectFile
1112         self.protectFeature  = protectFeature
1113         self.protectProto    = protectProto
1114         self.protectProtoStr = protectProtoStr
1115         self.apicall         = apicall
1116         self.apientry        = apientry
1117         self.apientryp       = apientryp
1118         self.indentFuncProto = indentFuncProto
1119         self.indentFuncPointer = indentFuncPointer
1120         self.alignFuncParam  = alignFuncParam
1121
1122 # MockICDOutputGenerator - subclass of OutputGenerator.
1123 # Generates a mock Vulkan ICD.
1124 #  This is intended to be a minimal replacement for a Vulkan device in order
1125 #  to enable Vulkan Validation testing.
1126 #
1127 # ---- methods ----
1128 # MockICDOutputGenerator(errFile, warnFile, diagFile) - args as for
1129 #   OutputGenerator. Defines additional internal state.
1130 # ---- methods overriding base class ----
1131 # beginFile(genOpts)
1132 # endFile()
1133 # beginFeature(interface, emit)
1134 # endFeature()
1135 # genType(typeinfo,name)
1136 # genStruct(typeinfo,name)
1137 # genGroup(groupinfo,name)
1138 # genEnum(enuminfo, name)
1139 # genCmd(cmdinfo)
1140 class MockICDOutputGenerator(OutputGenerator):
1141     """Generate the Mock ICD header or source for the specified Vulkan API interfaces"""
1142     # This is an ordered list of sections in the header file.
1143     TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
1144                      'group', 'bitmask', 'funcpointer', 'struct']
1145     ALL_SECTIONS = TYPE_SECTIONS + ['command']
1146     def __init__(self,
1147                  errFile = sys.stderr,
1148                  warnFile = sys.stderr,
1149                  diagFile = sys.stdout):
1150         OutputGenerator.__init__(self, errFile, warnFile, diagFile)
1151         # Internal state - accumulators for different inner block text
1152         self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1153         self.intercepts = []
1154
1155     # Check if the parameter passed in is a pointer to an array
1156     def paramIsArray(self, param):
1157         return param.attrib.get('len') is not None
1158
1159     # Check if the parameter passed in is a pointer
1160     def paramIsPointer(self, param):
1161         ispointer = False
1162         for elem in param:
1163             if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
1164                 ispointer = True
1165         return ispointer
1166
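    # For reference, the vk.xml <param> elements these helpers inspect look roughly
    # like this (abridged):
    #
    #   <param>const <type>VkDeviceCreateInfo</type>* <name>pCreateInfo</name></param>
    #   <param len="commandBufferCount">const <type>VkCommandBuffer</type>* <name>pCommandBuffers</name></param>
    #
    # paramIsArray() keys off the 'len' attribute, while paramIsPointer() looks for a
    # '*' in the tail text that follows the inner <type> element.
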
1167     # Check if an object is a non-dispatchable handle
1168     def isHandleTypeNonDispatchable(self, handletype):
1169         handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
1170         if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
1171             return True
1172         else:
1173             return False
1174
1175     # Check if an object is a dispatchable handle
1176     def isHandleTypeDispatchable(self, handletype):
1177         handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
1178         if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
1179             return True
1180         else:
1181             return False
1182
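    # Illustrative only: the kind of <type category="handle"> markup the two
    # lookups above match on. The fragment is an assumption modeled on vk.xml;
    # the real queries run against self.registry.tree, not a snippet.
    @staticmethod
    def _example_handle_lookup():
        import xml.etree.ElementTree as ElementTree
        registry = ElementTree.fromstring(
            '<registry><types>'
            '<type category="handle"><type>VK_DEFINE_HANDLE</type>(<name>VkDevice</name>)</type>'
            '<type category="handle"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkFence</name>)</type>'
            '</types></registry>')
        device = registry.find("types/type/[name='VkDevice'][@category='handle']")
        fence = registry.find("types/type/[name='VkFence'][@category='handle']")
        # Dispatchable vs. non-dispatchable is decided by the inner <type> text
        assert device.find('type').text == 'VK_DEFINE_HANDLE'
        assert fence.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE'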
1183     def beginFile(self, genOpts):
1184         OutputGenerator.beginFile(self, genOpts)
1185         # C-specific
1186         #
1187         # Multiple inclusion protection & C++ namespace.
1188         self.header = False
1189         if (genOpts.protectFile and self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
1190             self.header = True
1191             headerSym = '__' + re.sub(r'\.h', '_h_', os.path.basename(self.genOpts.filename))
1192             write('#ifndef', headerSym, file=self.outFile)
1193             write('#define', headerSym, '1', file=self.outFile)
1194             self.newline()
1195         #
1196         # User-supplied prefix text, if any (list of strings)
1197         if (genOpts.prefixText):
1198             for s in genOpts.prefixText:
1199                 write(s, file=self.outFile)
1200         if self.header:
1201             write('#include <unordered_map>', file=self.outFile)
1202             write('#include <mutex>', file=self.outFile)
1203             write('#include <string>', file=self.outFile)
1204             write('#include <cstring>', file=self.outFile)
1205             write('#include "vulkan/vk_icd.h"', file=self.outFile)
1206         else:
1207             write('#include "mock_icd.h"', file=self.outFile)
1208             write('#include <stdlib.h>', file=self.outFile)
1209             write('#include <algorithm>', file=self.outFile)
1210             write('#include <array>', file=self.outFile)
1211             write('#include <vector>', file=self.outFile)
1212             write('#include "vk_typemap_helper.h"', file=self.outFile)
1213
1214         write('namespace vkmock {', file=self.outFile)
1215         if self.header:
1216             self.newline()
1217             write(HEADER_C_CODE, file=self.outFile)
1218             # Include all of the extensions in the ICD except specific ignored ones
1219             device_exts = []
1220             instance_exts = []
1221             # Ignore extensions that ICDs should not implement or are not safe to report
1222             ignore_exts = ['VK_EXT_validation_cache', 'VK_KHR_portability_subset']
1223             for ext in self.registry.tree.findall("extensions/extension"):
1224                 if ext.attrib['supported'] != 'disabled': # Only include enabled extensions
1225                     if (ext.attrib['name'] not in ignore_exts):
1226                         # Search for extension version enum
1227                         for enum in ext.findall('require/enum'):
1228                             if enum.get('name', '').endswith('_SPEC_VERSION'):
1229                                 ext_version = enum.get('value')
1230                                 if (ext.attrib.get('type') == 'instance'):
1231                                     instance_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext_version))
1232                                 else:
1233                                     device_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext_version))
1234                                 break
1235
1236             write('// Map of instance extension name to version', file=self.outFile)
1237             write('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {', file=self.outFile)
1238             write('\n'.join(instance_exts), file=self.outFile)
1239             write('};', file=self.outFile)
1240             write('// Map of device extension name to version', file=self.outFile)
1241             write('static const std::unordered_map<std::string, uint32_t> device_extension_map = {', file=self.outFile)
1242             write('\n'.join(device_exts), file=self.outFile)
1243             write('};', file=self.outFile)
1244
1245         else:
1246             self.newline()
1247             write(SOURCE_CPP_PREFIX, file=self.outFile)
1248
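    # Illustrative only: the extension scrape in beginFile() reduces vk.xml
    # <extension> blocks to '{"name", SPEC_VERSION},' initializer lines for the
    # generated unordered_maps. The fragment below is an assumption modeled on
    # vk.xml markup, not the real registry.
    @staticmethod
    def _example_extension_scrape():
        import xml.etree.ElementTree as ElementTree
        registry = ElementTree.fromstring(
            '<registry><extensions>'
            '<extension name="VK_KHR_surface" type="instance" supported="vulkan">'
            '<require><enum value="25" name="VK_KHR_SURFACE_SPEC_VERSION"/></require></extension>'
            '<extension name="VK_KHR_swapchain" type="device" supported="vulkan">'
            '<require><enum value="70" name="VK_KHR_SWAPCHAIN_SPEC_VERSION"/></require></extension>'
            '</extensions></registry>')
        lines = []
        for ext in registry.findall("extensions/extension"):
            if ext.attrib['supported'] != 'disabled':
                for enum in ext.findall('require/enum'):
                    if enum.get('name', '').endswith('_SPEC_VERSION'):
                        lines.append('    {"%s", %s},' % (ext.attrib['name'], enum.get('value')))
                        break
        # Expected C++ initializer lines:
        #     {"VK_KHR_surface", 25},
        #     {"VK_KHR_swapchain", 70},
        return lines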
1249     def endFile(self):
1250         # C-specific
1251         # Finish C++ namespace and multiple inclusion protection
1252         self.newline()
1253         if self.header:
1254             # record intercepted procedures
1255             write('// Map of all APIs to be intercepted by this layer', file=self.outFile)
1256             write('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
1257             write('\n'.join(self.intercepts), file=self.outFile)
1258             write('};\n', file=self.outFile)
1259             self.newline()
1260             write('} // namespace vkmock', file=self.outFile)
1261             self.newline()
1262             write('#endif', file=self.outFile)
1263         else: # Loader-ICD interface; need to implement the global interface functions
1264             write(SOURCE_CPP_POSTFIX, file=self.outFile)
1265         # Finish processing in superclass
1266         OutputGenerator.endFile(self)
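    # Illustrative only: the shape of the self.intercepts entries accumulated in
    # genCmd() and the map that endFile() emits from them. The two command names
    # are sample data; the real table covers every intercepted entry point.
    @staticmethod
    def _example_funcptr_map():
        intercepts = [
            '    {"vkCreateSampler", (void*)CreateSampler},',
            '    {"vkDestroySampler", (void*)DestroySampler},',
        ]
        return ('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {\n'
                + '\n'.join(intercepts) + '\n};\n')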
1267     def beginFeature(self, interface, emit):
1268         #write('// starting beginFeature', file=self.outFile)
1269         # Start processing in superclass
1270         OutputGenerator.beginFeature(self, interface, emit)
1271         self.featureExtraProtect = GetFeatureProtect(interface)
1272         # C-specific
1273         # Accumulate includes, defines, types, enums, function pointer typedefs,
1274         # and function prototypes separately for this feature. They're only
1275         # printed in endFeature().
1276         self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1277         #write('// ending beginFeature', file=self.outFile)
1278     def endFeature(self):
1279         # C-specific
1280         # Actually write the interface to the output file.
1281         #write('// starting endFeature', file=self.outFile)
1282         if (self.emit):
1283             self.newline()
1284             if (self.genOpts.protectFeature):
1285                 write('#ifndef', self.featureName, file=self.outFile)
1286             # If type declarations are needed by other features based on
1287             # this one, it may be necessary to suppress the ExtraProtect,
1288             # or move it below the 'for section...' loop.
1289             #write('// endFeature looking at self.featureExtraProtect', file=self.outFile)
1290             if (self.featureExtraProtect != None):
1291                 write('#ifdef', self.featureExtraProtect, file=self.outFile)
1292             #write('#define', self.featureName, '1', file=self.outFile)
1293             for section in self.TYPE_SECTIONS:
1294                 #write('// endFeature writing section'+section, file=self.outFile)
1295                 contents = self.sections[section]
1296                 if contents:
1297                     write('\n'.join(contents), file=self.outFile)
1298                     self.newline()
1299             #write('// endFeature looking at self.sections[command]', file=self.outFile)
1300             if (self.sections['command']):
1301                 write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
1302                 self.newline()
1303             if (self.featureExtraProtect != None):
1304                 write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
1305             if (self.genOpts.protectFeature):
1306                 write('#endif /*', self.featureName, '*/', file=self.outFile)
1307         # Finish processing in superclass
1308         OutputGenerator.endFeature(self)
1309         #write('// ending endFeature', file=self.outFile)
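    # Illustrative only: a condensed model of the guard/emission pattern in
    # endFeature() for a feature that carries a featureExtraProtect macro. The
    # macro name is a real Vulkan platform define used purely as sample data;
    # the declaration string is made up for the sketch, and the real code walks
    # TYPE_SECTIONS before the 'command' section.
    @staticmethod
    def _example_feature_emission():
        extra_protect = 'VK_USE_PLATFORM_WIN32_KHR'
        sections = {'command': ['static VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR();']}
        out = ['#ifdef %s' % extra_protect]           # opened when featureExtraProtect is set
        for section in ['command']:
            out.extend(sections[section])             # accumulated declarations for this feature
        out.append('#endif /* %s */' % extra_protect)
        return '\n'.join(out)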
1310     #
1311     # Append a definition to the specified section
1312     def appendSection(self, section, text):
1313         # self.sections[section].append('SECTION: ' + section + '\n')
1314         self.sections[section].append(text)
1315     #
1316     # Type generation
1317     def genType(self, typeinfo, name, alias):
1318         pass
1319     #
1320     # Struct (e.g. C "struct" type) generation.
1321     # This is a special case of the <type> tag where the contents are
1322     # interpreted as a set of <member> tags instead of freeform C
1323     # type declarations. The <member> tags are just like <param>
1324     # tags - they are a declaration of a struct or union member.
1325     # Only simple member declarations are supported (no nested
1326     # structs etc.)
1327     def genStruct(self, typeinfo, typeName, alias):
1328         OutputGenerator.genStruct(self, typeinfo, typeName, alias)
1329         body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
1330         # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
1331         for member in typeinfo.elem.findall('.//member'):
1332             body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
1333             body += ';\n'
1334         body += '} ' + typeName + ';\n'
1335         self.appendSection('struct', body)
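    # Illustrative only: the typedef shape genStruct() accumulates into the
    # 'struct' section. This sketch flattens the <member> text directly instead
    # of calling makeCParamDecl(), so alignment differs from the real output;
    # the VkExtent2D fragment is an assumption modeled on vk.xml.
    @staticmethod
    def _example_struct_emission():
        import xml.etree.ElementTree as ElementTree
        struct = ElementTree.fromstring(
            '<type category="struct" name="VkExtent2D">'
            '<member><type>uint32_t</type> <name>width</name></member>'
            '<member><type>uint32_t</type> <name>height</name></member>'
            '</type>')
        body = 'typedef %s %s {\n' % (struct.get('category'), struct.get('name'))
        for member in struct.findall('.//member'):
            body += '    ' + ''.join(member.itertext()) + ';\n'
        body += '} %s;\n' % struct.get('name')
        # body is now:
        # typedef struct VkExtent2D {
        #     uint32_t width;
        #     uint32_t height;
        # } VkExtent2D;
        return body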
1336     #
1337     # Group (e.g. C "enum" type) generation.
1338     # These are concatenated together with other types.
1339     def genGroup(self, groupinfo, groupName, alias):
1340         pass
1341     # Enumerant generation
1342     # <enum> tags may specify their values in several ways, but are usually
1343     # just integers.
1344     def genEnum(self, enuminfo, name, alias):
1345         pass
1346     #
1347     # Command generation
1348     def genCmd(self, cmdinfo, name, alias):
1349         decls = self.makeCDecls(cmdinfo.elem)
1350         if self.header: # In the header declare all intercepts
1351             self.appendSection('command', '')
1352             self.appendSection('command', 'static %s' % (decls[0]))
1353             if (self.featureExtraProtect != None):
1354                 self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
1355             self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1356             if (self.featureExtraProtect != None):
1357                 self.intercepts += [ '#endif' ]
1358             return
1359
1360         manual_functions = [
1361             # Include functions here to be intercepted w/ manually implemented function bodies
1362             'vkGetDeviceProcAddr',
1363             'vkGetInstanceProcAddr',
1364             'vkCreateDevice',
1365             'vkDestroyDevice',
1366             'vkCreateInstance',
1367             'vkDestroyInstance',
1368             'vkFreeCommandBuffers',
1369             #'vkCreateDebugReportCallbackEXT',
1370             #'vkDestroyDebugReportCallbackEXT',
1371             'vkEnumerateInstanceLayerProperties',
1372             'vkEnumerateInstanceVersion',
1373             'vkEnumerateInstanceExtensionProperties',
1374             'vkEnumerateDeviceLayerProperties',
1375             'vkEnumerateDeviceExtensionProperties',
1376         ]
1377         if name in manual_functions:
1378             self.appendSection('command', '')
1379             if name not in CUSTOM_C_INTERCEPTS:
1380                 self.appendSection('command', '// declare only')
1381                 self.appendSection('command', 'static %s' % (decls[0]))
1382                 self.appendSection('command', '// TODO: Implement custom intercept body')
1383             else:
1384                 self.appendSection('command', 'static %s' % (decls[0][:-1]))
1385                 self.appendSection('command', '{\n%s}' % (CUSTOM_C_INTERCEPTS[name]))
1386             self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1387             return
1388         # record that the function will be intercepted
1389         if (self.featureExtraProtect != None):
1390             self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
1391         self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1392         if (self.featureExtraProtect != None):
1393             self.intercepts += [ '#endif' ]
1394
1395         OutputGenerator.genCmd(self, cmdinfo, name, alias)
1396         #
1397         self.appendSection('command', '')
1398         self.appendSection('command', 'static %s' % (decls[0][:-1]))
1399         if name in CUSTOM_C_INTERCEPTS:
1400             self.appendSection('command', '{%s}' % (CUSTOM_C_INTERCEPTS[name]))
1401             return
1402
1403         # Declare result variable, if any.
1404         resulttype = cmdinfo.elem.find('proto/type')
1405         if (resulttype != None and resulttype.text == 'void'):
1406             resulttype = None
1407         # If the name with a KHR suffix is in CUSTOM_C_INTERCEPTS,
1408         # call the KHR custom version instead of generating separate code.
1409         khr_name = name + "KHR"
1410         if khr_name in CUSTOM_C_INTERCEPTS:
1411             return_string = ''
1412             if resulttype != None:
1413                 return_string = 'return '
1414             params = cmdinfo.elem.findall('param/name')
1415             param_names = []
1416             for param in params:
1417                 param_names.append(param.text)
1418             self.appendSection('command', '{\n    %s%s(%s);\n}' % (return_string, khr_name[2:], ", ".join(param_names)))
1419             return
1420         self.appendSection('command', '{')
1421
1422         api_function_name = cmdinfo.elem.attrib.get('name')
1423         # GET THE TYPE OF FUNCTION
1424         if any(ftxt in api_function_name for ftxt in ['Create', 'Allocate']):
1425             # Get last param
1426             last_param = cmdinfo.elem.findall('param')[-1]
1427             lp_txt = last_param.find('name').text
1428             lp_len = None
1429             if ('len' in last_param.attrib):
1430                 lp_len = last_param.attrib['len']
1431                 lp_len = lp_len.replace('::', '->')
1432             lp_type = last_param.find('type').text
1433             handle_type = 'dispatchable'
1434             allocator_txt = 'CreateDispObjHandle()'
1435             if (self.isHandleTypeNonDispatchable(lp_type)):
1436                 handle_type = 'non-' + handle_type
1437                 allocator_txt = 'global_unique_handle++'
1438             # Need to lock in both cases
1439             self.appendSection('command', '    unique_lock_t lock(global_lock);')
1440             if (lp_len != None):
1441                 #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len))
1442                 self.appendSection('command', '    for (uint32_t i = 0; i < %s; ++i) {' % (lp_len))
1443                 self.appendSection('command', '        %s[i] = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
1444                 self.appendSection('command', '    }')
1445             else:
1446                 #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type))
1447                 if 'AllocateMemory' in api_function_name:
1448                     # Store allocation size in case it's mapped
1449                     self.appendSection('command', '    allocated_memory_size_map[(VkDeviceMemory)global_unique_handle] = pAllocateInfo->allocationSize;')
1450                 self.appendSection('command', '    *%s = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
1451         elif any(ftxt in api_function_name for ftxt in ['Destroy', 'Free']):
1452             self.appendSection('command', '//Destroy object')
1453             if 'FreeMemory' in api_function_name:
1454                 # Remove from allocation map
1455                 self.appendSection('command', '    allocated_memory_size_map.erase(memory);')
1456         else:
1457             self.appendSection('command', '//Not a CREATE or DESTROY function')
1458
1459         # Return result variable, if any.
1460         if (resulttype != None):
1461             if api_function_name == 'vkGetEventStatus':
1462                 self.appendSection('command', '    return VK_EVENT_SET;')
1463             else:
1464                 self.appendSection('command', '    return VK_SUCCESS;')
1465         self.appendSection('command', '}')
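    # Illustrative only: the C body genCmd() pieces together for a typical
    # single-handle 'Create' command whose last parameter is non-dispatchable
    # (e.g. vkCreateSampler). 'len'-qualified output arrays get a loop instead,
    # and dispatchable handles use CreateDispObjHandle() rather than the counter.
    @staticmethod
    def _example_create_body(lp_txt='pSampler', lp_type='VkSampler'):
        lines = ['{',
                 '    unique_lock_t lock(global_lock);',
                 '    *%s = (%s)global_unique_handle++;' % (lp_txt, lp_type),
                 '    return VK_SUCCESS;',
                 '}']
        return '\n'.join(lines)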
1466     #
1467     # override makeProtoName to drop the "vk" prefix
1468     def makeProtoName(self, name, tail):
1469         return self.genOpts.apientry + name[2:] + tail
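# Illustrative only: the override above drops the leading 'vk' so the generated
# prototypes (and the intercept table entries built in genCmd) use names like
# 'CreateDevice' instead of 'vkCreateDevice'. A standalone equivalent, assuming
# an apientry string of 'VKAPI_CALL '; this function is not used by the generator.
def _example_make_proto_name(name='vkCreateDevice', tail='', apientry='VKAPI_CALL '):
    return apientry + name[2:] + tail   # -> 'VKAPI_CALL CreateDevice'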