scripts: Use named params for generator options
[platform/upstream/Vulkan-Tools.git] / scripts / mock_icd_generator.py
1 #!/usr/bin/python3 -i
2 #
3 # Copyright (c) 2015-2017 The Khronos Group Inc.
4 # Copyright (c) 2015-2017 Valve Corporation
5 # Copyright (c) 2015-2017 LunarG, Inc.
6 # Copyright (c) 2015-2017 Google Inc.
7 #
8 # Licensed under the Apache License, Version 2.0 (the "License");
9 # you may not use this file except in compliance with the License.
10 # You may obtain a copy of the License at
11 #
12 #     http://www.apache.org/licenses/LICENSE-2.0
13 #
14 # Unless required by applicable law or agreed to in writing, software
15 # distributed under the License is distributed on an "AS IS" BASIS,
16 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 # See the License for the specific language governing permissions and
18 # limitations under the License.
19 #
20 # Author: Tobin Ehlis <tobine@google.com>
21 #
22 # This script generates a Mock ICD that intercepts almost all Vulkan
23 #  functions. That layer is not intended to be useful or even compilable
24 #  in its initial state. Rather it's intended to be a starting point that
25 #  can be copied and customized to assist in creation of a new layer.
26
27 import os,re,sys
28 from generator import *
29 from common_codegen import *
30
31
# Mock header code
# C++ fragment emitted near the top of the generated mock ICD: shared mutex
# aliases and global lock, the loader-interface negotiation state consulted by
# vkCreateInstance / vk_icdNegotiateLoaderICDInterfaceVersion, and helpers that
# create/destroy dispatchable handles tagged with the loader magic value so the
# Vulkan loader accepts them as real dispatchable objects.
HEADER_C_CODE = '''
using mutex_t = std::mutex;
using lock_guard_t = std::lock_guard<mutex_t>;
using unique_lock_t = std::unique_lock<mutex_t>;

static mutex_t global_lock;
static uint64_t global_unique_handle = 1;
static const uint32_t SUPPORTED_LOADER_ICD_INTERFACE_VERSION = 5;
static uint32_t loader_interface_version = 0;
static bool negotiate_loader_icd_interface_called = false;
static void* CreateDispObjHandle() {
    auto handle = new VK_LOADER_DATA;
    set_loader_magic_value(handle);
    return handle;
}
static void DestroyDispObjHandle(void* handle) {
    delete reinterpret_cast<VK_LOADER_DATA*>(handle);
}
'''
52
# Manual code at the top of the cpp source file
# C++ fragment emitted after the header code in the generated mock ICD:
# global bookkeeping maps (instance -> physical devices, device -> queues /
# buffers / image sizes, memory-allocation tracking), plus SetLimits(), which
# fills a fixed set of VkPhysicalDeviceLimits values by hand because the
# limits are not described in the Vulkan XML registry and therefore cannot be
# code-generated. Changing any byte of this string changes the generated ICD.
SOURCE_CPP_PREFIX = '''
using std::unordered_map;

static constexpr uint32_t icd_physical_device_count = 1;
static constexpr uint32_t kSupportedVulkanAPIVersion = VK_API_VERSION_1_1;
static unordered_map<VkInstance, std::array<VkPhysicalDevice, icd_physical_device_count>> physical_device_map;

// Map device memory handle to any mapped allocations that we'll need to free on unmap
static unordered_map<VkDeviceMemory, std::vector<void*>> mapped_memory_map;

// Map device memory allocation handle to the size
static unordered_map<VkDeviceMemory, VkDeviceSize> allocated_memory_size_map;

static unordered_map<VkDevice, unordered_map<uint32_t, unordered_map<uint32_t, VkQueue>>> queue_map;
static unordered_map<VkDevice, unordered_map<VkBuffer, VkBufferCreateInfo>> buffer_map;
static unordered_map<VkDevice, unordered_map<VkImage, VkDeviceSize>> image_memory_size_map;

static constexpr uint32_t icd_swapchain_image_count = 1;
static unordered_map<VkSwapchainKHR, VkImage[icd_swapchain_image_count]> swapchain_image_map;

// TODO: Would like to codegen this but limits aren't in XML
static VkPhysicalDeviceLimits SetLimits(VkPhysicalDeviceLimits *limits) {
    limits->maxImageDimension1D = 4096;
    limits->maxImageDimension2D = 4096;
    limits->maxImageDimension3D = 256;
    limits->maxImageDimensionCube = 4096;
    limits->maxImageArrayLayers = 256;
    limits->maxTexelBufferElements = 65536;
    limits->maxUniformBufferRange = 16384;
    limits->maxStorageBufferRange = 134217728;
    limits->maxPushConstantsSize = 128;
    limits->maxMemoryAllocationCount = 4096;
    limits->maxSamplerAllocationCount = 4000;
    limits->bufferImageGranularity = 1;
    limits->sparseAddressSpaceSize = 2147483648;
    limits->maxBoundDescriptorSets = 4;
    limits->maxPerStageDescriptorSamplers = 16;
    limits->maxPerStageDescriptorUniformBuffers = 12;
    limits->maxPerStageDescriptorStorageBuffers = 4;
    limits->maxPerStageDescriptorSampledImages = 16;
    limits->maxPerStageDescriptorStorageImages = 4;
    limits->maxPerStageDescriptorInputAttachments = 4;
    limits->maxPerStageResources = 128;
    limits->maxDescriptorSetSamplers = 96;
    limits->maxDescriptorSetUniformBuffers = 72;
    limits->maxDescriptorSetUniformBuffersDynamic = 8;
    limits->maxDescriptorSetStorageBuffers = 24;
    limits->maxDescriptorSetStorageBuffersDynamic = 4;
    limits->maxDescriptorSetSampledImages = 96;
    limits->maxDescriptorSetStorageImages = 24;
    limits->maxDescriptorSetInputAttachments = 4;
    limits->maxVertexInputAttributes = 16;
    limits->maxVertexInputBindings = 16;
    limits->maxVertexInputAttributeOffset = 2047;
    limits->maxVertexInputBindingStride = 2048;
    limits->maxVertexOutputComponents = 64;
    limits->maxTessellationGenerationLevel = 64;
    limits->maxTessellationPatchSize = 32;
    limits->maxTessellationControlPerVertexInputComponents = 64;
    limits->maxTessellationControlPerVertexOutputComponents = 64;
    limits->maxTessellationControlPerPatchOutputComponents = 120;
    limits->maxTessellationControlTotalOutputComponents = 2048;
    limits->maxTessellationEvaluationInputComponents = 64;
    limits->maxTessellationEvaluationOutputComponents = 64;
    limits->maxGeometryShaderInvocations = 32;
    limits->maxGeometryInputComponents = 64;
    limits->maxGeometryOutputComponents = 64;
    limits->maxGeometryOutputVertices = 256;
    limits->maxGeometryTotalOutputComponents = 1024;
    limits->maxFragmentInputComponents = 64;
    limits->maxFragmentOutputAttachments = 4;
    limits->maxFragmentDualSrcAttachments = 1;
    limits->maxFragmentCombinedOutputResources = 4;
    limits->maxComputeSharedMemorySize = 16384;
    limits->maxComputeWorkGroupCount[0] = 65535;
    limits->maxComputeWorkGroupCount[1] = 65535;
    limits->maxComputeWorkGroupCount[2] = 65535;
    limits->maxComputeWorkGroupInvocations = 128;
    limits->maxComputeWorkGroupSize[0] = 128;
    limits->maxComputeWorkGroupSize[1] = 128;
    limits->maxComputeWorkGroupSize[2] = 64;
    limits->subPixelPrecisionBits = 4;
    limits->subTexelPrecisionBits = 4;
    limits->mipmapPrecisionBits = 4;
    limits->maxDrawIndexedIndexValue = UINT32_MAX;
    limits->maxDrawIndirectCount = UINT16_MAX;
    limits->maxSamplerLodBias = 2.0f;
    limits->maxSamplerAnisotropy = 16;
    limits->maxViewports = 16;
    limits->maxViewportDimensions[0] = 4096;
    limits->maxViewportDimensions[1] = 4096;
    limits->viewportBoundsRange[0] = -8192;
    limits->viewportBoundsRange[1] = 8191;
    limits->viewportSubPixelBits = 0;
    limits->minMemoryMapAlignment = 64;
    limits->minTexelBufferOffsetAlignment = 16;
    limits->minUniformBufferOffsetAlignment = 16;
    limits->minStorageBufferOffsetAlignment = 16;
    limits->minTexelOffset = -8;
    limits->maxTexelOffset = 7;
    limits->minTexelGatherOffset = -8;
    limits->maxTexelGatherOffset = 7;
    limits->minInterpolationOffset = 0.0f;
    limits->maxInterpolationOffset = 0.5f;
    limits->subPixelInterpolationOffsetBits = 4;
    limits->maxFramebufferWidth = 4096;
    limits->maxFramebufferHeight = 4096;
    limits->maxFramebufferLayers = 256;
    limits->framebufferColorSampleCounts = 0x7F;
    limits->framebufferDepthSampleCounts = 0x7F;
    limits->framebufferStencilSampleCounts = 0x7F;
    limits->framebufferNoAttachmentsSampleCounts = 0x7F;
    limits->maxColorAttachments = 4;
    limits->sampledImageColorSampleCounts = 0x7F;
    limits->sampledImageIntegerSampleCounts = 0x7F;
    limits->sampledImageDepthSampleCounts = 0x7F;
    limits->sampledImageStencilSampleCounts = 0x7F;
    limits->storageImageSampleCounts = 0x7F;
    limits->maxSampleMaskWords = 1;
    limits->timestampComputeAndGraphics = VK_TRUE;
    limits->timestampPeriod = 1;
    limits->maxClipDistances = 8;
    limits->maxCullDistances = 8;
    limits->maxCombinedClipAndCullDistances = 8;
    limits->discreteQueuePriorities = 2;
    limits->pointSizeRange[0] = 1.0f;
    limits->pointSizeRange[1] = 64.0f;
    limits->lineWidthRange[0] = 1.0f;
    limits->lineWidthRange[1] = 8.0f;
    limits->pointSizeGranularity = 1.0f;
    limits->lineWidthGranularity = 1.0f;
    limits->strictLines = VK_TRUE;
    limits->standardSampleLocations = VK_TRUE;
    limits->optimalBufferCopyOffsetAlignment = 1;
    limits->optimalBufferCopyRowPitchAlignment = 1;
    limits->nonCoherentAtomSize = 256;

    return *limits;
}

void SetBoolArrayTrue(VkBool32* bool_array, uint32_t num_bools)
{
    for (uint32_t i = 0; i < num_bools; ++i) {
        bool_array[i] = VK_TRUE;
    }
}
'''
201
# Manual code at the end of the cpp source file
# C++ fragment emitted at the bottom of the generated mock ICD: defines
# GetPhysicalDeviceProcAddr, closes the vkmock namespace, and then exports the
# extern "C" loader entry points (vk_icdGetInstanceProcAddr,
# vk_icdGetPhysicalDeviceProcAddr, vk_icdNegotiateLoaderICDInterfaceVersion)
# plus directly-exported WSI surface functions, with per-platform sections
# guarded by the usual VK_USE_PLATFORM_* macros. Changing any byte of this
# string changes the generated ICD.
SOURCE_CPP_POSTFIX = '''

static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
    // TODO: This function should only care about physical device functions and return nullptr for other functions
    const auto &item = name_to_funcptr_map.find(funcName);
    if (item != name_to_funcptr_map.end()) {
        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
    }
    // Mock should intercept all functions so if we get here just return null
    return nullptr;
}

} // namespace vkmock

#if defined(__GNUC__) && __GNUC__ >= 4
#define EXPORT __attribute__((visibility("default")))
#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
#define EXPORT __attribute__((visibility("default")))
#else
#define EXPORT
#endif

extern "C" {

EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName) {
    if (!vkmock::negotiate_loader_icd_interface_called) {
        vkmock::loader_interface_version = 1;
    }
    return vkmock::GetInstanceProcAddr(instance, pName);
}

EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName) {
    return vkmock::GetPhysicalDeviceProcAddr(instance, pName);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
    vkmock::negotiate_loader_icd_interface_called = true;
    vkmock::loader_interface_version = *pSupportedVersion;
    if (*pSupportedVersion > vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
        *pSupportedVersion = vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION;
    }
    return VK_SUCCESS;
}


EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
    VkInstance                                  instance,
    VkSurfaceKHR                                surface,
    const VkAllocationCallbacks*                pAllocator)
{
    vkmock::DestroySurfaceKHR(instance, surface, pAllocator);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
    VkPhysicalDevice                            physicalDevice,
    uint32_t                                    queueFamilyIndex,
    VkSurfaceKHR                                surface,
    VkBool32*                                   pSupported)
{
    return vkmock::GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities)
{
    return vkmock::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    uint32_t*                                   pSurfaceFormatCount,
    VkSurfaceFormatKHR*                         pSurfaceFormats)
{
    return vkmock::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    uint32_t*                                   pPresentModeCount,
    VkPresentModeKHR*                           pPresentModes)
{
    return vkmock::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
    VkInstance                                  instance,
    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}

#ifdef VK_USE_PLATFORM_XLIB_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
    VkInstance                                  instance,
    const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_XLIB_KHR */

#ifdef VK_USE_PLATFORM_XCB_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
    VkInstance                                  instance,
    const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_XCB_KHR */

#ifdef VK_USE_PLATFORM_WAYLAND_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
    VkInstance                                  instance,
    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_WAYLAND_KHR */

#ifdef VK_USE_PLATFORM_ANDROID_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
    VkInstance                                  instance,
    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_ANDROID_KHR */

#ifdef VK_USE_PLATFORM_WIN32_KHR

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
    VkInstance                                  instance,
    const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_WIN32_KHR */

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
    VkDevice                                    device,
    VkSurfaceKHR                                surface,
    VkDeviceGroupPresentModeFlagsKHR*           pModes)
{
    return vkmock::GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
}

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    uint32_t*                                   pRectCount,
    VkRect2D*                                   pRects)
{
    return vkmock::GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
}

#ifdef VK_USE_PLATFORM_VI_NN

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
    VkInstance                                  instance,
    const VkViSurfaceCreateInfoNN*              pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_VI_NN */

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
    VkPhysicalDevice                            physicalDevice,
    VkSurfaceKHR                                surface,
    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities)
{
    return vkmock::GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
}

#ifdef VK_USE_PLATFORM_IOS_MVK

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
    VkInstance                                  instance,
    const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_IOS_MVK */

#ifdef VK_USE_PLATFORM_MACOS_MVK

EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
    VkInstance                                  instance,
    const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
    return vkmock::CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
}
#endif /* VK_USE_PLATFORM_MACOS_MVK */

} // end extern "C"

'''
424
425 CUSTOM_C_INTERCEPTS = {
426 'vkCreateInstance': '''
427     // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
428     //  apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
429     //  ICD should behave as normal.
430     if (loader_interface_version <= 4) {
431         return VK_ERROR_INCOMPATIBLE_DRIVER;
432     }
433     *pInstance = (VkInstance)CreateDispObjHandle();
434     for (auto& physical_device : physical_device_map[*pInstance])
435         physical_device = (VkPhysicalDevice)CreateDispObjHandle();
436     // TODO: If emulating specific device caps, will need to add intelligence here
437     return VK_SUCCESS;
438 ''',
439 'vkDestroyInstance': '''
440     if (instance) {
441         for (const auto physical_device : physical_device_map.at(instance))
442             DestroyDispObjHandle((void*)physical_device);
443         physical_device_map.erase(instance);
444         DestroyDispObjHandle((void*)instance);
445     }
446 ''',
447 'vkEnumeratePhysicalDevices': '''
448     VkResult result_code = VK_SUCCESS;
449     if (pPhysicalDevices) {
450         const auto return_count = (std::min)(*pPhysicalDeviceCount, icd_physical_device_count);
451         for (uint32_t i = 0; i < return_count; ++i) pPhysicalDevices[i] = physical_device_map.at(instance)[i];
452         if (return_count < icd_physical_device_count) result_code = VK_INCOMPLETE;
453         *pPhysicalDeviceCount = return_count;
454     } else {
455         *pPhysicalDeviceCount = icd_physical_device_count;
456     }
457     return result_code;
458 ''',
459 'vkCreateDevice': '''
460     *pDevice = (VkDevice)CreateDispObjHandle();
461     // TODO: If emulating specific device caps, will need to add intelligence here
462     return VK_SUCCESS;
463 ''',
464 'vkDestroyDevice': '''
465     unique_lock_t lock(global_lock);
466     // First destroy sub-device objects
467     // Destroy Queues
468     for (auto dev_queue_map_pair : queue_map) {
469         for (auto queue_family_map_pair : queue_map[dev_queue_map_pair.first]) {
470             for (auto index_queue_pair : queue_map[dev_queue_map_pair.first][queue_family_map_pair.first]) {
471                 DestroyDispObjHandle((void*)index_queue_pair.second);
472             }
473         }
474     }
475     queue_map.clear();
476     buffer_map.erase(device);
477     image_memory_size_map.erase(device);
478     // Now destroy device
479     DestroyDispObjHandle((void*)device);
480     // TODO: If emulating specific device caps, will need to add intelligence here
481 ''',
482 'vkGetDeviceQueue': '''
483     unique_lock_t lock(global_lock);
484     auto queue = queue_map[device][queueFamilyIndex][queueIndex];
485     if (queue) {
486         *pQueue = queue;
487     } else {
488         *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
489     }
490     // TODO: If emulating specific device caps, will need to add intelligence here
491     return;
492 ''',
493 'vkGetDeviceQueue2': '''
494     GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
495     // TODO: Add further support for GetDeviceQueue2 features
496 ''',
497 'vkEnumerateInstanceLayerProperties': '''
498     return VK_SUCCESS;
499 ''',
500 'vkEnumerateInstanceVersion': '''
501     *pApiVersion = kSupportedVulkanAPIVersion;
502     return VK_SUCCESS;
503 ''',
504 'vkEnumerateDeviceLayerProperties': '''
505     return VK_SUCCESS;
506 ''',
507 'vkEnumerateInstanceExtensionProperties': '''
508     // If requesting number of extensions, return that
509     if (!pLayerName) {
510         if (!pProperties) {
511             *pPropertyCount = (uint32_t)instance_extension_map.size();
512         } else {
513             uint32_t i = 0;
514             for (const auto &name_ver_pair : instance_extension_map) {
515                 if (i == *pPropertyCount) {
516                     break;
517                 }
518                 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
519                 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
520                 pProperties[i].specVersion = name_ver_pair.second;
521                 ++i;
522             }
523             if (i != instance_extension_map.size()) {
524                 return VK_INCOMPLETE;
525             }
526         }
527     }
528     // If requesting extension properties, fill in data struct for number of extensions
529     return VK_SUCCESS;
530 ''',
531 'vkEnumerateDeviceExtensionProperties': '''
532     // If requesting number of extensions, return that
533     if (!pLayerName) {
534         if (!pProperties) {
535             *pPropertyCount = (uint32_t)device_extension_map.size();
536         } else {
537             uint32_t i = 0;
538             for (const auto &name_ver_pair : device_extension_map) {
539                 if (i == *pPropertyCount) {
540                     break;
541                 }
542                 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
543                 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
544                 pProperties[i].specVersion = name_ver_pair.second;
545                 ++i;
546             }
547             if (i != device_extension_map.size()) {
548                 return VK_INCOMPLETE;
549             }
550         }
551     }
552     // If requesting extension properties, fill in data struct for number of extensions
553     return VK_SUCCESS;
554 ''',
555 'vkGetPhysicalDeviceSurfacePresentModesKHR': '''
556     // Currently always say that all present modes are supported
557     if (!pPresentModes) {
558         *pPresentModeCount = 6;
559     } else {
560         // Intentionally falling through and just filling however many modes are requested
561         switch(*pPresentModeCount) {
562         case 6:
563             pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
564             // fall through
565         case 5:
566             pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
567             // fall through
568         case 4:
569             pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
570             // fall through
571         case 3:
572             pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
573             // fall through
574         case 2:
575             pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
576             // fall through
577         default:
578             pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
579             break;
580         }
581     }
582     return VK_SUCCESS;
583 ''',
584 'vkGetPhysicalDeviceSurfaceFormatsKHR': '''
585     // Currently always say that RGBA8 & BGRA8 are supported
586     if (!pSurfaceFormats) {
587         *pSurfaceFormatCount = 2;
588     } else {
589         // Intentionally falling through and just filling however many types are requested
590         switch(*pSurfaceFormatCount) {
591         case 2:
592             pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
593             pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
594             // fall through
595         default:
596             pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
597             pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
598             break;
599         }
600     }
601     return VK_SUCCESS;
602 ''',
603 'vkGetPhysicalDeviceSurfaceFormats2KHR': '''
604     // Currently always say that RGBA8 & BGRA8 are supported
605     if (!pSurfaceFormats) {
606         *pSurfaceFormatCount = 2;
607     } else {
608         // Intentionally falling through and just filling however many types are requested
609         switch(*pSurfaceFormatCount) {
610         case 2:
611             pSurfaceFormats[1].pNext = nullptr;
612             pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
613             pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
614             // fall through
615         default:
616             pSurfaceFormats[1].pNext = nullptr;
617             pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
618             pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
619             break;
620         }
621     }
622     return VK_SUCCESS;
623 ''',
624 'vkGetPhysicalDeviceSurfaceSupportKHR': '''
625     // Currently say that all surface/queue combos are supported
626     *pSupported = VK_TRUE;
627     return VK_SUCCESS;
628 ''',
629 'vkGetPhysicalDeviceSurfaceCapabilitiesKHR': '''
630     // In general just say max supported is available for requested surface
631     pSurfaceCapabilities->minImageCount = 1;
632     pSurfaceCapabilities->maxImageCount = 0;
633     pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
634     pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
635     pSurfaceCapabilities->minImageExtent.width = 1;
636     pSurfaceCapabilities->minImageExtent.height = 1;
637     pSurfaceCapabilities->maxImageExtent.width = 3840;
638     pSurfaceCapabilities->maxImageExtent.height = 2160;
639     pSurfaceCapabilities->maxImageArrayLayers = 128;
640     pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
641                                                 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
642                                                 VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
643                                                 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
644                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
645                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
646                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
647                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
648                                                 VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
649     pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
650     pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
651                                                     VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
652                                                     VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
653                                                     VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
654     pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
655                                                 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
656                                                 VK_IMAGE_USAGE_SAMPLED_BIT |
657                                                 VK_IMAGE_USAGE_STORAGE_BIT |
658                                                 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
659                                                 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
660                                                 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
661                                                 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
662     return VK_SUCCESS;
663 ''',
664 'vkGetPhysicalDeviceSurfaceCapabilities2KHR': '''
665     GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
666     return VK_SUCCESS;
667 ''',
668 'vkGetInstanceProcAddr': '''
669     if (!negotiate_loader_icd_interface_called) {
670         loader_interface_version = 0;
671     }
672     const auto &item = name_to_funcptr_map.find(pName);
673     if (item != name_to_funcptr_map.end()) {
674         return reinterpret_cast<PFN_vkVoidFunction>(item->second);
675     }
676     // Mock should intercept all functions so if we get here just return null
677     return nullptr;
678 ''',
679 'vkGetDeviceProcAddr': '''
680     return GetInstanceProcAddr(nullptr, pName);
681 ''',
682 'vkGetPhysicalDeviceMemoryProperties': '''
683     pMemoryProperties->memoryTypeCount = 2;
684     pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
685     pMemoryProperties->memoryTypes[0].heapIndex = 0;
686     pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
687     pMemoryProperties->memoryTypes[1].heapIndex = 1;
688     pMemoryProperties->memoryHeapCount = 2;
689     pMemoryProperties->memoryHeaps[0].flags = 0;
690     pMemoryProperties->memoryHeaps[0].size = 8000000000;
691     pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
692     pMemoryProperties->memoryHeaps[1].size = 8000000000;
693 ''',
694 'vkGetPhysicalDeviceMemoryProperties2KHR': '''
695     GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
696 ''',
697 'vkGetPhysicalDeviceQueueFamilyProperties': '''
698     if (!pQueueFamilyProperties) {
699         *pQueueFamilyPropertyCount = 1;
700     } else {
701         if (*pQueueFamilyPropertyCount) {
702             pQueueFamilyProperties[0].queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;
703             pQueueFamilyProperties[0].queueCount = 1;
704             pQueueFamilyProperties[0].timestampValidBits = 0;
705             pQueueFamilyProperties[0].minImageTransferGranularity = {1,1,1};
706         }
707     }
708 ''',
709 'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
710     if (pQueueFamilyPropertyCount && pQueueFamilyProperties) {
711         GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, &pQueueFamilyProperties->queueFamilyProperties);
712     } else {
713         GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, nullptr);
714     }
715 ''',
716 'vkGetPhysicalDeviceFeatures': '''
717     uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
718     VkBool32 *bool_array = &pFeatures->robustBufferAccess;
719     SetBoolArrayTrue(bool_array, num_bools);
720 ''',
721 'vkGetPhysicalDeviceFeatures2KHR': '''
722     GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
723     uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
724     VkBool32* feat_bools = nullptr;
725     const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
726     if (desc_idx_features) {
727         const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
728         num_bools = bool_size/sizeof(VkBool32);
729         feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
730         SetBoolArrayTrue(feat_bools, num_bools);
731     }
732     const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
733     if (blendop_features) {
734         const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
735         num_bools = bool_size/sizeof(VkBool32);
736         feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
737         SetBoolArrayTrue(feat_bools, num_bools);
738     }
739 ''',
740 'vkGetPhysicalDeviceFormatProperties': '''
741     if (VK_FORMAT_UNDEFINED == format) {
742         *pFormatProperties = { 0x0, 0x0, 0x0 };
743     } else {
744         // TODO: Just returning full support for everything initially
745         *pFormatProperties = { 0x00FFFFFF, 0x00FFFFFF, 0x00FFFFFF };
746     }
747 ''',
748 'vkGetPhysicalDeviceFormatProperties2KHR': '''
749     GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
750 ''',
751 'vkGetPhysicalDeviceImageFormatProperties': '''
752     // A hardcoded unsupported format
753     if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
754         return VK_ERROR_FORMAT_NOT_SUPPORTED;
755     }
756
757     // TODO: Just hard-coding some values for now
758     // TODO: If tiling is linear, limit the mips, levels, & sample count
759     if (VK_IMAGE_TILING_LINEAR == tiling) {
760         *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
761     } else {
762         // We hard-code support for all sample counts except 64 bits.
763         *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
764     }
765     return VK_SUCCESS;
766 ''',
767 'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
768     GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
769     return VK_SUCCESS;
770 ''',
771 'vkGetPhysicalDeviceProperties': '''
772     // TODO: Just hard-coding some values for now
773     pProperties->apiVersion = kSupportedVulkanAPIVersion;
774     pProperties->driverVersion = 1;
775     pProperties->vendorID = 0xba5eba11;
776     pProperties->deviceID = 0xf005ba11;
777     pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
778     //std::string devName = "Vulkan Mock Device";
779     strcpy(pProperties->deviceName, "Vulkan Mock Device");
780     pProperties->pipelineCacheUUID[0] = 18;
781     pProperties->limits = SetLimits(&pProperties->limits);
782     pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
783 ''',
784 'vkGetPhysicalDeviceProperties2KHR': '''
785     GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
786     const auto *desc_idx_props = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingPropertiesEXT>(pProperties->pNext);
787     if (desc_idx_props) {
788         VkPhysicalDeviceDescriptorIndexingPropertiesEXT* write_props = (VkPhysicalDeviceDescriptorIndexingPropertiesEXT*)desc_idx_props;
789         write_props->maxUpdateAfterBindDescriptorsInAllPools = 500000;
790         write_props->shaderUniformBufferArrayNonUniformIndexingNative = false;
791         write_props->shaderSampledImageArrayNonUniformIndexingNative = false;
792         write_props->shaderStorageBufferArrayNonUniformIndexingNative = false;
793         write_props->shaderStorageImageArrayNonUniformIndexingNative = false;
794         write_props->shaderInputAttachmentArrayNonUniformIndexingNative = false;
795         write_props->robustBufferAccessUpdateAfterBind = true;
796         write_props->quadDivergentImplicitLod = true;
797         write_props->maxPerStageDescriptorUpdateAfterBindSamplers = 500000;
798         write_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers = 500000;
799         write_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers = 500000;
800         write_props->maxPerStageDescriptorUpdateAfterBindSampledImages = 500000;
801         write_props->maxPerStageDescriptorUpdateAfterBindStorageImages = 500000;
802         write_props->maxPerStageDescriptorUpdateAfterBindInputAttachments = 500000;
803         write_props->maxPerStageUpdateAfterBindResources = 500000;
804         write_props->maxDescriptorSetUpdateAfterBindSamplers = 500000;
805         write_props->maxDescriptorSetUpdateAfterBindUniformBuffers = 96;
806         write_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 8;
807         write_props->maxDescriptorSetUpdateAfterBindStorageBuffers = 500000;
808         write_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 4;
809         write_props->maxDescriptorSetUpdateAfterBindSampledImages = 500000;
810         write_props->maxDescriptorSetUpdateAfterBindStorageImages = 500000;
811         write_props->maxDescriptorSetUpdateAfterBindInputAttachments = 500000;
812     }
813
814     const auto *push_descriptor_props = lvl_find_in_chain<VkPhysicalDevicePushDescriptorPropertiesKHR>(pProperties->pNext);
815     if (push_descriptor_props) {
816         VkPhysicalDevicePushDescriptorPropertiesKHR* write_props = (VkPhysicalDevicePushDescriptorPropertiesKHR*)push_descriptor_props;
817         write_props->maxPushDescriptors = 256;
818     }
819
820     const auto *depth_stencil_resolve_props = lvl_find_in_chain<VkPhysicalDeviceDepthStencilResolvePropertiesKHR>(pProperties->pNext);
821     if (depth_stencil_resolve_props) {
822         VkPhysicalDeviceDepthStencilResolvePropertiesKHR* write_props = (VkPhysicalDeviceDepthStencilResolvePropertiesKHR*)depth_stencil_resolve_props;
823         write_props->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
824         write_props->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
825     }
826 ''',
827 'vkGetPhysicalDeviceExternalSemaphoreProperties':'''
828     // Hard code support for all handle types and features
829     pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
830     pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
831     pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
832 ''',
833 'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':'''
834     GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
835 ''',
836 'vkGetPhysicalDeviceExternalFenceProperties':'''
837     // Hard-code support for all handle types and features
838     pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
839     pExternalFenceProperties->compatibleHandleTypes = 0xF;
840     pExternalFenceProperties->externalFenceFeatures = 0x3;
841 ''',
842 'vkGetPhysicalDeviceExternalFencePropertiesKHR':'''
843     GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
844 ''',
845 'vkGetPhysicalDeviceExternalBufferProperties':'''
846     // Hard-code support for all handle types and features
847     pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
848     pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0x1FF;
849     pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0x1FF;
850 ''',
851 'vkGetPhysicalDeviceExternalBufferPropertiesKHR':'''
852     GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
853 ''',
854 'vkGetBufferMemoryRequirements': '''
855     // TODO: Just hard-coding reqs for now
856     pMemoryRequirements->size = 4096;
857     pMemoryRequirements->alignment = 1;
858     pMemoryRequirements->memoryTypeBits = 0xFFFF;
859     // Return a better size based on the buffer size from the create info.
860     auto d_iter = buffer_map.find(device);
861     if (d_iter != buffer_map.end()) {
862         auto iter = d_iter->second.find(buffer);
863         if (iter != d_iter->second.end()) {
864             pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
865         }
866     }
867 ''',
868 'vkGetBufferMemoryRequirements2KHR': '''
869     GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
870 ''',
871 'vkGetImageMemoryRequirements': '''
872     pMemoryRequirements->size = 0;
873     pMemoryRequirements->alignment = 1;
874
875     auto d_iter = image_memory_size_map.find(device);
876     if(d_iter != image_memory_size_map.end()){
877         auto iter = d_iter->second.find(image);
878         if (iter != d_iter->second.end()) {
879             pMemoryRequirements->size = iter->second;
880         }
881     }
882     // Here we hard-code that the memory type at index 3 doesn't support this image.
883     pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
884 ''',
885 'vkGetImageMemoryRequirements2KHR': '''
886     GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
887 ''',
888 'vkMapMemory': '''
889     unique_lock_t lock(global_lock);
890     if (VK_WHOLE_SIZE == size) {
891         if (allocated_memory_size_map.count(memory) != 0)
892             size = allocated_memory_size_map[memory] - offset;
893         else
894             size = 0x10000;
895     }
896     void* map_addr = malloc((size_t)size);
897     mapped_memory_map[memory].push_back(map_addr);
898     *ppData = map_addr;
899     return VK_SUCCESS;
900 ''',
901 'vkUnmapMemory': '''
902     unique_lock_t lock(global_lock);
903     for (auto map_addr : mapped_memory_map[memory]) {
904         free(map_addr);
905     }
906     mapped_memory_map.erase(memory);
907 ''',
908 'vkGetImageSubresourceLayout': '''
909     // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure.
910     *pLayout = VkSubresourceLayout(); // Default constructor zero values.
911 ''',
912 'vkCreateSwapchainKHR': '''
913     unique_lock_t lock(global_lock);
914     *pSwapchain = (VkSwapchainKHR)global_unique_handle++;
915     for(uint32_t i = 0; i < icd_swapchain_image_count; ++i){
916         swapchain_image_map[*pSwapchain][i] = (VkImage)global_unique_handle++;
917     }
918     return VK_SUCCESS;
919 ''',
920 'vkDestroySwapchainKHR': '''
921     unique_lock_t lock(global_lock);
922     swapchain_image_map.clear();
923 ''',
924 'vkGetSwapchainImagesKHR': '''
925     if (!pSwapchainImages) {
926         *pSwapchainImageCount = icd_swapchain_image_count;
927     } else {
928         unique_lock_t lock(global_lock);
929         for (uint32_t img_i = 0; img_i < (std::min)(*pSwapchainImageCount, icd_swapchain_image_count); ++img_i){
930             pSwapchainImages[img_i] = swapchain_image_map.at(swapchain)[img_i];
931         }
932
933         if (*pSwapchainImageCount < icd_swapchain_image_count) return VK_INCOMPLETE;
934         else if (*pSwapchainImageCount > icd_swapchain_image_count) *pSwapchainImageCount = icd_swapchain_image_count;
935     }
936     return VK_SUCCESS;
937 ''',
938 'vkAcquireNextImageKHR': '''
939     *pImageIndex = 0;
940     return VK_SUCCESS;
941 ''',
942 'vkAcquireNextImage2KHR': '''
943     *pImageIndex = 0;
944     return VK_SUCCESS;
945 ''',
946 'vkCreateBuffer': '''
947     unique_lock_t lock(global_lock);
948     *pBuffer = (VkBuffer)global_unique_handle++;
949     buffer_map[device][*pBuffer] = *pCreateInfo;
950     return VK_SUCCESS;
951 ''',
952 'vkDestroyBuffer': '''
953     unique_lock_t lock(global_lock);
954     buffer_map[device].erase(buffer);
955 ''',
956 'vkCreateImage': '''
957     unique_lock_t lock(global_lock);
958     *pImage = (VkImage)global_unique_handle++;
959     // TODO: A pixel size is 32 bytes. This accounts for the largest possible pixel size of any format. It could be changed to more accurate size if need be.
960     image_memory_size_map[device][*pImage] = pCreateInfo->extent.width * pCreateInfo->extent.height * pCreateInfo->extent.depth *
961                                              32 * pCreateInfo->arrayLayers * (pCreateInfo->mipLevels > 1 ? 2 : 1);
962     // plane count
963     switch (pCreateInfo->format) {
964         case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
965         case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
966         case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
967         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
968         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
969         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
970         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
971         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
972         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
973         case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
974         case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
975         case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
976             image_memory_size_map[device][*pImage] *= 3;
977             break;
978         case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
979         case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
980         case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
981         case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
982         case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
983         case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
984         case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
985         case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
986             image_memory_size_map[device][*pImage] *= 2;
987             break;
988         default:
989             break;
990     }
991     return VK_SUCCESS;
992 ''',
993 'vkDestroyImage': '''
994     unique_lock_t lock(global_lock);
995     image_memory_size_map[device].erase(image);
996 ''',
997 }
998
999 # MockICDGeneratorOptions - subclass of GeneratorOptions.
1000 #
1001 # Adds options used by MockICDOutputGenerator objects during Mock
1002 # ICD generation.
1003 #
1004 # Additional members
1005 #   prefixText - list of strings to prefix generated header with
1006 #     (usually a copyright statement + calling convention macros).
1007 #   protectFile - True if multiple inclusion protection should be
1008 #     generated (based on the filename) around the entire header.
1009 #   protectFeature - True if #ifndef..#endif protection should be
1010 #     generated around a feature interface in the header file.
1011 #   genFuncPointers - True if function pointer typedefs should be
1012 #     generated
1013 #   protectProto - If conditional protection should be generated
1014 #     around prototype declarations, set to either '#ifdef'
1015 #     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
1016 #     to require opt-out (#ifndef protectProtoStr). Otherwise
1017 #     set to None.
1018 #   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
1019 #     declarations, if protectProto is set
1020 #   apicall - string to use for the function declaration prefix,
1021 #     such as APICALL on Windows.
1022 #   apientry - string to use for the calling convention macro,
1023 #     in typedefs, such as APIENTRY.
1024 #   apientryp - string to use for the calling convention macro
1025 #     in function pointer typedefs, such as APIENTRYP.
1026 #   indentFuncProto - True if prototype declarations should put each
1027 #     parameter on a separate line
1028 #   indentFuncPointer - True if typedefed function pointers should put each
1029 #     parameter on a separate line
1030 #   alignFuncParam - if nonzero and parameters are being put on a
1031 #     separate line, align parameter names at the specified column
class MockICDGeneratorOptions(GeneratorOptions):
    """Options consumed by MockICDOutputGenerator during Mock ICD generation.

    Extends GeneratorOptions with the C-header formatting settings listed in
    the preceding comment block: prefix text, inclusion-guard and per-feature
    protection, calling-convention macros, and prototype indentation/alignment.
    """
    def __init__(self,
                 conventions = None,
                 filename = None,
                 directory = '.',
                 apiname = None,
                 profile = None,
                 versions = '.*',
                 emitversions = '.*',
                 defaultExtensions = None,
                 addExtensions = None,
                 removeExtensions = None,
                 emitExtensions = None,
                 sortProcedure = regSortFeatures,
                 prefixText = "",
                 genFuncPointers = True,
                 protectFile = True,
                 protectFeature = True,
                 protectProto = None,
                 protectProtoStr = None,
                 apicall = '',
                 apientry = '',
                 apientryp = '',
                 indentFuncProto = True,
                 indentFuncPointer = False,
                 alignFuncParam = 0,
                 expandEnumerants = True,
                 helper_file_type = ''):
        # Registry-selection options are handled entirely by the base class.
        super().__init__(conventions=conventions,
                         filename=filename,
                         directory=directory,
                         apiname=apiname,
                         profile=profile,
                         versions=versions,
                         emitversions=emitversions,
                         defaultExtensions=defaultExtensions,
                         addExtensions=addExtensions,
                         removeExtensions=removeExtensions,
                         emitExtensions=emitExtensions,
                         sortProcedure=sortProcedure)
        # Formatting state read by MockICDOutputGenerator while emitting.
        self.prefixText = prefixText
        self.genFuncPointers = genFuncPointers
        self.protectFile = protectFile
        self.protectFeature = protectFeature
        self.protectProto = protectProto
        self.protectProtoStr = protectProtoStr
        self.apicall = apicall
        self.apientry = apientry
        self.apientryp = apientryp
        self.indentFuncProto = indentFuncProto
        self.indentFuncPointer = indentFuncPointer
        self.alignFuncParam = alignFuncParam
        # NOTE(review): expandEnumerants and helper_file_type are accepted but
        # neither forwarded nor stored — this matches the original behavior.
1085
1086 # MockICDOutputGenerator - subclass of OutputGenerator.
1087 # Generates a mock vulkan ICD.
1088 #  This is intended to be a minimal replacement for a vulkan device in order
1089 #  to enable Vulkan Validation testing.
1090 #
1091 # ---- methods ----
# MockICDOutputGenerator(errFile, warnFile, diagFile) - args as for
1093 #   OutputGenerator. Defines additional internal state.
1094 # ---- methods overriding base class ----
1095 # beginFile(genOpts)
1096 # endFile()
1097 # beginFeature(interface, emit)
1098 # endFeature()
1099 # genType(typeinfo,name)
1100 # genStruct(typeinfo,name)
1101 # genGroup(groupinfo,name)
1102 # genEnum(enuminfo, name)
1103 # genCmd(cmdinfo)
1104 class MockICDOutputGenerator(OutputGenerator):
1105     """Generate specified API interfaces in a specific style, such as a C header"""
1106     # This is an ordered list of sections in the header file.
1107     TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
1108                      'group', 'bitmask', 'funcpointer', 'struct']
1109     ALL_SECTIONS = TYPE_SECTIONS + ['command']
1110     def __init__(self,
1111                  errFile = sys.stderr,
1112                  warnFile = sys.stderr,
1113                  diagFile = sys.stdout):
1114         OutputGenerator.__init__(self, errFile, warnFile, diagFile)
1115         # Internal state - accumulators for different inner block text
1116         self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1117         self.intercepts = []
1118
1119     # Check if the parameter passed in is a pointer to an array
1120     def paramIsArray(self, param):
1121         return param.attrib.get('len') is not None
1122
1123     # Check if the parameter passed in is a pointer
1124     def paramIsPointer(self, param):
1125         ispointer = False
1126         for elem in param:
1127             if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
1128                 ispointer = True
1129         return ispointer
1130
1131     # Check if an object is a non-dispatchable handle
1132     def isHandleTypeNonDispatchable(self, handletype):
1133         handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
1134         if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
1135             return True
1136         else:
1137             return False
1138
1139     # Check if an object is a dispatchable handle
1140     def isHandleTypeDispatchable(self, handletype):
1141         handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
1142         if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
1143             return True
1144         else:
1145             return False
1146
    def beginFile(self, genOpts):
        """Write the start of the output file.

        Header mode (filename ends in 'h'): inclusion guard, user prefix
        text, system/ICD includes, the vkmock namespace, the static mock
        code (HEADER_C_CODE), and the instance/device extension maps built
        from the registry.

        Source mode: user prefix text, mock includes, the vkmock namespace,
        and SOURCE_CPP_PREFIX.
        """
        OutputGenerator.beginFile(self, genOpts)
        # C-specific
        #
        # Multiple inclusion protection & C++ namespace.
        self.header = False
        if (genOpts.protectFile and self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
            self.header = True
            headerSym = '__' + re.sub(r'\.h', '_h_', os.path.basename(self.genOpts.filename))
            write('#ifndef', headerSym, file=self.outFile)
            write('#define', headerSym, '1', file=self.outFile)
            self.newline()
        #
        # User-supplied prefix text, if any (list of strings)
        if (genOpts.prefixText):
            for s in genOpts.prefixText:
                write(s, file=self.outFile)
        # Includes differ by output mode: the header is self-contained, the
        # source file pulls in the generated header plus its own helpers.
        if self.header:
            write('#include <unordered_map>', file=self.outFile)
            write('#include <mutex>', file=self.outFile)
            write('#include <string>', file=self.outFile)
            write('#include <cstring>', file=self.outFile)
            write('#include "vulkan/vk_icd.h"', file=self.outFile)
        else:
            write('#include "mock_icd.h"', file=self.outFile)
            write('#include <stdlib.h>', file=self.outFile)
            write('#include <algorithm>', file=self.outFile)
            write('#include <array>', file=self.outFile)
            write('#include <vector>', file=self.outFile)
            write('#include "vk_typemap_helper.h"', file=self.outFile)

        write('namespace vkmock {', file=self.outFile)
        if self.header:
            self.newline()
            write(HEADER_C_CODE, file=self.outFile)
            # Include all of the extensions in ICD except specific ignored ones
            device_exts = []
            instance_exts = []
            # Ignore extensions that ICDs should not implement or are not safe to report
            ignore_exts = ['VK_EXT_validation_cache']
            for ext in self.registry.tree.findall("extensions/extension"):
                if ext.attrib['supported'] != 'disabled': # Only include enabled extensions
                    if (ext.attrib['name'] in ignore_exts):
                        pass
                    # Extensions without a 'type' attribute fall through to the
                    # device list by default.
                    elif (ext.attrib.get('type') and 'instance' == ext.attrib['type']):
                        # NOTE(review): ext[0][0] is assumed to be the extension's
                        # SPEC_VERSION enum whose 'value' is the version — confirm
                        # against the vk.xml layout.
                        instance_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
                    else:
                        device_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
            write('// Map of instance extension name to version', file=self.outFile)
            write('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {', file=self.outFile)
            write('\n'.join(instance_exts), file=self.outFile)
            write('};', file=self.outFile)
            write('// Map of device extension name to version', file=self.outFile)
            write('static const std::unordered_map<std::string, uint32_t> device_extension_map = {', file=self.outFile)
            write('\n'.join(device_exts), file=self.outFile)
            write('};', file=self.outFile)

        else:
            self.newline()
            write(SOURCE_CPP_PREFIX, file=self.outFile)
1207
    def endFile(self):
        """Write the end of the output file.

        Header mode: emit the name -> function-pointer map accumulated in
        self.intercepts, then close the vkmock namespace and the inclusion
        guard.  Source mode: emit SOURCE_CPP_POSTFIX (the loader-layer
        interface).  Finally delegate to the superclass.
        """
        # C-specific
        # Finish C++ namespace and multiple inclusion protection
        self.newline()
        if self.header:
            # record intercepted procedures
            write('// Map of all APIs to be intercepted by this layer', file=self.outFile)
            write('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
            write('\n'.join(self.intercepts), file=self.outFile)
            write('};\n', file=self.outFile)
            self.newline()
            write('} // namespace vkmock', file=self.outFile)
            self.newline()
            write('#endif', file=self.outFile)
        else: # Loader-layer-interface, need to implement global interface functions
            write(SOURCE_CPP_POSTFIX, file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFile(self)
1226     def beginFeature(self, interface, emit):
1227         #write('// starting beginFeature', file=self.outFile)
1228         # Start processing in superclass
1229         OutputGenerator.beginFeature(self, interface, emit)
1230         self.featureExtraProtect = GetFeatureProtect(interface)
1231         # C-specific
1232         # Accumulate includes, defines, types, enums, function pointer typedefs,
1233         # end function prototypes separately for this feature. They're only
1234         # printed in endFeature().
1235         self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1236         #write('// ending beginFeature', file=self.outFile)
    def endFeature(self):
        """Actually write the accumulated interface for the completed feature
        to the output file, wrapped in the optional #ifndef feature guard and
        the platform-specific #ifdef protect, then finish in the superclass.
        """
        # C-specific
        # Actually write the interface to the output file.
        #write('// starting endFeature', file=self.outFile)
        if (self.emit):
            self.newline()
            if (self.genOpts.protectFeature):
                write('#ifndef', self.featureName, file=self.outFile)
            # If type declarations are needed by other features based on
            # this one, it may be necessary to suppress the ExtraProtect,
            # or move it below the 'for section...' loop.
            #write('// endFeature looking at self.featureExtraProtect', file=self.outFile)
            if (self.featureExtraProtect != None):
                write('#ifdef', self.featureExtraProtect, file=self.outFile)
            #write('#define', self.featureName, '1', file=self.outFile)
            # Type sections first, in the fixed TYPE_SECTIONS order, then
            # commands; commands are written without a trailing newline.
            for section in self.TYPE_SECTIONS:
                #write('// endFeature writing section'+section, file=self.outFile)
                contents = self.sections[section]
                if contents:
                    write('\n'.join(contents), file=self.outFile)
                    self.newline()
            #write('// endFeature looking at self.sections[command]', file=self.outFile)
            if (self.sections['command']):
                write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
                self.newline()
            if (self.featureExtraProtect != None):
                write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
            if (self.genOpts.protectFeature):
                write('#endif /*', self.featureName, '*/', file=self.outFile)
        # Finish processing in superclass
        OutputGenerator.endFeature(self)
        #write('// ending endFeature', file=self.outFile)
1269     #
1270     # Append a definition to the specified section
1271     def appendSection(self, section, text):
1272         # self.sections[section].append('SECTION: ' + section + '\n')
1273         self.sections[section].append(text)
1274     #
1275     # Type generation
    def genType(self, typeinfo, name, alias):
        # Intentionally a no-op: this generator emits no type declarations.
        pass
1278     #
1279     # Struct (e.g. C "struct" type) generation.
1280     # This is a special case of the <type> tag where the contents are
1281     # interpreted as a set of <member> tags instead of freeform C
1282     # C type declarations. The <member> tags are just like <param>
1283     # tags - they are a declaration of a struct or union member.
1284     # Only simple member declarations are supported (no nested
1285     # structs etc.)
1286     def genStruct(self, typeinfo, typeName, alias):
1287         OutputGenerator.genStruct(self, typeinfo, typeName, alias)
1288         body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
1289         # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
1290         for member in typeinfo.elem.findall('.//member'):
1291             body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
1292             body += ';\n'
1293         body += '} ' + typeName + ';\n'
1294         self.appendSection('struct', body)
1295     #
1296     # Group (e.g. C "enum" type) generation.
1297     # These are concatenated together with other types.
    def genGroup(self, groupinfo, groupName, alias):
        """No-op: enum group (C "enum" type) definitions are not generated here."""
        pass
1300     # Enumerant generation
1301     # <enum> tags may specify their values in several ways, but are usually
1302     # just integers.
    def genEnum(self, enuminfo, name, alias):
        """No-op: individual enumerant definitions are not generated here."""
        pass
1305     #
1306     # Command generation
1307     def genCmd(self, cmdinfo, name, alias):
1308         decls = self.makeCDecls(cmdinfo.elem)
1309         if self.header: # In the header declare all intercepts
1310             self.appendSection('command', '')
1311             self.appendSection('command', 'static %s' % (decls[0]))
1312             if (self.featureExtraProtect != None):
1313                 self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
1314             self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1315             if (self.featureExtraProtect != None):
1316                 self.intercepts += [ '#endif' ]
1317             return
1318
1319         manual_functions = [
1320             # Include functions here to be intercepted w/ manually implemented function bodies
1321             'vkGetDeviceProcAddr',
1322             'vkGetInstanceProcAddr',
1323             'vkCreateDevice',
1324             'vkDestroyDevice',
1325             'vkCreateInstance',
1326             'vkDestroyInstance',
1327             #'vkCreateDebugReportCallbackEXT',
1328             #'vkDestroyDebugReportCallbackEXT',
1329             'vkEnumerateInstanceLayerProperties',
1330             'vkEnumerateInstanceVersion',
1331             'vkEnumerateInstanceExtensionProperties',
1332             'vkEnumerateDeviceLayerProperties',
1333             'vkEnumerateDeviceExtensionProperties',
1334         ]
1335         if name in manual_functions:
1336             self.appendSection('command', '')
1337             if name not in CUSTOM_C_INTERCEPTS:
1338                 self.appendSection('command', '// declare only')
1339                 self.appendSection('command', 'static %s' % (decls[0]))
1340                 self.appendSection('command', '// TODO: Implement custom intercept body')
1341             else:
1342                 self.appendSection('command', 'static %s' % (decls[0][:-1]))
1343                 self.appendSection('command', '{\n%s}' % (CUSTOM_C_INTERCEPTS[name]))
1344             self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1345             return
1346         # record that the function will be intercepted
1347         if (self.featureExtraProtect != None):
1348             self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
1349         self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1350         if (self.featureExtraProtect != None):
1351             self.intercepts += [ '#endif' ]
1352
1353         OutputGenerator.genCmd(self, cmdinfo, name, alias)
1354         #
1355         self.appendSection('command', '')
1356         self.appendSection('command', 'static %s' % (decls[0][:-1]))
1357         if name in CUSTOM_C_INTERCEPTS:
1358             self.appendSection('command', '{%s}' % (CUSTOM_C_INTERCEPTS[name]))
1359             return
1360
1361         # Declare result variable, if any.
1362         resulttype = cmdinfo.elem.find('proto/type')
1363         if (resulttype != None and resulttype.text == 'void'):
1364             resulttype = None
1365         # if the name w/ KHR postfix is in the CUSTOM_C_INTERCEPTS
1366         # Call the KHR custom version instead of generating separate code
1367         khr_name = name + "KHR"
1368         if khr_name in CUSTOM_C_INTERCEPTS:
1369             return_string = ''
1370             if resulttype != None:
1371                 return_string = 'return '
1372             params = cmdinfo.elem.findall('param/name')
1373             param_names = []
1374             for param in params:
1375                 param_names.append(param.text)
1376             self.appendSection('command', '{\n    %s%s(%s);\n}' % (return_string, khr_name[2:], ", ".join(param_names)))
1377             return
1378         self.appendSection('command', '{')
1379
1380         api_function_name = cmdinfo.elem.attrib.get('name')
1381         # GET THE TYPE OF FUNCTION
1382         if True in [ftxt in api_function_name for ftxt in ['Create', 'Allocate']]:
1383             # Get last param
1384             last_param = cmdinfo.elem.findall('param')[-1]
1385             lp_txt = last_param.find('name').text
1386             lp_len = None
1387             if ('len' in last_param.attrib):
1388                 lp_len = last_param.attrib['len']
1389                 lp_len = lp_len.replace('::', '->')
1390             lp_type = last_param.find('type').text
1391             handle_type = 'dispatchable'
1392             allocator_txt = 'CreateDispObjHandle()';
1393             if (self.isHandleTypeNonDispatchable(lp_type)):
1394                 handle_type = 'non-' + handle_type
1395                 allocator_txt = 'global_unique_handle++';
1396             # Need to lock in both cases
1397             self.appendSection('command', '    unique_lock_t lock(global_lock);')
1398             if (lp_len != None):
1399                 #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len))
1400                 self.appendSection('command', '    for (uint32_t i = 0; i < %s; ++i) {' % (lp_len))
1401                 self.appendSection('command', '        %s[i] = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
1402                 self.appendSection('command', '    }')
1403             else:
1404                 #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type))
1405                 if 'AllocateMemory' in api_function_name:
1406                     # Store allocation size in case it's mapped
1407                     self.appendSection('command', '    allocated_memory_size_map[(VkDeviceMemory)global_unique_handle] = pAllocateInfo->allocationSize;')
1408                 self.appendSection('command', '    *%s = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
1409         elif True in [ftxt in api_function_name for ftxt in ['Destroy', 'Free']]:
1410             self.appendSection('command', '//Destroy object')
1411             if 'FreeMemory' in api_function_name:
1412                 # Remove from allocation map
1413                 self.appendSection('command', '    allocated_memory_size_map.erase(memory);')
1414         else:
1415             self.appendSection('command', '//Not a CREATE or DESTROY function')
1416
1417         # Return result variable, if any.
1418         if (resulttype != None):
1419             if api_function_name == 'vkGetEventStatus':
1420                 self.appendSection('command', '    return VK_EVENT_SET;')
1421             else:
1422                 self.appendSection('command', '    return VK_SUCCESS;')
1423         self.appendSection('command', '}')
1424     #
1425     # override makeProtoName to drop the "vk" prefix
1426     def makeProtoName(self, name, tail):
1427         return self.genOpts.apientry + name[2:] + tail