icd:Correctly Support Vulkan 1.1
[platform/upstream/Vulkan-Tools.git] / scripts / mock_icd_generator.py
1 #!/usr/bin/python3 -i
2 #
3 # Copyright (c) 2015-2017 The Khronos Group Inc.
4 # Copyright (c) 2015-2017 Valve Corporation
5 # Copyright (c) 2015-2017 LunarG, Inc.
6 # Copyright (c) 2015-2017 Google Inc.
7 #
8 # Licensed under the Apache License, Version 2.0 (the "License");
9 # you may not use this file except in compliance with the License.
10 # You may obtain a copy of the License at
11 #
12 #     http://www.apache.org/licenses/LICENSE-2.0
13 #
14 # Unless required by applicable law or agreed to in writing, software
15 # distributed under the License is distributed on an "AS IS" BASIS,
16 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 # See the License for the specific language governing permissions and
18 # limitations under the License.
19 #
20 # Author: Tobin Ehlis <tobine@google.com>
21 #
22 # This script generates a Mock ICD that intercepts almost all Vulkan
23 #  functions. The mock is not a functional driver; it is a minimal
24 #  stand-in for a real Vulkan device, intended to enable Vulkan
25 #  Validation Layer testing without requiring physical hardware.
26
27 import os,re,sys
28 from generator import *
29 from common_codegen import *
30
31
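# A minimal, hedged sketch (not executed as part of this script) of how an output
# generator like this one is typically driven through the Khronos registry
# machinery. The wrapper script, file names, and exact Registry calls below are
# assumptions and may differ from the build glue actually used in this repository:
#
#   from reg import Registry
#
#   opts = MockICDGeneratorOptions(filename='mock_icd.h', directory='gen',
#                                  apiname='vulkan', defaultExtensions='vulkan')
#   reg = Registry()
#   reg.loadFile('vk.xml')                      # parse the Vulkan registry XML
#   reg.setGenerator(MockICDOutputGenerator())  # attach the generator defined below
#   reg.apiGen(opts)                            # writes gen/mock_icd.h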
32 # Mock header code
33 HEADER_C_CODE = '''
34 using mutex_t = std::mutex;
35 using lock_guard_t = std::lock_guard<mutex_t>;
36 using unique_lock_t = std::unique_lock<mutex_t>;
37
38 static mutex_t global_lock;
39 static uint64_t global_unique_handle = 1;
40 static const uint32_t SUPPORTED_LOADER_ICD_INTERFACE_VERSION = 5;
41 static uint32_t loader_interface_version = 0;
42 static bool negotiate_loader_icd_interface_called = false;
43 static void* CreateDispObjHandle() {
44     auto handle = new VK_LOADER_DATA;
45     set_loader_magic_value(handle);
46     return handle;
47 }
48 static void DestroyDispObjHandle(void* handle) {
49     delete reinterpret_cast<VK_LOADER_DATA*>(handle);
50 }
51 '''
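# Note on the helpers above: the Vulkan loader requires every dispatchable handle
# (VkInstance, VkPhysicalDevice, VkDevice, VkQueue, VkCommandBuffer) to begin with
# loader-owned dispatch data. CreateDispObjHandle() allocates a bare VK_LOADER_DATA
# stub and set_loader_magic_value() (from vk_icd.h) stamps it with ICD_LOADER_MAGIC,
# which is all the mock needs since it never dispatches anywhere. The generated
# create/destroy intercepts below use the pair directly, e.g.:
#
#   *pDevice = (VkDevice)CreateDispObjHandle();
#   ...
#   DestroyDispObjHandle((void*)device);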
52
53 # Manual code at the top of the cpp source file
54 SOURCE_CPP_PREFIX = '''
55 using std::unordered_map;
56
57 static constexpr uint32_t icd_physical_device_count = 1;
58 static unordered_map<VkInstance, std::array<VkPhysicalDevice, icd_physical_device_count>> physical_device_map;
59
60 // Map device memory handle to any mapped allocations that we'll need to free on unmap
61 static unordered_map<VkDeviceMemory, std::vector<void*>> mapped_memory_map;
62
63 // Map device memory allocation handle to the size
64 static unordered_map<VkDeviceMemory, VkDeviceSize> allocated_memory_size_map;
65
66 static unordered_map<VkDevice, unordered_map<uint32_t, unordered_map<uint32_t, VkQueue>>> queue_map;
67 static unordered_map<VkDevice, unordered_map<VkBuffer, VkBufferCreateInfo>> buffer_map;
68 static unordered_map<VkDevice, unordered_map<VkImage, VkDeviceSize>> image_memory_size_map;
69
70 static constexpr uint32_t icd_swapchain_image_count = 1;
71 static unordered_map<VkSwapchainKHR, VkImage[icd_swapchain_image_count]> swapchain_image_map;
72
73 // TODO: Would like to codegen this but limits aren't in XML
74 static VkPhysicalDeviceLimits SetLimits(VkPhysicalDeviceLimits *limits) {
75     limits->maxImageDimension1D = 4096;
76     limits->maxImageDimension2D = 4096;
77     limits->maxImageDimension3D = 256;
78     limits->maxImageDimensionCube = 4096;
79     limits->maxImageArrayLayers = 256;
80     limits->maxTexelBufferElements = 65536;
81     limits->maxUniformBufferRange = 16384;
82     limits->maxStorageBufferRange = 134217728;
83     limits->maxPushConstantsSize = 128;
84     limits->maxMemoryAllocationCount = 4096;
85     limits->maxSamplerAllocationCount = 4000;
86     limits->bufferImageGranularity = 1;
87     limits->sparseAddressSpaceSize = 2147483648;
88     limits->maxBoundDescriptorSets = 4;
89     limits->maxPerStageDescriptorSamplers = 16;
90     limits->maxPerStageDescriptorUniformBuffers = 12;
91     limits->maxPerStageDescriptorStorageBuffers = 4;
92     limits->maxPerStageDescriptorSampledImages = 16;
93     limits->maxPerStageDescriptorStorageImages = 4;
94     limits->maxPerStageDescriptorInputAttachments = 4;
95     limits->maxPerStageResources = 128;
96     limits->maxDescriptorSetSamplers = 96;
97     limits->maxDescriptorSetUniformBuffers = 72;
98     limits->maxDescriptorSetUniformBuffersDynamic = 8;
99     limits->maxDescriptorSetStorageBuffers = 24;
100     limits->maxDescriptorSetStorageBuffersDynamic = 4;
101     limits->maxDescriptorSetSampledImages = 96;
102     limits->maxDescriptorSetStorageImages = 24;
103     limits->maxDescriptorSetInputAttachments = 4;
104     limits->maxVertexInputAttributes = 16;
105     limits->maxVertexInputBindings = 16;
106     limits->maxVertexInputAttributeOffset = 2047;
107     limits->maxVertexInputBindingStride = 2048;
108     limits->maxVertexOutputComponents = 64;
109     limits->maxTessellationGenerationLevel = 64;
110     limits->maxTessellationPatchSize = 32;
111     limits->maxTessellationControlPerVertexInputComponents = 64;
112     limits->maxTessellationControlPerVertexOutputComponents = 64;
113     limits->maxTessellationControlPerPatchOutputComponents = 120;
114     limits->maxTessellationControlTotalOutputComponents = 2048;
115     limits->maxTessellationEvaluationInputComponents = 64;
116     limits->maxTessellationEvaluationOutputComponents = 64;
117     limits->maxGeometryShaderInvocations = 32;
118     limits->maxGeometryInputComponents = 64;
119     limits->maxGeometryOutputComponents = 64;
120     limits->maxGeometryOutputVertices = 256;
121     limits->maxGeometryTotalOutputComponents = 1024;
122     limits->maxFragmentInputComponents = 64;
123     limits->maxFragmentOutputAttachments = 4;
124     limits->maxFragmentDualSrcAttachments = 1;
125     limits->maxFragmentCombinedOutputResources = 4;
126     limits->maxComputeSharedMemorySize = 16384;
127     limits->maxComputeWorkGroupCount[0] = 65535;
128     limits->maxComputeWorkGroupCount[1] = 65535;
129     limits->maxComputeWorkGroupCount[2] = 65535;
130     limits->maxComputeWorkGroupInvocations = 128;
131     limits->maxComputeWorkGroupSize[0] = 128;
132     limits->maxComputeWorkGroupSize[1] = 128;
133     limits->maxComputeWorkGroupSize[2] = 64;
134     limits->subPixelPrecisionBits = 4;
135     limits->subTexelPrecisionBits = 4;
136     limits->mipmapPrecisionBits = 4;
137     limits->maxDrawIndexedIndexValue = UINT32_MAX;
138     limits->maxDrawIndirectCount = UINT16_MAX;
139     limits->maxSamplerLodBias = 2.0f;
140     limits->maxSamplerAnisotropy = 16;
141     limits->maxViewports = 16;
142     limits->maxViewportDimensions[0] = 4096;
143     limits->maxViewportDimensions[1] = 4096;
144     limits->viewportBoundsRange[0] = -8192;
145     limits->viewportBoundsRange[1] = 8191;
146     limits->viewportSubPixelBits = 0;
147     limits->minMemoryMapAlignment = 64;
148     limits->minTexelBufferOffsetAlignment = 16;
149     limits->minUniformBufferOffsetAlignment = 16;
150     limits->minStorageBufferOffsetAlignment = 16;
151     limits->minTexelOffset = -8;
152     limits->maxTexelOffset = 7;
153     limits->minTexelGatherOffset = -8;
154     limits->maxTexelGatherOffset = 7;
155     limits->minInterpolationOffset = 0.0f;
156     limits->maxInterpolationOffset = 0.5f;
157     limits->subPixelInterpolationOffsetBits = 4;
158     limits->maxFramebufferWidth = 4096;
159     limits->maxFramebufferHeight = 4096;
160     limits->maxFramebufferLayers = 256;
161     limits->framebufferColorSampleCounts = 0x7F;
162     limits->framebufferDepthSampleCounts = 0x7F;
163     limits->framebufferStencilSampleCounts = 0x7F;
164     limits->framebufferNoAttachmentsSampleCounts = 0x7F;
165     limits->maxColorAttachments = 4;
166     limits->sampledImageColorSampleCounts = 0x7F;
167     limits->sampledImageIntegerSampleCounts = 0x7F;
168     limits->sampledImageDepthSampleCounts = 0x7F;
169     limits->sampledImageStencilSampleCounts = 0x7F;
170     limits->storageImageSampleCounts = 0x7F;
171     limits->maxSampleMaskWords = 1;
172     limits->timestampComputeAndGraphics = VK_TRUE;
173     limits->timestampPeriod = 1;
174     limits->maxClipDistances = 8;
175     limits->maxCullDistances = 8;
176     limits->maxCombinedClipAndCullDistances = 8;
177     limits->discreteQueuePriorities = 2;
178     limits->pointSizeRange[0] = 1.0f;
179     limits->pointSizeRange[1] = 64.0f;
180     limits->lineWidthRange[0] = 1.0f;
181     limits->lineWidthRange[1] = 8.0f;
182     limits->pointSizeGranularity = 1.0f;
183     limits->lineWidthGranularity = 1.0f;
184     limits->strictLines = VK_TRUE;
185     limits->standardSampleLocations = VK_TRUE;
186     limits->optimalBufferCopyOffsetAlignment = 1;
187     limits->optimalBufferCopyRowPitchAlignment = 1;
188     limits->nonCoherentAtomSize = 256;
189
190     return *limits;
191 }
192
193 void SetBoolArrayTrue(VkBool32* bool_array, uint32_t num_bools)
194 {
195     for (uint32_t i = 0; i < num_bools; ++i) {
196         bool_array[i] = VK_TRUE;
197     }
198 }
199 '''
200
201 # Manual code at the end of the cpp source file
202 SOURCE_CPP_POSTFIX = '''
203
204 static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
205     // TODO: This function should only care about physical device functions and return nullptr for other functions
206     const auto &item = name_to_funcptr_map.find(funcName);
207     if (item != name_to_funcptr_map.end()) {
208         return reinterpret_cast<PFN_vkVoidFunction>(item->second);
209     }
210     // Mock should intercept all functions so if we get here just return null
211     return nullptr;
212 }
213
214 } // namespace vkmock
215
216 #if defined(__GNUC__) && __GNUC__ >= 4
217 #define EXPORT __attribute__((visibility("default")))
218 #elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
219 #define EXPORT __attribute__((visibility("default")))
220 #else
221 #define EXPORT
222 #endif
223
224 extern "C" {
225
226 EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName) {
227     if (!vkmock::negotiate_loader_icd_interface_called) {
228         vkmock::loader_interface_version = 1;
229     }
230     return vkmock::GetInstanceProcAddr(instance, pName);
231 }
232
233 EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName) {
234     return vkmock::GetPhysicalDeviceProcAddr(instance, pName);
235 }
236
237 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion) {
238     vkmock::negotiate_loader_icd_interface_called = true;
239     vkmock::loader_interface_version = *pSupportedVersion;
240     if (*pSupportedVersion > vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION) {
241         *pSupportedVersion = vkmock::SUPPORTED_LOADER_ICD_INTERFACE_VERSION;
242     }
243     return VK_SUCCESS;
244 }
245
246
247 EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
248     VkInstance                                  instance,
249     VkSurfaceKHR                                surface,
250     const VkAllocationCallbacks*                pAllocator)
251 {
252     vkmock::DestroySurfaceKHR(instance, surface, pAllocator);
253 }
254
255 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
256     VkPhysicalDevice                            physicalDevice,
257     uint32_t                                    queueFamilyIndex,
258     VkSurfaceKHR                                surface,
259     VkBool32*                                   pSupported)
260 {
261     return vkmock::GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
262 }
263
264 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
265     VkPhysicalDevice                            physicalDevice,
266     VkSurfaceKHR                                surface,
267     VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities)
268 {
269     return vkmock::GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
270 }
271
272 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
273     VkPhysicalDevice                            physicalDevice,
274     VkSurfaceKHR                                surface,
275     uint32_t*                                   pSurfaceFormatCount,
276     VkSurfaceFormatKHR*                         pSurfaceFormats)
277 {
278     return vkmock::GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
279 }
280
281 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
282     VkPhysicalDevice                            physicalDevice,
283     VkSurfaceKHR                                surface,
284     uint32_t*                                   pPresentModeCount,
285     VkPresentModeKHR*                           pPresentModes)
286 {
287     return vkmock::GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
288 }
289
290 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
291     VkInstance                                  instance,
292     const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
293     const VkAllocationCallbacks*                pAllocator,
294     VkSurfaceKHR*                               pSurface)
295 {
296     return vkmock::CreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
297 }
298
299 #ifdef VK_USE_PLATFORM_XLIB_KHR
300
301 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
302     VkInstance                                  instance,
303     const VkXlibSurfaceCreateInfoKHR*           pCreateInfo,
304     const VkAllocationCallbacks*                pAllocator,
305     VkSurfaceKHR*                               pSurface)
306 {
307     return vkmock::CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
308 }
309 #endif /* VK_USE_PLATFORM_XLIB_KHR */
310
311 #ifdef VK_USE_PLATFORM_XCB_KHR
312
313 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
314     VkInstance                                  instance,
315     const VkXcbSurfaceCreateInfoKHR*            pCreateInfo,
316     const VkAllocationCallbacks*                pAllocator,
317     VkSurfaceKHR*                               pSurface)
318 {
319     return vkmock::CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
320 }
321 #endif /* VK_USE_PLATFORM_XCB_KHR */
322
323 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
324
325 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
326     VkInstance                                  instance,
327     const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
328     const VkAllocationCallbacks*                pAllocator,
329     VkSurfaceKHR*                               pSurface)
330 {
331     return vkmock::CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
332 }
333 #endif /* VK_USE_PLATFORM_WAYLAND_KHR */
334
335 #ifdef VK_USE_PLATFORM_ANDROID_KHR
336
337 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
338     VkInstance                                  instance,
339     const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
340     const VkAllocationCallbacks*                pAllocator,
341     VkSurfaceKHR*                               pSurface)
342 {
343     return vkmock::CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
344 }
345 #endif /* VK_USE_PLATFORM_ANDROID_KHR */
346
347 #ifdef VK_USE_PLATFORM_WIN32_KHR
348
349 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
350     VkInstance                                  instance,
351     const VkWin32SurfaceCreateInfoKHR*          pCreateInfo,
352     const VkAllocationCallbacks*                pAllocator,
353     VkSurfaceKHR*                               pSurface)
354 {
355     return vkmock::CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
356 }
357 #endif /* VK_USE_PLATFORM_WIN32_KHR */
358
359 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
360     VkDevice                                    device,
361     VkSurfaceKHR                                surface,
362     VkDeviceGroupPresentModeFlagsKHR*           pModes)
363 {
364     return vkmock::GetDeviceGroupSurfacePresentModesKHR(device, surface, pModes);
365 }
366
367 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
368     VkPhysicalDevice                            physicalDevice,
369     VkSurfaceKHR                                surface,
370     uint32_t*                                   pRectCount,
371     VkRect2D*                                   pRects)
372 {
373     return vkmock::GetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects);
374 }
375
376 #ifdef VK_USE_PLATFORM_VI_NN
377
378 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN(
379     VkInstance                                  instance,
380     const VkViSurfaceCreateInfoNN*              pCreateInfo,
381     const VkAllocationCallbacks*                pAllocator,
382     VkSurfaceKHR*                               pSurface)
383 {
384     return vkmock::CreateViSurfaceNN(instance, pCreateInfo, pAllocator, pSurface);
385 }
386 #endif /* VK_USE_PLATFORM_VI_NN */
387
388 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
389     VkPhysicalDevice                            physicalDevice,
390     VkSurfaceKHR                                surface,
391     VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities)
392 {
393     return vkmock::GetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities);
394 }
395
396 #ifdef VK_USE_PLATFORM_IOS_MVK
397
398 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK(
399     VkInstance                                  instance,
400     const VkIOSSurfaceCreateInfoMVK*            pCreateInfo,
401     const VkAllocationCallbacks*                pAllocator,
402     VkSurfaceKHR*                               pSurface)
403 {
404     return vkmock::CreateIOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
405 }
406 #endif /* VK_USE_PLATFORM_IOS_MVK */
407
408 #ifdef VK_USE_PLATFORM_MACOS_MVK
409
410 EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(
411     VkInstance                                  instance,
412     const VkMacOSSurfaceCreateInfoMVK*          pCreateInfo,
413     const VkAllocationCallbacks*                pAllocator,
414     VkSurfaceKHR*                               pSurface)
415 {
416     return vkmock::CreateMacOSSurfaceMVK(instance, pCreateInfo, pAllocator, pSurface);
417 }
418 #endif /* VK_USE_PLATFORM_MACOS_MVK */
419
420 } // end extern "C"
421
422 '''
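# For orientation, a hedged sketch of the order in which the Vulkan loader exercises
# the exports defined in SOURCE_CPP_POSTFIX above (simplified; the real loader-ICD
# interface has additional optional steps):
#
#   1. vk_icdNegotiateLoaderICDInterfaceVersion(&ver)            # agree on an interface version (this mock supports up to 5)
#   2. vk_icdGetInstanceProcAddr(NULL, "vkCreateInstance"), etc.  # resolve global and instance-level entry points
#   3. vk_icdGetPhysicalDeviceProcAddr(instance, name)            # optionally resolve physical-device-level entry points
#   4. vkCreate*SurfaceKHR / vkDestroySurfaceKHR                  # WSI exports, used when the matching platform is enabled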
423
424 CUSTOM_C_INTERCEPTS = {
425 'vkCreateInstance': '''
426     // TODO: If the loader interface version is <= 4, the ICD must fail all vkCreateInstance calls that request
427     //  an apiVersion greater than Vulkan 1.0 with VK_ERROR_INCOMPATIBLE_DRIVER, since such a loader cannot use
428     //  a Vulkan 1.1 ICD. Otherwise, the ICD should behave as normal.
429     if (loader_interface_version <= 4) {
430         return VK_ERROR_INCOMPATIBLE_DRIVER;
431     }
432     *pInstance = (VkInstance)CreateDispObjHandle();
433     for (auto& physical_device : physical_device_map[*pInstance])
434         physical_device = (VkPhysicalDevice)CreateDispObjHandle();
435     // TODO: If emulating specific device caps, will need to add intelligence here
436     return VK_SUCCESS;
437 ''',
438 'vkDestroyInstance': '''
439     if (instance) {
440         for (const auto physical_device : physical_device_map.at(instance))
441             DestroyDispObjHandle((void*)physical_device);
442         physical_device_map.erase(instance);
443         DestroyDispObjHandle((void*)instance);
444     }
445 ''',
446 'vkEnumeratePhysicalDevices': '''
447     VkResult result_code = VK_SUCCESS;
448     if (pPhysicalDevices) {
449         const auto return_count = (std::min)(*pPhysicalDeviceCount, icd_physical_device_count);
450         for (uint32_t i = 0; i < return_count; ++i) pPhysicalDevices[i] = physical_device_map.at(instance)[i];
451         if (return_count < icd_physical_device_count) result_code = VK_INCOMPLETE;
452         *pPhysicalDeviceCount = return_count;
453     } else {
454         *pPhysicalDeviceCount = icd_physical_device_count;
455     }
456     return result_code;
457 ''',
458 'vkCreateDevice': '''
459     *pDevice = (VkDevice)CreateDispObjHandle();
460     // TODO: If emulating specific device caps, will need to add intelligence here
461     return VK_SUCCESS;
462 ''',
463 'vkDestroyDevice': '''
464     unique_lock_t lock(global_lock);
465     // First destroy sub-device objects
466     // Destroy Queues
467     for (auto dev_queue_map_pair : queue_map) {
468         for (auto queue_family_map_pair : queue_map[dev_queue_map_pair.first]) {
469             for (auto index_queue_pair : queue_map[dev_queue_map_pair.first][queue_family_map_pair.first]) {
470                 DestroyDispObjHandle((void*)index_queue_pair.second);
471             }
472         }
473     }
474     queue_map.clear();
475     buffer_map.erase(device);
476     image_memory_size_map.erase(device);
477     // Now destroy device
478     DestroyDispObjHandle((void*)device);
479     // TODO: If emulating specific device caps, will need to add intelligence here
480 ''',
481 'vkGetDeviceQueue': '''
482     unique_lock_t lock(global_lock);
483     auto queue = queue_map[device][queueFamilyIndex][queueIndex];
484     if (queue) {
485         *pQueue = queue;
486     } else {
487         *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
488     }
489     // TODO: If emulating specific device caps, will need to add intelligence here
490     return;
491 ''',
492 'vkGetDeviceQueue2': '''
493     GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
494     // TODO: Add further support for GetDeviceQueue2 features
495 ''',
496 'vkEnumerateInstanceLayerProperties': '''
497     return VK_SUCCESS;
498 ''',
499 'vkEnumerateInstanceVersion': '''
500     *pApiVersion = VK_API_VERSION_1_1;
501     return VK_SUCCESS;
502 ''',
503 'vkEnumerateDeviceLayerProperties': '''
504     return VK_SUCCESS;
505 ''',
506 'vkEnumerateInstanceExtensionProperties': '''
507     // If requesting number of extensions, return that
508     if (!pLayerName) {
509         if (!pProperties) {
510             *pPropertyCount = (uint32_t)instance_extension_map.size();
511         } else {
512             uint32_t i = 0;
513             for (const auto &name_ver_pair : instance_extension_map) {
514                 if (i == *pPropertyCount) {
515                     break;
516                 }
517                 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
518                 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
519                 pProperties[i].specVersion = name_ver_pair.second;
520                 ++i;
521             }
522             if (i != instance_extension_map.size()) {
523                 return VK_INCOMPLETE;
524             }
525         }
526     }
527     // Nothing else to do; extension data (if any) was filled in above
528     return VK_SUCCESS;
529 ''',
530 'vkEnumerateDeviceExtensionProperties': '''
531     // If requesting number of extensions, return that
532     if (!pLayerName) {
533         if (!pProperties) {
534             *pPropertyCount = (uint32_t)device_extension_map.size();
535         } else {
536             uint32_t i = 0;
537             for (const auto &name_ver_pair : device_extension_map) {
538                 if (i == *pPropertyCount) {
539                     break;
540                 }
541                 std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
542                 pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
543                 pProperties[i].specVersion = name_ver_pair.second;
544                 ++i;
545             }
546             if (i != device_extension_map.size()) {
547                 return VK_INCOMPLETE;
548             }
549         }
550     }
551     // Nothing else to do; extension data (if any) was filled in above
552     return VK_SUCCESS;
553 ''',
554 'vkGetPhysicalDeviceSurfacePresentModesKHR': '''
555     // Currently always say that all present modes are supported
556     if (!pPresentModes) {
557         *pPresentModeCount = 6;
558     } else {
559         // Intentionally falling through and just filling however many modes are requested
560         switch(*pPresentModeCount) {
561         case 6:
562             pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
563             // fall through
564         case 5:
565             pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
566             // fall through
567         case 4:
568             pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
569             // fall through
570         case 3:
571             pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
572             // fall through
573         case 2:
574             pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
575             // fall through
576         default:
577             pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
578             break;
579         }
580     }
581     return VK_SUCCESS;
582 ''',
583 'vkGetPhysicalDeviceSurfaceFormatsKHR': '''
584     // Currently always say that RGBA8 & BGRA8 are supported
585     if (!pSurfaceFormats) {
586         *pSurfaceFormatCount = 2;
587     } else {
588         // Intentionally falling through and just filling however many types are requested
589         switch(*pSurfaceFormatCount) {
590         case 2:
591             pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
592             pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
593             // fall through
594         default:
595             pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
596             pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
597             break;
598         }
599     }
600     return VK_SUCCESS;
601 ''',
602 'vkGetPhysicalDeviceSurfaceFormats2KHR': '''
603     // Currently always say that RGBA8 & BGRA8 are supported
604     if (!pSurfaceFormats) {
605         *pSurfaceFormatCount = 2;
606     } else {
607         // Intentionally falling through and just filling however many types are requested
608         switch(*pSurfaceFormatCount) {
609         case 2:
610             pSurfaceFormats[1].pNext = nullptr;
611             pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
612             pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
613             // fall through
614         default:
615             pSurfaceFormats[0].pNext = nullptr;
616             pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
617             pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
618             break;
619         }
620     }
621     return VK_SUCCESS;
622 ''',
623 'vkGetPhysicalDeviceSurfaceSupportKHR': '''
624     // Currently say that all surface/queue combos are supported
625     *pSupported = VK_TRUE;
626     return VK_SUCCESS;
627 ''',
628 'vkGetPhysicalDeviceSurfaceCapabilitiesKHR': '''
629     // In general, report that the maximum is supported for the requested surface
630     pSurfaceCapabilities->minImageCount = 1;
631     pSurfaceCapabilities->maxImageCount = 0;
632     pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
633     pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
634     pSurfaceCapabilities->minImageExtent.width = 1;
635     pSurfaceCapabilities->minImageExtent.height = 1;
636     pSurfaceCapabilities->maxImageExtent.width = 3840;
637     pSurfaceCapabilities->maxImageExtent.height = 2160;
638     pSurfaceCapabilities->maxImageArrayLayers = 128;
639     pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
640                                                 VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
641                                                 VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
642                                                 VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
643                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
644                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
645                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
646                                                 VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
647                                                 VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
648     pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
649     pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
650                                                     VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
651                                                     VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
652                                                     VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
653     pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
654                                                 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
655                                                 VK_IMAGE_USAGE_SAMPLED_BIT |
656                                                 VK_IMAGE_USAGE_STORAGE_BIT |
657                                                 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
658                                                 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
659                                                 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
660                                                 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
661     return VK_SUCCESS;
662 ''',
663 'vkGetPhysicalDeviceSurfaceCapabilities2KHR': '''
664     GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
665     return VK_SUCCESS;
666 ''',
667 'vkGetInstanceProcAddr': '''
668     if (!negotiate_loader_icd_interface_called) {
669         loader_interface_version = 0;
670     }
671     const auto &item = name_to_funcptr_map.find(pName);
672     if (item != name_to_funcptr_map.end()) {
673         return reinterpret_cast<PFN_vkVoidFunction>(item->second);
674     }
675     // Mock should intercept all functions so if we get here just return null
676     return nullptr;
677 ''',
678 'vkGetDeviceProcAddr': '''
679     return GetInstanceProcAddr(nullptr, pName);
680 ''',
681 'vkGetPhysicalDeviceMemoryProperties': '''
682     pMemoryProperties->memoryTypeCount = 2;
683     pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
684     pMemoryProperties->memoryTypes[0].heapIndex = 0;
685     pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
686     pMemoryProperties->memoryTypes[1].heapIndex = 1;
687     pMemoryProperties->memoryHeapCount = 2;
688     pMemoryProperties->memoryHeaps[0].flags = 0;
689     pMemoryProperties->memoryHeaps[0].size = 8000000000;
690     pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
691     pMemoryProperties->memoryHeaps[1].size = 8000000000;
692 ''',
693 'vkGetPhysicalDeviceMemoryProperties2KHR': '''
694     GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
695 ''',
696 'vkGetPhysicalDeviceQueueFamilyProperties': '''
697     if (!pQueueFamilyProperties) {
698         *pQueueFamilyPropertyCount = 1;
699     } else {
700         if (*pQueueFamilyPropertyCount) {
701             pQueueFamilyProperties[0].queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT;
702             pQueueFamilyProperties[0].queueCount = 1;
703             pQueueFamilyProperties[0].timestampValidBits = 0;
704             pQueueFamilyProperties[0].minImageTransferGranularity = {1,1,1};
705         }
706     }
707 ''',
708 'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
709     if (pQueueFamilyPropertyCount && pQueueFamilyProperties) {
710         GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, &pQueueFamilyProperties->queueFamilyProperties);
711     } else {
712         GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, nullptr);
713     }
714 ''',
715 'vkGetPhysicalDeviceFeatures': '''
716     uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
717     VkBool32 *bool_array = &pFeatures->robustBufferAccess;
718     SetBoolArrayTrue(bool_array, num_bools);
719 ''',
720 'vkGetPhysicalDeviceFeatures2KHR': '''
721     GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
722     uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
723     VkBool32* feat_bools = nullptr;
724     const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
725     if (desc_idx_features) {
726         const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
727         num_bools = bool_size/sizeof(VkBool32);
728         feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
729         SetBoolArrayTrue(feat_bools, num_bools);
730     }
731     const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
732     if (blendop_features) {
733         const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
734         num_bools = bool_size/sizeof(VkBool32);
735         feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
736         SetBoolArrayTrue(feat_bools, num_bools);
737     }
738 ''',
739 'vkGetPhysicalDeviceFormatProperties': '''
740     if (VK_FORMAT_UNDEFINED == format) {
741         *pFormatProperties = { 0x0, 0x0, 0x0 };
742     } else {
743         // TODO: Just returning full support for everything initially
744         *pFormatProperties = { 0x00FFFFFF, 0x00FFFFFF, 0x00FFFFFF };
745     }
746 ''',
747 'vkGetPhysicalDeviceFormatProperties2KHR': '''
748     GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
749 ''',
750 'vkGetPhysicalDeviceImageFormatProperties': '''
751     // A hardcoded unsupported format
752     if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
753         return VK_ERROR_FORMAT_NOT_SUPPORTED;
754     }
755
756     // TODO: Just hard-coding some values for now
757     // TODO: If tiling is linear, limit the mips, levels, & sample count
758     if (VK_IMAGE_TILING_LINEAR == tiling) {
759         *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
760     } else {
761         // We hard-code support for all sample counts except VK_SAMPLE_COUNT_64_BIT.
762         *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
763     }
764     return VK_SUCCESS;
765 ''',
766 'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
767     // Propagate the result so the hard-coded unsupported format is also reported through this query
768     return GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
769 ''',
770 'vkGetPhysicalDeviceProperties': '''
771     // TODO: Just hard-coding some values for now
772     pProperties->apiVersion = VK_API_VERSION_1_1;
773     pProperties->driverVersion = 1;
774     pProperties->vendorID = 0xba5eba11;
775     pProperties->deviceID = 0xf005ba11;
776     pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
777     //std::string devName = "Vulkan Mock Device";
778     strcpy(pProperties->deviceName, "Vulkan Mock Device");
779     pProperties->pipelineCacheUUID[0] = 18;
780     pProperties->limits = SetLimits(&pProperties->limits);
781     pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
782 ''',
783 'vkGetPhysicalDeviceProperties2KHR': '''
784     GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
785     const auto *desc_idx_props = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingPropertiesEXT>(pProperties->pNext);
786     if (desc_idx_props) {
787         VkPhysicalDeviceDescriptorIndexingPropertiesEXT* write_props = (VkPhysicalDeviceDescriptorIndexingPropertiesEXT*)desc_idx_props;
788         write_props->maxUpdateAfterBindDescriptorsInAllPools = 500000;
789         write_props->shaderUniformBufferArrayNonUniformIndexingNative = false;
790         write_props->shaderSampledImageArrayNonUniformIndexingNative = false;
791         write_props->shaderStorageBufferArrayNonUniformIndexingNative = false;
792         write_props->shaderStorageImageArrayNonUniformIndexingNative = false;
793         write_props->shaderInputAttachmentArrayNonUniformIndexingNative = false;
794         write_props->robustBufferAccessUpdateAfterBind = true;
795         write_props->quadDivergentImplicitLod = true;
796         write_props->maxPerStageDescriptorUpdateAfterBindSamplers = 500000;
797         write_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers = 500000;
798         write_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers = 500000;
799         write_props->maxPerStageDescriptorUpdateAfterBindSampledImages = 500000;
800         write_props->maxPerStageDescriptorUpdateAfterBindStorageImages = 500000;
801         write_props->maxPerStageDescriptorUpdateAfterBindInputAttachments = 500000;
802         write_props->maxPerStageUpdateAfterBindResources = 500000;
803         write_props->maxDescriptorSetUpdateAfterBindSamplers = 500000;
804         write_props->maxDescriptorSetUpdateAfterBindUniformBuffers = 96;
805         write_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 8;
806         write_props->maxDescriptorSetUpdateAfterBindStorageBuffers = 500000;
807         write_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 4;
808         write_props->maxDescriptorSetUpdateAfterBindSampledImages = 500000;
809         write_props->maxDescriptorSetUpdateAfterBindStorageImages = 500000;
810         write_props->maxDescriptorSetUpdateAfterBindInputAttachments = 500000;
811     }
812
813     const auto *push_descriptor_props = lvl_find_in_chain<VkPhysicalDevicePushDescriptorPropertiesKHR>(pProperties->pNext);
814     if (push_descriptor_props) {
815         VkPhysicalDevicePushDescriptorPropertiesKHR* write_props = (VkPhysicalDevicePushDescriptorPropertiesKHR*)push_descriptor_props;
816         write_props->maxPushDescriptors = 256;
817     }
818
819     const auto *depth_stencil_resolve_props = lvl_find_in_chain<VkPhysicalDeviceDepthStencilResolvePropertiesKHR>(pProperties->pNext);
820     if (depth_stencil_resolve_props) {
821         VkPhysicalDeviceDepthStencilResolvePropertiesKHR* write_props = (VkPhysicalDeviceDepthStencilResolvePropertiesKHR*)depth_stencil_resolve_props;
822         write_props->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
823         write_props->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
824     }
825 ''',
826 'vkGetPhysicalDeviceExternalSemaphoreProperties':'''
827     // Hard code support for all handle types and features
828     pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
829     pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
830     pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
831 ''',
832 'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':'''
833     GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
834 ''',
835 'vkGetPhysicalDeviceExternalFenceProperties':'''
836     // Hard-code support for all handle types and features
837     pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
838     pExternalFenceProperties->compatibleHandleTypes = 0xF;
839     pExternalFenceProperties->externalFenceFeatures = 0x3;
840 ''',
841 'vkGetPhysicalDeviceExternalFencePropertiesKHR':'''
842     GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
843 ''',
844 'vkGetPhysicalDeviceExternalBufferProperties':'''
845     // Hard-code support for all handle types and features
846     pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
847     pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0x1FF;
848     pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = 0x1FF;
849 ''',
850 'vkGetPhysicalDeviceExternalBufferPropertiesKHR':'''
851     GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
852 ''',
853 'vkGetBufferMemoryRequirements': '''
854     // TODO: Just hard-coding reqs for now
855     pMemoryRequirements->size = 4096;
856     pMemoryRequirements->alignment = 1;
857     pMemoryRequirements->memoryTypeBits = 0xFFFF;
858     // Return a better size based on the buffer size from the create info.
859     auto d_iter = buffer_map.find(device);
860     if (d_iter != buffer_map.end()) {
861         auto iter = d_iter->second.find(buffer);
862         if (iter != d_iter->second.end()) {
863             pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
864         }
865     }
866 ''',
867 'vkGetBufferMemoryRequirements2KHR': '''
868     GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
869 ''',
870 'vkGetImageMemoryRequirements': '''
871     pMemoryRequirements->size = 0;
872     pMemoryRequirements->alignment = 1;
873
874     auto d_iter = image_memory_size_map.find(device);
875     if(d_iter != image_memory_size_map.end()){
876         auto iter = d_iter->second.find(image);
877         if (iter != d_iter->second.end()) {
878             pMemoryRequirements->size = iter->second;
879         }
880     }
881     // Here we hard-code that the memory type at index 3 doesn't support this image.
882     pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
883 ''',
884 'vkGetImageMemoryRequirements2KHR': '''
885     GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
886 ''',
887 'vkMapMemory': '''
888     unique_lock_t lock(global_lock);
889     if (VK_WHOLE_SIZE == size) {
890         if (allocated_memory_size_map.count(memory) != 0)
891             size = allocated_memory_size_map[memory] - offset;
892         else
893             size = 0x10000;
894     }
895     void* map_addr = malloc((size_t)size);
896     mapped_memory_map[memory].push_back(map_addr);
897     *ppData = map_addr;
898     return VK_SUCCESS;
899 ''',
900 'vkUnmapMemory': '''
901     unique_lock_t lock(global_lock);
902     for (auto map_addr : mapped_memory_map[memory]) {
903         free(map_addr);
904     }
905     mapped_memory_map.erase(memory);
906 ''',
907 'vkGetImageSubresourceLayout': '''
908     // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure.
909     *pLayout = VkSubresourceLayout(); // Default constructor zero values.
910 ''',
911 'vkCreateSwapchainKHR': '''
912     unique_lock_t lock(global_lock);
913     *pSwapchain = (VkSwapchainKHR)global_unique_handle++;
914     for(uint32_t i = 0; i < icd_swapchain_image_count; ++i){
915         swapchain_image_map[*pSwapchain][i] = (VkImage)global_unique_handle++;
916     }
917     return VK_SUCCESS;
918 ''',
919 'vkDestroySwapchainKHR': '''
920     unique_lock_t lock(global_lock);
921     swapchain_image_map.erase(swapchain);  // remove only this swapchain's images
922 ''',
923 'vkGetSwapchainImagesKHR': '''
924     if (!pSwapchainImages) {
925         *pSwapchainImageCount = icd_swapchain_image_count;
926     } else {
927         unique_lock_t lock(global_lock);
928         for (uint32_t img_i = 0; img_i < (std::min)(*pSwapchainImageCount, icd_swapchain_image_count); ++img_i){
929             pSwapchainImages[img_i] = swapchain_image_map.at(swapchain)[img_i];
930         }
931
932         if (*pSwapchainImageCount < icd_swapchain_image_count) return VK_INCOMPLETE;
933         else if (*pSwapchainImageCount > icd_swapchain_image_count) *pSwapchainImageCount = icd_swapchain_image_count;
934     }
935     return VK_SUCCESS;
936 ''',
937 'vkAcquireNextImageKHR': '''
938     *pImageIndex = 0;
939     return VK_SUCCESS;
940 ''',
941 'vkAcquireNextImage2KHR': '''
942     *pImageIndex = 0;
943     return VK_SUCCESS;
944 ''',
945 'vkCreateBuffer': '''
946     unique_lock_t lock(global_lock);
947     *pBuffer = (VkBuffer)global_unique_handle++;
948     buffer_map[device][*pBuffer] = *pCreateInfo;
949     return VK_SUCCESS;
950 ''',
951 'vkDestroyBuffer': '''
952     unique_lock_t lock(global_lock);
953     buffer_map[device].erase(buffer);
954 ''',
955 'vkCreateImage': '''
956     unique_lock_t lock(global_lock);
957     *pImage = (VkImage)global_unique_handle++;
958     // TODO: Assume a texel size of 32 bytes, which covers the largest texel size of any format. This could be refined to a more accurate per-format size if needed.
959     image_memory_size_map[device][*pImage] = pCreateInfo->extent.width * pCreateInfo->extent.height * pCreateInfo->extent.depth *
960                                              32 * pCreateInfo->arrayLayers * (pCreateInfo->mipLevels > 1 ? 2 : 1);
961     // plane count
962     switch (pCreateInfo->format) {
963         case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
964         case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
965         case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
966         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
967         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
968         case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
969         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
970         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
971         case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
972         case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
973         case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
974         case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
975             image_memory_size_map[device][*pImage] *= 3;
976             break;
977         case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
978         case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
979         case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
980         case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
981         case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
982         case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
983         case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
984         case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
985             image_memory_size_map[device][*pImage] *= 2;
986             break;
987         default:
988             break;
989     }
990     return VK_SUCCESS;
991 ''',
992 'vkDestroyImage': '''
993     unique_lock_t lock(global_lock);
994     image_memory_size_map[device].erase(image);
995 ''',
996 }
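# Illustrative sketch only: each entry above is keyed by the full 'vk'-prefixed
# command name, and the generator (see genCmd() in MockICDOutputGenerator below)
# pastes the custom body verbatim when one exists, otherwise emitting a generic
# stub. The helper and default body shown here are hypothetical, not part of the
# real generator:
#
#   def body_for_command(vk_command_name, default_body='    return VK_SUCCESS;\n'):
#       """Return the hand-written intercept body for a command, or a stub."""
#       return CUSTOM_C_INTERCEPTS.get(vk_command_name, default_body)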
997
998 # MockICDGeneratorOptions - subclass of GeneratorOptions.
999 #
1000 # Adds options used by MockICDOutputGenerator objects during Mock
1001 # ICD generation.
1002 #
1003 # Additional members
1004 #   prefixText - list of strings to prefix generated header with
1005 #     (usually a copyright statement + calling convention macros).
1006 #   protectFile - True if multiple inclusion protection should be
1007 #     generated (based on the filename) around the entire header.
1008 #   protectFeature - True if #ifndef..#endif protection should be
1009 #     generated around a feature interface in the header file.
1010 #   genFuncPointers - True if function pointer typedefs should be
1011 #     generated
1012 #   protectProto - If conditional protection should be generated
1013 #     around prototype declarations, set to either '#ifdef'
1014 #     to require opt-in (#ifdef protectProtoStr) or '#ifndef'
1015 #     to require opt-out (#ifndef protectProtoStr). Otherwise
1016 #     set to None.
1017 #   protectProtoStr - #ifdef/#ifndef symbol to use around prototype
1018 #     declarations, if protectProto is set
1019 #   apicall - string to use for the function declaration prefix,
1020 #     such as APICALL on Windows.
1021 #   apientry - string to use for the calling convention macro,
1022 #     in typedefs, such as APIENTRY.
1023 #   apientryp - string to use for the calling convention macro
1024 #     in function pointer typedefs, such as APIENTRYP.
1025 #   indentFuncProto - True if prototype declarations should put each
1026 #     parameter on a separate line
1027 #   indentFuncPointer - True if typedefed function pointers should put each
1028 #     parameter on a separate line
1029 #   alignFuncParam - if nonzero and parameters are being put on a
1030 #     separate line, align parameter names at the specified column
1031 class MockICDGeneratorOptions(GeneratorOptions):
1032     def __init__(self,
1033                  conventions = None,
1034                  filename = None,
1035                  directory = '.',
1036                  apiname = None,
1037                  profile = None,
1038                  versions = '.*',
1039                  emitversions = '.*',
1040                  defaultExtensions = None,
1041                  addExtensions = None,
1042                  removeExtensions = None,
1043                  emitExtensions = None,
1044                  sortProcedure = regSortFeatures,
1045                  prefixText = "",
1046                  genFuncPointers = True,
1047                  protectFile = True,
1048                  protectFeature = True,
1049                  protectProto = None,
1050                  protectProtoStr = None,
1051                  apicall = '',
1052                  apientry = '',
1053                  apientryp = '',
1054                  indentFuncProto = True,
1055                  indentFuncPointer = False,
1056                  alignFuncParam = 0,
1057                  expandEnumerants = True,
1058                  helper_file_type = ''):
1059         GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile,
1060                                   versions, emitversions, defaultExtensions,
1061                                   addExtensions, removeExtensions, emitExtensions, sortProcedure)
1062         self.prefixText      = prefixText
1063         self.genFuncPointers = genFuncPointers
1064         self.protectFile     = protectFile
1065         self.protectFeature  = protectFeature
1066         self.protectProto    = protectProto
1067         self.protectProtoStr = protectProtoStr
1068         self.apicall         = apicall
1069         self.apientry        = apientry
1070         self.apientryp       = apientryp
1071         self.indentFuncProto = indentFuncProto
1072         self.indentFuncPointer = indentFuncPointer
1073         self.alignFuncParam  = alignFuncParam
1074
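# A hedged sketch of how the same generator class is used to produce both halves of
# the mock ICD: beginFile() below keys off whether the output filename ends in 'h',
# so a wrapper script would typically run the generator twice. File and directory
# names here are examples, not the actual build configuration:
#
#   header_opts = MockICDGeneratorOptions(filename='mock_icd.h', directory='gen', apiname='vulkan')
#   source_opts = MockICDGeneratorOptions(filename='mock_icd.cpp', directory='gen', apiname='vulkan')
#   # filename ending in 'h' -> emits HEADER_C_CODE plus the extension and function-pointer maps
#   # any other filename     -> emits SOURCE_CPP_PREFIX, the generated intercepts, then SOURCE_CPP_POSTFIX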
1075 # MockICDOutputGenerator - subclass of OutputGenerator.
1076 # Generates a mock Vulkan ICD.
1077 #  This is intended to be a minimal replacement for a Vulkan device in order
1078 #  to enable Vulkan Validation testing.
1079 #
1080 # ---- methods ----
1081 # MockICDOutputGenerator(errFile, warnFile, diagFile) - args as for
1082 #   OutputGenerator. Defines additional internal state.
1083 # ---- methods overriding base class ----
1084 # beginFile(genOpts)
1085 # endFile()
1086 # beginFeature(interface, emit)
1087 # endFeature()
1088 # genType(typeinfo,name)
1089 # genStruct(typeinfo,name)
1090 # genGroup(groupinfo,name)
1091 # genEnum(enuminfo, name)
1092 # genCmd(cmdinfo)
1093 class MockICDOutputGenerator(OutputGenerator):
1094     """Generate specified API interfaces in a specific style, such as a C header"""
1095     # This is an ordered list of sections in the header file.
1096     TYPE_SECTIONS = ['include', 'define', 'basetype', 'handle', 'enum',
1097                      'group', 'bitmask', 'funcpointer', 'struct']
1098     ALL_SECTIONS = TYPE_SECTIONS + ['command']
1099     def __init__(self,
1100                  errFile = sys.stderr,
1101                  warnFile = sys.stderr,
1102                  diagFile = sys.stdout):
1103         OutputGenerator.__init__(self, errFile, warnFile, diagFile)
1104         # Internal state - accumulators for different inner block text
1105         self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1106         self.intercepts = []
1107
1108     # Check if the parameter passed in is a pointer to an array
1109     def paramIsArray(self, param):
1110         return param.attrib.get('len') is not None
1111
1112     # Check if the parameter passed in is a pointer
1113     def paramIsPointer(self, param):
1114         ispointer = False
1115         for elem in param:
1116             if ((elem.tag != 'type') and (elem.tail is not None)) and '*' in elem.tail:
1117                 ispointer = True
1118         return ispointer
1119
1120     # Check if an object is a non-dispatchable handle
1121     def isHandleTypeNonDispatchable(self, handletype):
1122         handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
1123         if handle is not None and handle.find('type').text == 'VK_DEFINE_NON_DISPATCHABLE_HANDLE':
1124             return True
1125         else:
1126             return False
1127
1128     # Check if an object is a dispatchable handle
1129     def isHandleTypeDispatchable(self, handletype):
1130         handle = self.registry.tree.find("types/type/[name='" + handletype + "'][@category='handle']")
1131         if handle is not None and handle.find('type').text == 'VK_DEFINE_HANDLE':
1132             return True
1133         else:
1134             return False
1135
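    # For reference, the XPath queries in the two helpers above match vk.xml handle
    # entries of roughly this shape (illustrative excerpt; attributes trimmed):
    #
    #   <type category="handle"><type>VK_DEFINE_HANDLE</type>(<name>VkDevice</name>)</type>
    #   <type category="handle"><type>VK_DEFINE_NON_DISPATCHABLE_HANDLE</type>(<name>VkBuffer</name>)</type>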
1136     def beginFile(self, genOpts):
1137         OutputGenerator.beginFile(self, genOpts)
1138         # C-specific
1139         #
1140         # Multiple inclusion protection & C++ namespace.
1141         self.header = False
1142         if (genOpts.protectFile and self.genOpts.filename and 'h' == self.genOpts.filename[-1]):
1143             self.header = True
1144             headerSym = '__' + re.sub(r'\.h', '_h_', os.path.basename(self.genOpts.filename))
1145             write('#ifndef', headerSym, file=self.outFile)
1146             write('#define', headerSym, '1', file=self.outFile)
1147             self.newline()
1148         #
1149         # User-supplied prefix text, if any (list of strings)
1150         if (genOpts.prefixText):
1151             for s in genOpts.prefixText:
1152                 write(s, file=self.outFile)
1153         if self.header:
1154             write('#include <unordered_map>', file=self.outFile)
1155             write('#include <mutex>', file=self.outFile)
1156             write('#include <string>', file=self.outFile)
1157             write('#include <cstring>', file=self.outFile)
1158             write('#include "vulkan/vk_icd.h"', file=self.outFile)
1159         else:
1160             write('#include "mock_icd.h"', file=self.outFile)
1161             write('#include <stdlib.h>', file=self.outFile)
1162             write('#include <algorithm>', file=self.outFile)
1163             write('#include <array>', file=self.outFile)
1164             write('#include <vector>', file=self.outFile)
1165             write('#include "vk_typemap_helper.h"', file=self.outFile)
1166
1167         write('namespace vkmock {', file=self.outFile)
1168         if self.header:
1169             self.newline()
1170             write(HEADER_C_CODE, file=self.outFile)
1171             # Include all of the extensions in ICD except specific ignored ones
1172             device_exts = []
1173             instance_exts = []
1174             # Ignore extensions that ICDs should not implement or are not safe to report
1175             ignore_exts = ['VK_EXT_validation_cache']
1176             for ext in self.registry.tree.findall("extensions/extension"):
1177                 if ext.attrib['supported'] != 'disabled': # Only include enabled extensions
1178                     if (ext.attrib['name'] in ignore_exts):
1179                         pass
1180                     elif (ext.attrib.get('type') and 'instance' == ext.attrib['type']):
1181                         instance_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
1182                     else:
1183                         device_exts.append('    {"%s", %s},' % (ext.attrib['name'], ext[0][0].attrib['value']))
1184             write('// Map of instance extension name to version', file=self.outFile)
1185             write('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {', file=self.outFile)
1186             write('\n'.join(instance_exts), file=self.outFile)
1187             write('};', file=self.outFile)
1188             write('// Map of device extension name to version', file=self.outFile)
1189             write('static const std::unordered_map<std::string, uint32_t> device_extension_map = {', file=self.outFile)
1190             write('\n'.join(device_exts), file=self.outFile)
1191             write('};', file=self.outFile)
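            # The generated maps are expected to look roughly like the following, where the
            # value is the extension's first <require>/<enum> value (its SPEC_VERSION), e.g.:
            #   static const std::unordered_map<std::string, uint32_t> instance_extension_map = {
            #       {"VK_KHR_surface", 25},
            #       ...
            #   };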
1192
1193         else:
1194             self.newline()
1195             write(SOURCE_CPP_PREFIX, file=self.outFile)
1196
1197     def endFile(self):
1198         # C-specific
1199         # Finish C++ namespace and multiple inclusion protection
1200         self.newline()
1201         if self.header:
1202             # record intercepted procedures
1203             write('// Map of all APIs to be intercepted by this ICD', file=self.outFile)
1204             write('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {', file=self.outFile)
1205             write('\n'.join(self.intercepts), file=self.outFile)
1206             write('};\n', file=self.outFile)
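            # Each self.intercepts entry was formatted in genCmd(), so the emitted map should
            # look roughly like:
            #   static const std::unordered_map<std::string, void*> name_to_funcptr_map = {
            #       {"vkCreateInstance", (void*)CreateInstance},
            #       ...
            #   };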
1207             self.newline()
1208             write('} // namespace vkmock', file=self.outFile)
1209             self.newline()
1210             write('#endif', file=self.outFile)
1211         else: # Loader-ICD interface; need to implement global interface functions
1212             write(SOURCE_CPP_POSTFIX, file=self.outFile)
1213         # Finish processing in superclass
1214         OutputGenerator.endFile(self)
1215     def beginFeature(self, interface, emit):
1216         #write('// starting beginFeature', file=self.outFile)
1217         # Start processing in superclass
1218         OutputGenerator.beginFeature(self, interface, emit)
1219         self.featureExtraProtect = GetFeatureProtect(interface)
1220         # C-specific
1221         # Accumulate includes, defines, types, enums, function pointer typedefs,
1222         # and function prototypes separately for this feature. They're only
1223         # printed in endFeature().
1224         self.sections = dict([(section, []) for section in self.ALL_SECTIONS])
1225         #write('// ending beginFeature', file=self.outFile)
1226     def endFeature(self):
1227         # C-specific
1228         # Actually write the interface to the output file.
1229         #write('// starting endFeature', file=self.outFile)
1230         if (self.emit):
1231             self.newline()
1232             if (self.genOpts.protectFeature):
1233                 write('#ifndef', self.featureName, file=self.outFile)
1234             # If type declarations are needed by other features based on
1235             # this one, it may be necessary to suppress the ExtraProtect,
1236             # or move it below the 'for section...' loop.
1237             #write('// endFeature looking at self.featureExtraProtect', file=self.outFile)
1238             if (self.featureExtraProtect != None):
1239                 write('#ifdef', self.featureExtraProtect, file=self.outFile)
1240             #write('#define', self.featureName, '1', file=self.outFile)
1241             for section in self.TYPE_SECTIONS:
1242                 #write('// endFeature writing section'+section, file=self.outFile)
1243                 contents = self.sections[section]
1244                 if contents:
1245                     write('\n'.join(contents), file=self.outFile)
1246                     self.newline()
1247             #write('// endFeature looking at self.sections[command]', file=self.outFile)
1248             if (self.sections['command']):
1249                 write('\n'.join(self.sections['command']), end=u'', file=self.outFile)
1250                 self.newline()
1251             if (self.featureExtraProtect != None):
1252                 write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile)
1253             if (self.genOpts.protectFeature):
1254                 write('#endif /*', self.featureName, '*/', file=self.outFile)
1255         # Finish processing in superclass
1256         OutputGenerator.endFeature(self)
1257         #write('// ending endFeature', file=self.outFile)
1258     #
1259     # Append a definition to the specified section
1260     def appendSection(self, section, text):
1261         # self.sections[section].append('SECTION: ' + section + '\n')
1262         self.sections[section].append(text)
1263     #
1264     # Type generation
1265     def genType(self, typeinfo, name, alias):
1266         pass
1267     #
1268     # Struct (e.g. C "struct" type) generation.
1269     # This is a special case of the <type> tag where the contents are
1270     # interpreted as a set of <member> tags instead of freeform C
1271     # type declarations. The <member> tags are just like <param>
1272     # tags - they are a declaration of a struct or union member.
1273     # Only simple member declarations are supported (no nested
1274     # structs etc.)
1275     def genStruct(self, typeinfo, typeName, alias):
1276         OutputGenerator.genStruct(self, typeinfo, typeName, alias)
1277         body = 'typedef ' + typeinfo.elem.get('category') + ' ' + typeName + ' {\n'
1278         # paramdecl = self.makeCParamDecl(typeinfo.elem, self.genOpts.alignFuncParam)
1279         for member in typeinfo.elem.findall('.//member'):
1280             body += self.makeCParamDecl(member, self.genOpts.alignFuncParam)
1281             body += ';\n'
1282         body += '} ' + typeName + ';\n'
1283         self.appendSection('struct', body)
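        # Illustrative example of the emitted text (member alignment depends on
        # genOpts.alignFuncParam):
        #   typedef struct VkExtent2D {
        #       uint32_t    width;
        #       uint32_t    height;
        #   } VkExtent2D;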
1284     #
1285     # Group (e.g. C "enum" type) generation.
1286     # These are concatenated together with other types.
1287     def genGroup(self, groupinfo, groupName, alias):
1288         pass
1289     # Enumerant generation
1290     # <enum> tags may specify their values in several ways, but are usually
1291     # just integers.
1292     def genEnum(self, enuminfo, name, alias):
1293         pass
1294     #
1295     # Command generation
1296     def genCmd(self, cmdinfo, name, alias):
1297         decls = self.makeCDecls(cmdinfo.elem)
1298         if self.header: # In the header declare all intercepts
1299             self.appendSection('command', '')
1300             self.appendSection('command', 'static %s' % (decls[0]))
1301             if (self.featureExtraProtect != None):
1302                 self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
1303             self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1304             if (self.featureExtraProtect != None):
1305                 self.intercepts += [ '#endif' ]
1306             return
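            # For the header, each command therefore yields a forward declaration plus a map
            # entry, e.g. (exact calling-convention macros depend on the generator options):
            #   static VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, ...);
            #   {"vkCmdDraw", (void*)CmdDraw},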
1307
1308         manual_functions = [
1309             # Include functions here to be intercepted w/ manually implemented function bodies
1310             'vkGetDeviceProcAddr',
1311             'vkGetInstanceProcAddr',
1312             'vkCreateDevice',
1313             'vkDestroyDevice',
1314             'vkCreateInstance',
1315             'vkDestroyInstance',
1316             #'vkCreateDebugReportCallbackEXT',
1317             #'vkDestroyDebugReportCallbackEXT',
1318             'vkEnumerateInstanceLayerProperties',
1319             'vkEnumerateInstanceVersion',
1320             'vkEnumerateInstanceExtensionProperties',
1321             'vkEnumerateDeviceLayerProperties',
1322             'vkEnumerateDeviceExtensionProperties',
1323         ]
1324         if name in manual_functions:
1325             self.appendSection('command', '')
1326             if name not in CUSTOM_C_INTERCEPTS:
1327                 self.appendSection('command', '// declare only')
1328                 self.appendSection('command', 'static %s' % (decls[0]))
1329                 self.appendSection('command', '// TODO: Implement custom intercept body')
1330             else:
1331                 self.appendSection('command', 'static %s' % (decls[0][:-1]))
1332                 self.appendSection('command', '{\n%s}' % (CUSTOM_C_INTERCEPTS[name]))
1333             self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1334             return
1335         # record that the function will be intercepted
1336         if (self.featureExtraProtect != None):
1337             self.intercepts += [ '#ifdef %s' % self.featureExtraProtect ]
1338         self.intercepts += [ '    {"%s", (void*)%s},' % (name,name[2:]) ]
1339         if (self.featureExtraProtect != None):
1340             self.intercepts += [ '#endif' ]
1341
1342         OutputGenerator.genCmd(self, cmdinfo, name, alias)
1343         #
1344         self.appendSection('command', '')
1345         self.appendSection('command', 'static %s' % (decls[0][:-1]))
1346         if name in CUSTOM_C_INTERCEPTS:
1347             self.appendSection('command', '{%s}' % (CUSTOM_C_INTERCEPTS[name]))
1348             return
1349
1350         # Declare result variable, if any.
1351         resulttype = cmdinfo.elem.find('proto/type')
1352         if (resulttype != None and resulttype.text == 'void'):
1353             resulttype = None
1354         # If the name with a KHR suffix is in CUSTOM_C_INTERCEPTS,
1355         # call the KHR custom version instead of generating separate code
1356         khr_name = name + "KHR"
1357         if khr_name in CUSTOM_C_INTERCEPTS:
1358             return_string = ''
1359             if resulttype != None:
1360                 return_string = 'return '
1361             params = cmdinfo.elem.findall('param/name')
1362             param_names = []
1363             for param in params:
1364                 param_names.append(param.text)
1365             self.appendSection('command', '{\n    %s%s(%s);\n}' % (return_string, khr_name[2:], ", ".join(param_names)))
1366             return
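            # Illustrative example: if GetPhysicalDeviceFeatures2KHR has a custom intercept,
            # the body generated here for vkGetPhysicalDeviceFeatures2 would be roughly:
            #   {
            #       GetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
            #   }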
1367         self.appendSection('command', '{')
1368
1369         api_function_name = cmdinfo.elem.attrib.get('name')
1370         # Determine the category of function (Create/Allocate, Destroy/Free, or other)
1371         if any(ftxt in api_function_name for ftxt in ['Create', 'Allocate']):
1372             # Get last param
1373             last_param = cmdinfo.elem.findall('param')[-1]
1374             lp_txt = last_param.find('name').text
1375             lp_len = None
1376             if ('len' in last_param.attrib):
1377                 lp_len = last_param.attrib['len']
1378                 lp_len = lp_len.replace('::', '->')
1379             lp_type = last_param.find('type').text
1380             handle_type = 'dispatchable'
1381             allocator_txt = 'CreateDispObjHandle()'
1382             if (self.isHandleTypeNonDispatchable(lp_type)):
1383                 handle_type = 'non-' + handle_type
1384                 allocator_txt = 'global_unique_handle++'
1385             # Need to lock in both cases
1386             self.appendSection('command', '    unique_lock_t lock(global_lock);')
1387             if (lp_len != None):
1388                 #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len))
1389                 self.appendSection('command', '    for (uint32_t i = 0; i < %s; ++i) {' % (lp_len))
1390                 self.appendSection('command', '        %s[i] = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
1391                 self.appendSection('command', '    }')
1392             else:
1393                 #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type))
1394                 if 'AllocateMemory' in api_function_name:
1395                     # Store allocation size in case it's mapped
1396                     self.appendSection('command', '    allocated_memory_size_map[(VkDeviceMemory)global_unique_handle] = pAllocateInfo->allocationSize;')
1397                 self.appendSection('command', '    *%s = (%s)%s;' % (lp_txt, lp_type, allocator_txt))
1398         elif any(ftxt in api_function_name for ftxt in ['Destroy', 'Free']):
1399             self.appendSection('command', '// Destroy object')
1400             if 'FreeMemory' in api_function_name:
1401                 # Remove from allocation map
1402                 self.appendSection('command', '    allocated_memory_size_map.erase(memory);')
1403         else:
1404             self.appendSection('command', '// Not a CREATE or DESTROY function')
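        # Illustrative examples of the handle code emitted above (assuming typical registry
        # data for these commands):
        #   vkCreateSampler:           *pSampler = (VkSampler)global_unique_handle++;
        #   vkAllocateCommandBuffers:  for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; ++i) {
        #                                  pCommandBuffers[i] = (VkCommandBuffer)CreateDispObjHandle();
        #                              }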
1405
1406         # Return result variable, if any.
1407         if (resulttype != None):
1408             if api_function_name == 'vkGetEventStatus':
1409                 self.appendSection('command', '    return VK_EVENT_SET;')
1410             else:
1411                 self.appendSection('command', '    return VK_SUCCESS;')
1412         self.appendSection('command', '}')
1413     #
1414     # override makeProtoName to drop the "vk" prefix
1415     def makeProtoName(self, name, tail):
1416         return self.genOpts.apientry + name[2:] + tail
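        # e.g. "vkCreateDevice" becomes genOpts.apientry + "CreateDevice", so generated
        # implementation names line up with the (void*)<Name> entries recorded in
        # name_to_funcptr_map above.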