1 /*-------------------------------------------------------------------------
5 * Copyright (c) 2015 The Khronos Group Inc.
6 * Copyright (c) 2015 Imagination Technologies Ltd.
7 * Copyright (c) 2015 Google Inc.
9 * Licensed under the Apache License, Version 2.0 (the "License");
10 * you may not use this file except in compliance with the License.
11 * You may obtain a copy of the License at
13 * http://www.apache.org/licenses/LICENSE-2.0
15 * Unless required by applicable law or agreed to in writing, software
16 * distributed under the License is distributed on an "AS IS" BASIS,
17 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 * See the License for the specific language governing permissions and
19 * limitations under the License.
23 * \brief Utilities for images.
24 *//*--------------------------------------------------------------------*/
26 #include "vkImageUtil.hpp"
27 #include "vkRefUtil.hpp"
28 #include "vkQueryUtil.hpp"
29 #include "vkTypeUtil.hpp"
30 #include "vkCmdUtil.hpp"
31 #include "tcuTextureUtil.hpp"
37 bool isFloatFormat (VkFormat format)
39 return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_FLOATING_POINT;
42 bool isUnormFormat (VkFormat format)
44 return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
47 bool isSnormFormat (VkFormat format)
49 return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT;
52 bool isIntFormat (VkFormat format)
54 return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER;
57 bool isUintFormat (VkFormat format)
59 return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER;
62 bool isDepthStencilFormat (VkFormat format)
// Compressed and YCbCr formats have no single tcu mapping; they are screened
// out before mapVkFormat() is called (elided bodies presumably return false
// — TODO confirm against full source).
64 	if (isCompressedFormat(format))
67 	if (isYCbCrFormat(format))
// Depth, stencil and combined depth/stencil channel orders all count.
70 	const tcu::TextureFormat tcuFormat = mapVkFormat(format);
71 	return tcuFormat.order == tcu::TextureFormat::D || tcuFormat.order == tcu::TextureFormat::S || tcuFormat.order == tcu::TextureFormat::DS;
74 bool isSrgbFormat (VkFormat format)
// True when the mapped tcu channel order is one of the sRGB-encoded orders.
76 	switch (mapVkFormat(format).order)
78 		case tcu::TextureFormat::sR:
79 		case tcu::TextureFormat::sRG:
80 		case tcu::TextureFormat::sRGB:
81 		case tcu::TextureFormat::sRGBA:
82 		case tcu::TextureFormat::sBGR:
83 		case tcu::TextureFormat::sBGRA:
91 bool isUfloatFormat (VkFormat format)
// True for unsigned-float formats (floating point with no sign bit).
// The static assert guards this hand-maintained list against VkFormat growing.
93 	DE_STATIC_ASSERT(VK_CORE_FORMAT_LAST == 185);
97 		case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
98 		case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
99 		case VK_FORMAT_BC6H_UFLOAT_BLOCK:
107 bool isSfloatFormat (VkFormat format)
// True for signed-float formats (16/32/64-bit SFLOAT channels, D32 and BC6H signed).
// The static assert guards this hand-maintained list against VkFormat growing.
109 	DE_STATIC_ASSERT(VK_CORE_FORMAT_LAST == 185);
113 		case VK_FORMAT_R16_SFLOAT:
114 		case VK_FORMAT_R16G16_SFLOAT:
115 		case VK_FORMAT_R16G16B16_SFLOAT:
116 		case VK_FORMAT_R16G16B16A16_SFLOAT:
117 		case VK_FORMAT_R32_SFLOAT:
118 		case VK_FORMAT_R32G32_SFLOAT:
119 		case VK_FORMAT_R32G32B32_SFLOAT:
120 		case VK_FORMAT_R32G32B32A32_SFLOAT:
121 		case VK_FORMAT_R64_SFLOAT:
122 		case VK_FORMAT_R64G64_SFLOAT:
123 		case VK_FORMAT_R64G64B64_SFLOAT:
124 		case VK_FORMAT_R64G64B64A64_SFLOAT:
125 		case VK_FORMAT_D32_SFLOAT:
126 		case VK_FORMAT_BC6H_SFLOAT_BLOCK:
134 bool isCompressedFormat (VkFormat format)
// True for all block-compressed formats: BC1-BC7, ETC2/EAC and ASTC LDR.
136 	// update this mapping if VkFormat changes
137 	DE_STATIC_ASSERT(VK_CORE_FORMAT_LAST == 185);
141 		case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
142 		case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
143 		case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
144 		case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
145 		case VK_FORMAT_BC2_UNORM_BLOCK:
146 		case VK_FORMAT_BC2_SRGB_BLOCK:
147 		case VK_FORMAT_BC3_UNORM_BLOCK:
148 		case VK_FORMAT_BC3_SRGB_BLOCK:
149 		case VK_FORMAT_BC4_UNORM_BLOCK:
150 		case VK_FORMAT_BC4_SNORM_BLOCK:
151 		case VK_FORMAT_BC5_UNORM_BLOCK:
152 		case VK_FORMAT_BC5_SNORM_BLOCK:
153 		case VK_FORMAT_BC6H_UFLOAT_BLOCK:
154 		case VK_FORMAT_BC6H_SFLOAT_BLOCK:
155 		case VK_FORMAT_BC7_UNORM_BLOCK:
156 		case VK_FORMAT_BC7_SRGB_BLOCK:
// ETC2 / EAC family.
157 		case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
158 		case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
159 		case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
160 		case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
161 		case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
162 		case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
163 		case VK_FORMAT_EAC_R11_UNORM_BLOCK:
164 		case VK_FORMAT_EAC_R11_SNORM_BLOCK:
165 		case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
166 		case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
// ASTC LDR, all block footprints.
167 		case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
168 		case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
169 		case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
170 		case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
171 		case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
172 		case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
173 		case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
174 		case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
175 		case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
176 		case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
177 		case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
178 		case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
179 		case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
180 		case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
181 		case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
182 		case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
183 		case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
184 		case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
185 		case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
186 		case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
187 		case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
188 		case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
189 		case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
190 		case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
191 		case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
192 		case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
193 		case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
194 		case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
202 bool isYCbCrFormat (VkFormat format)
// True for all VK_KHR_sampler_ycbcr_conversion formats: interleaved 422,
// 2/3-plane 420/422/444, and the R10X6/R12X4 packed companion formats.
// 8-bit family.
206 		case VK_FORMAT_G8B8G8R8_422_UNORM_KHR:
207 		case VK_FORMAT_B8G8R8G8_422_UNORM_KHR:
208 		case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR:
209 		case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR:
210 		case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR:
211 		case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR:
212 		case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR:
// 10-bit (10X6) family, packed into 16-bit words.
213 		case VK_FORMAT_R10X6_UNORM_PACK16_KHR:
214 		case VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR:
215 		case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR:
216 		case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR:
217 		case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR:
218 		case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR:
219 		case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR:
220 		case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR:
221 		case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR:
222 		case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR:
// 12-bit (12X4) family, packed into 16-bit words.
223 		case VK_FORMAT_R12X4_UNORM_PACK16_KHR:
224 		case VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR:
225 		case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR:
226 		case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR:
227 		case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR:
228 		case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR:
229 		case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR:
230 		case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR:
231 		case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR:
232 		case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR:
// 16-bit family.
233 		case VK_FORMAT_G16B16G16R16_422_UNORM_KHR:
234 		case VK_FORMAT_B16G16R16G16_422_UNORM_KHR:
235 		case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR:
236 		case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR:
237 		case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR:
238 		case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR:
239 		case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR:
247 bool isYCbCr420Format (VkFormat format)
// True for the 4:2:0 subsampled YCbCr formats (chroma halved in both axes).
251 		case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR:
252 		case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR:
253 		case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR:
254 		case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR:
255 		case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR:
256 		case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR:
257 		case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR:
258 		case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR:
266 bool isYCbCr422Format (VkFormat format)
// True for the 4:2:2 subsampled YCbCr formats (chroma halved horizontally).
270 		case VK_FORMAT_G8B8G8R8_422_UNORM_KHR:
271 		case VK_FORMAT_B8G8R8G8_422_UNORM_KHR:
272 		case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR:
273 		case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR:
274 		case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR:
275 		case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR:
276 		case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR:
277 		case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR:
278 		case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR:
279 		case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR:
280 		case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR:
281 		case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR:
282 		case VK_FORMAT_G16B16G16R16_422_UNORM_KHR:
283 		case VK_FORMAT_B16G16R16G16_422_UNORM_KHR:
284 		case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR:
285 		case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR:
293 const PlanarFormatDescription& getYCbCrPlanarFormatDescription (VkFormat format)
// Returns the plane/channel layout description for a YCbCr format.
// The table below is indexed by VkFormat value relative to
// VK_FORMAT_G8B8G8R8_422_UNORM_KHR, so entries MUST stay in enum order.
// Channel entries are { planeNdx, type, bitOffset, bitSize, byteStride }.
295 	using tcu::TextureFormat;
297 	const deUint32	chanR			= PlanarFormatDescription::CHANNEL_R;
298 	const deUint32	chanG			= PlanarFormatDescription::CHANNEL_G;
299 	const deUint32	chanB			= PlanarFormatDescription::CHANNEL_B;
300 	const deUint32	chanA			= PlanarFormatDescription::CHANNEL_A;
302 	const deUint8	unorm			= (deUint8)tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
304 	static const PlanarFormatDescription	s_formatInfo[]	=
306 		// VK_FORMAT_G8B8G8R8_422_UNORM_KHR
317 			//		Plane	Type	Offs	Size	Stride
318 			{		0,		unorm,	24,		8,		4 },	// R
319 			{		0,		unorm,	0,		8,		2 },	// G
320 			{		0,		unorm,	8,		8,		4 },	// B
324 		// VK_FORMAT_B8G8R8G8_422_UNORM_KHR
335 			//		Plane	Type	Offs	Size	Stride
336 			{		0,		unorm,	16,		8,		4 },	// R
337 			{		0,		unorm,	8,		8,		2 },	// G
338 			{		0,		unorm,	0,		8,		4 },	// B
342 		// VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR
353 			//		Plane	Type	Offs	Size	Stride
354 			{		2,		unorm,	0,		8,		1 },	// R
355 			{		0,		unorm,	0,		8,		1 },	// G
356 			{		1,		unorm,	0,		8,		1 },	// B
360 		// VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR
371 			//		Plane	Type	Offs	Size	Stride
372 			{		1,		unorm,	8,		8,		2 },	// R
373 			{		0,		unorm,	0,		8,		1 },	// G
374 			{		1,		unorm,	0,		8,		2 },	// B
378 		// VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR
389 			//		Plane	Type	Offs	Size	Stride
390 			{		2,		unorm,	0,		8,		1 },	// R
391 			{		0,		unorm,	0,		8,		1 },	// G
392 			{		1,		unorm,	0,		8,		1 },	// B
396 		// VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR
407 			//		Plane	Type	Offs	Size	Stride
408 			{		1,		unorm,	8,		8,		2 },	// R
409 			{		0,		unorm,	0,		8,		1 },	// G
410 			{		1,		unorm,	0,		8,		2 },	// B
414 		// VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR
425 			//		Plane	Type	Offs	Size	Stride
426 			{		2,		unorm,	0,		8,		1 },	// R
427 			{		0,		unorm,	0,		8,		1 },	// G
428 			{		1,		unorm,	0,		8,		1 },	// B
// 10X6 formats keep the value in the TOP 10 bits of each 16-bit word,
// hence the bit offset of 6.
432 		// VK_FORMAT_R10X6_UNORM_PACK16_KHR
443 			//		Plane	Type	Offs	Size	Stride
444 			{		0,		unorm,	6,		10,		2 },	// R
450 		// VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR
461 			//		Plane	Type	Offs	Size	Stride
462 			{		0,		unorm,	6,		10,		4 },	// R
463 			{		0,		unorm,	22,		10,		4 },	// G
468 		// VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR
471 			chanR|chanG|chanB|chanA,
479 			//		Plane	Type	Offs	Size	Stride
480 			{		0,		unorm,	6,		10,		8 },	// R
481 			{		0,		unorm,	22,		10,		8 },	// G
482 			{		0,		unorm,	38,		10,		8 },	// B
483 			{		0,		unorm,	54,		10,		8 },	// A
486 		// VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR
497 			//		Plane	Type	Offs	Size	Stride
498 			{		0,		unorm,	54,		10,		8 },	// R
499 			{		0,		unorm,	6,		10,		4 },	// G
500 			{		0,		unorm,	22,		10,		8 },	// B
504 		// VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR
515 			//		Plane	Type	Offs	Size	Stride
516 			{		0,		unorm,	38,		10,		8 },	// R
517 			{		0,		unorm,	22,		10,		4 },	// G
518 			{		0,		unorm,	6,		10,		8 },	// B
522 		// VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR
533 			//		Plane	Type	Offs	Size	Stride
534 			{		2,		unorm,	6,		10,		2 },	// R
535 			{		0,		unorm,	6,		10,		2 },	// G
536 			{		1,		unorm,	6,		10,		2 },	// B
540 		// VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR
551 			//		Plane	Type	Offs	Size	Stride
552 			{		1,		unorm,	22,		10,		4 },	// R
553 			{		0,		unorm,	6,		10,		2 },	// G
554 			{		1,		unorm,	6,		10,		4 },	// B
558 		// VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR
569 			//		Plane	Type	Offs	Size	Stride
570 			{		2,		unorm,	6,		10,		2 },	// R
571 			{		0,		unorm,	6,		10,		2 },	// G
572 			{		1,		unorm,	6,		10,		2 },	// B
576 		// VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR
587 			//		Plane	Type	Offs	Size	Stride
588 			{		1,		unorm,	22,		10,		4 },	// R
589 			{		0,		unorm,	6,		10,		2 },	// G
590 			{		1,		unorm,	6,		10,		4 },	// B
594 		// VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR
605 			//		Plane	Type	Offs	Size	Stride
606 			{		2,		unorm,	6,		10,		2 },	// R
607 			{		0,		unorm,	6,		10,		2 },	// G
608 			{		1,		unorm,	6,		10,		2 },	// B
// 12X4 formats keep the value in the TOP 12 bits of each 16-bit word,
// hence the bit offset of 4.
612 		// VK_FORMAT_R12X4_UNORM_PACK16_KHR
623 			//		Plane	Type	Offs	Size	Stride
624 			{		0,		unorm,	4,		12,		2 },	// R
630 		// VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR
641 			//		Plane	Type	Offs	Size	Stride
642 			{		0,		unorm,	4,		12,		4 },	// R
643 			{		0,		unorm,	20,		12,		4 },	// G
648 		// VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR
651 			chanR|chanG|chanB|chanA,
659 			//		Plane	Type	Offs	Size	Stride
660 			{		0,		unorm,	4,		12,		8 },	// R
661 			{		0,		unorm,	20,		12,		8 },	// G
662 			{		0,		unorm,	36,		12,		8 },	// B
663 			{		0,		unorm,	52,		12,		8 },	// A
666 		// VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR
677 			//		Plane	Type	Offs	Size	Stride
678 			{		0,		unorm,	52,		12,		8 },	// R
679 			{		0,		unorm,	4,		12,		4 },	// G
680 			{		0,		unorm,	20,		12,		8 },	// B
684 		// VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR
695 			//		Plane	Type	Offs	Size	Stride
696 			{		0,		unorm,	36,		12,		8 },	// R
697 			{		0,		unorm,	20,		12,		4 },	// G
698 			{		0,		unorm,	4,		12,		8 },	// B
702 		// VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR
713 			//		Plane	Type	Offs	Size	Stride
714 			{		2,		unorm,	4,		12,		2 },	// R
715 			{		0,		unorm,	4,		12,		2 },	// G
716 			{		1,		unorm,	4,		12,		2 },	// B
720 		// VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR
731 			//		Plane	Type	Offs	Size	Stride
732 			{		1,		unorm,	20,		12,		4 },	// R
733 			{		0,		unorm,	4,		12,		2 },	// G
734 			{		1,		unorm,	4,		12,		4 },	// B
738 		// VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR
749 			//		Plane	Type	Offs	Size	Stride
750 			{		2,		unorm,	4,		12,		2 },	// R
751 			{		0,		unorm,	4,		12,		2 },	// G
752 			{		1,		unorm,	4,		12,		2 },	// B
756 		// VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR
767 			//		Plane	Type	Offs	Size	Stride
768 			{		1,		unorm,	20,		12,		4 },	// R
769 			{		0,		unorm,	4,		12,		2 },	// G
770 			{		1,		unorm,	4,		12,		4 },	// B
774 		// VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR
785 			//		Plane	Type	Offs	Size	Stride
786 			{		2,		unorm,	4,		12,		2 },	// R
787 			{		0,		unorm,	4,		12,		2 },	// G
788 			{		1,		unorm,	4,		12,		2 },	// B
792 		// VK_FORMAT_G16B16G16R16_422_UNORM_KHR
803 			//		Plane	Type	Offs	Size	Stride
804 			{		0,		unorm,	48,		16,		8 },	// R
805 			{		0,		unorm,	0,		16,		4 },	// G
806 			{		0,		unorm,	16,		16,		8 },	// B
810 		// VK_FORMAT_B16G16R16G16_422_UNORM_KHR
821 			//		Plane	Type	Offs	Size	Stride
822 			{		0,		unorm,	32,		16,		8 },	// R
823 			{		0,		unorm,	16,		16,		4 },	// G
824 			{		0,		unorm,	0,		16,		8 },	// B
828 		// VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR
839 			//		Plane	Type	Offs	Size	Stride
840 			{		2,		unorm,	0,		16,		2 },	// R
841 			{		0,		unorm,	0,		16,		2 },	// G
842 			{		1,		unorm,	0,		16,		2 },	// B
846 		// VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR
857 			//		Plane	Type	Offs	Size	Stride
858 			{		1,		unorm,	16,		16,		4 },	// R
859 			{		0,		unorm,	0,		16,		2 },	// G
860 			{		1,		unorm,	0,		16,		4 },	// B
864 		// VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR
875 			//		Plane	Type	Offs	Size	Stride
876 			{		2,		unorm,	0,		16,		2 },	// R
877 			{		0,		unorm,	0,		16,		2 },	// G
878 			{		1,		unorm,	0,		16,		2 },	// B
882 		// VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR
893 			//		Plane	Type	Offs	Size	Stride
894 			{		1,		unorm,	16,		16,		4 },	// R
895 			{		0,		unorm,	0,		16,		2 },	// G
896 			{		1,		unorm,	0,		16,		4 },	// B
900 		// VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR
911 			//		Plane	Type	Offs	Size	Stride
912 			{		2,		unorm,	0,		16,		2 },	// R
913 			{		0,		unorm,	0,		16,		2 },	// G
914 			{		1,		unorm,	0,		16,		2 },	// B
// Look-up is a plain offset into the table; the assert catches formats
// outside the contiguous YCbCr enum range.
920 	const size_t			offset	= (size_t)VK_FORMAT_G8B8G8R8_422_UNORM_KHR;
922 	DE_ASSERT(de::inBounds<size_t>((size_t)format, offset, offset+(size_t)DE_LENGTH_OF_ARRAY(s_formatInfo)));
924 	return s_formatInfo[(size_t)format-offset];
927 PlanarFormatDescription getCorePlanarFormatDescription (VkFormat format)
// Builds a single-plane PlanarFormatDescription for a core (non-YCbCr) format.
// Channel entries are { planeNdx, type, bitOffset, bitSize, byteStride };
// zeroed entries mean "channel not present". Throws for unimplemented formats.
929 	const deUint8	unorm	= (deUint8)tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
930 	const deUint8	uint	= (deUint8)tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER;
931 	const deUint8	sfloat	= (deUint8)tcu::TEXTURECHANNELCLASS_FLOATING_POINT;
933 	const deUint8	chanR	= (deUint8)PlanarFormatDescription::CHANNEL_R;
934 	const deUint8	chanG	= (deUint8)PlanarFormatDescription::CHANNEL_G;
935 	const deUint8	chanB	= (deUint8)PlanarFormatDescription::CHANNEL_B;
936 	const deUint8	chanA	= (deUint8)PlanarFormatDescription::CHANNEL_A;
938 	DE_ASSERT(de::inBounds<deUint32>(format, VK_FORMAT_UNDEFINED+1, VK_CORE_FORMAT_LAST));
// Bit offsets below assume little-endian packing; enforced at compile time.
940 #if (DE_ENDIANNESS != DE_LITTLE_ENDIAN)
941 #	error "Big-endian is not supported"
946 		case VK_FORMAT_R8_UNORM:
948 			const PlanarFormatDescription	desc	=
959 				//		Plane	Type	Offs	Size	Stride
960 				{		0,		unorm,	0,		8,		1 },	// R
961 				{		0,		0,		0,		0,		0 },	// G
962 				{		0,		0,		0,		0,		0 },	// B
963 				{		0,		0,		0,		0,		0 }		// A
969 		case VK_FORMAT_R8G8_UNORM:
971 			const PlanarFormatDescription	desc	=
982 				//		Plane	Type	Offs	Size	Stride
983 				{		0,		unorm,	0,		8,		2 },	// R
984 				{		0,		unorm,	8,		8,		2 },	// G
985 				{		0,		0,		0,		0,		0 },	// B
986 				{		0,		0,		0,		0,		0 }		// A
992 		case VK_FORMAT_R16_UNORM:
994 			const PlanarFormatDescription	desc	=
1005 				//		Plane	Type	Offs	Size	Stride
1006 				{		0,		unorm,	0,		16,		2 },	// R
1007 				{		0,		0,		0,		0,		0 },	// G
1008 				{		0,		0,		0,		0,		0 },	// B
1009 				{		0,		0,		0,		0,		0 }		// A
1015 		case VK_FORMAT_R16G16_UNORM:
1017 			const PlanarFormatDescription	desc	=
1028 				//		Plane	Type	Offs	Size	Stride
1029 				{		0,		unorm,	0,		16,		4 },	// R
1030 				{		0,		unorm,	16,		16,		4 },	// G
1031 				{		0,		0,		0,		0,		0 },	// B
1032 				{		0,		0,		0,		0,		0 }		// A
1038 		case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
// NOTE(review): channels here are tagged `unorm` although the format is
// UFLOAT — confirm whether an unsigned-float channel class was intended.
1040 			const PlanarFormatDescription	desc	=
1051 				//		Plane	Type	Offs	Size	Stride
1052 				{		0,		unorm,	0,		11,		4 },	// R
1053 				{		0,		unorm,	11,		11,		4 },	// G
1054 				{		0,		unorm,	22,		10,		4 },	// B
1055 				{		0,		0,		0,		0,		0 }		// A
1061 		case VK_FORMAT_R4G4_UNORM_PACK8:
1063 			const PlanarFormatDescription	desc	=
1074 				//		Plane	Type	Offs	Size	Stride
1075 				{		0,		unorm,	4,		4,		1 },	// R
1076 				{		0,		unorm,	0,		4,		1 },	// G
1077 				{		0,		0,		0,		0,		0 },	// B
1078 				{		0,		0,		0,		0,		0 }		// A
1084 		case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
1086 			const PlanarFormatDescription	desc	=
1089 				chanR|chanG|chanB|chanA,
1097 				//		Plane	Type	Offs	Size	Stride
1098 				{		0,		unorm,	12,		4,		2 },	// R
1099 				{		0,		unorm,	8,		4,		2 },	// G
1100 				{		0,		unorm,	4,		4,		2 },	// B
1101 				{		0,		unorm,	0,		4,		2 }		// A
1107 		case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
1109 			const PlanarFormatDescription	desc	=
1112 				chanR|chanG|chanB|chanA,
1120 				//		Plane	Type	Offs	Size	Stride
1121 				{		0,		unorm,	4,		4,		2 },	// R
1122 				{		0,		unorm,	8,		4,		2 },	// G
1123 				{		0,		unorm,	12,		4,		2 },	// B
1124 				{		0,		unorm,	0,		4,		2 }		// A
1130 		case VK_FORMAT_R5G6B5_UNORM_PACK16:
1132 			const PlanarFormatDescription	desc	=
1143 				//		Plane	Type	Offs	Size	Stride
1144 				{		0,		unorm,	11,		5,		2 },	// R
1145 				{		0,		unorm,	5,		6,		2 },	// G
1146 				{		0,		unorm,	0,		5,		2 },	// B
1147 				{		0,		0,		0,		0,		0 }		// A
1153 		case VK_FORMAT_B5G6R5_UNORM_PACK16:
1155 			const PlanarFormatDescription	desc	=
1166 				//		Plane	Type	Offs	Size	Stride
1167 				{		0,		unorm,	0,		5,		2 },	// R
1168 				{		0,		unorm,	5,		6,		2 },	// G
1169 				{		0,		unorm,	11,		5,		2 },	// B
1170 				{		0,		0,		0,		0,		0 }		// A
1176 		case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
1178 			const PlanarFormatDescription	desc	=
1181 				chanR|chanG|chanB|chanA,
1189 				//		Plane	Type	Offs	Size	Stride
1190 				{		0,		unorm,	11,		5,		2 },	// R
1191 				{		0,		unorm,	6,		5,		2 },	// G
1192 				{		0,		unorm,	1,		5,		2 },	// B
1193 				{		0,		unorm,	0,		1,		2 }		// A
1199 		case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
1201 			const PlanarFormatDescription	desc	=
1204 				chanR|chanG|chanB|chanA,
1212 				//		Plane	Type	Offs	Size	Stride
1213 				{		0,		unorm,	1,		5,		2 },	// R
1214 				{		0,		unorm,	6,		5,		2 },	// G
1215 				{		0,		unorm,	11,		5,		2 },	// B
1216 				{		0,		unorm,	0,		1,		2 }		// A
1222 		case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
1224 			const PlanarFormatDescription	desc	=
1227 				chanR|chanG|chanB|chanA,
1235 				//		Plane	Type	Offs	Size	Stride
1236 				{		0,		unorm,	10,		5,		2 },	// R
1237 				{		0,		unorm,	5,		5,		2 },	// G
1238 				{		0,		unorm,	0,		5,		2 },	// B
1239 				{		0,		unorm,	15,		1,		2 }		// A
1245 		case VK_FORMAT_R8G8B8_UNORM:
1247 			const PlanarFormatDescription	desc	=
1258 				//		Plane	Type	Offs	Size	Stride
1259 				{		0,		unorm,	0,		8,		3 },	// R
1260 				{		0,		unorm,	8,		8,		3 },	// G
1261 				{		0,		unorm,	16,		8,		3 },	// B
1262 				{		0,		0,		0,		0,		0 }		// A
1268 		case VK_FORMAT_B8G8R8_UNORM:
1270 			const PlanarFormatDescription	desc	=
1281 				//		Plane	Type	Offs	Size	Stride
1282 				{		0,		unorm,	16,		8,		3 },	// R
1283 				{		0,		unorm,	8,		8,		3 },	// G
1284 				{		0,		unorm,	0,		8,		3 },	// B
1285 				{		0,		0,		0,		0,		0 }		// A
// On little-endian hosts A8B8G8R8_PACK32 has the same byte layout as R8G8B8A8,
// so the two cases share one description.
1291 		case VK_FORMAT_R8G8B8A8_UNORM:
1292 		case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
1294 			const PlanarFormatDescription	desc	=
1297 				chanR|chanG|chanB|chanA,
1305 				//		Plane	Type	Offs	Size	Stride
1306 				{		0,		unorm,	0,		8,		4 },	// R
1307 				{		0,		unorm,	8,		8,		4 },	// G
1308 				{		0,		unorm,	16,		8,		4 },	// B
1309 				{		0,		unorm,	24,		8,		4 }		// A
1315 		case VK_FORMAT_B8G8R8A8_UNORM:
1317 			const PlanarFormatDescription	desc	=
1320 				chanR|chanG|chanB|chanA,
1328 				//		Plane	Type	Offs	Size	Stride
1329 				{		0,		unorm,	16,		8,		4 },	// R
1330 				{		0,		unorm,	8,		8,		4 },	// G
1331 				{		0,		unorm,	0,		8,		4 },	// B
1332 				{		0,		unorm,	24,		8,		4 }		// A
1338 		case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
1340 			const PlanarFormatDescription	desc	=
1343 				chanR|chanG|chanB|chanA,
1351 				//		Plane	Type	Offs	Size	Stride
1352 				{		0,		unorm,	20,		10,		4 },	// R
1353 				{		0,		unorm,	10,		10,		4 },	// G
1354 				{		0,		unorm,	0,		10,		4 },	// B
1355 				{		0,		unorm,	30,		2,		4 }		// A
1361 		case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
1363 			const PlanarFormatDescription	desc	=
1366 				chanR|chanG|chanB|chanA,
1374 				//		Plane	Type	Offs	Size	Stride
1375 				{		0,		unorm,	0,		10,		4 },	// R
1376 				{		0,		unorm,	10,		10,		4 },	// G
1377 				{		0,		unorm,	20,		10,		4 },	// B
1378 				{		0,		unorm,	30,		2,		4 }		// A
1384 		case VK_FORMAT_R16G16B16_UNORM:
1386 			const PlanarFormatDescription	desc	=
1397 				//		Plane	Type	Offs	Size	Stride
1398 				{		0,		unorm,	0,		16,		6 },	// R
1399 				{		0,		unorm,	16,		16,		6 },	// G
1400 				{		0,		unorm,	32,		16,		6 },	// B
1401 				{		0,		0,		0,		0,		0 }		// A
1407 		case VK_FORMAT_R16G16B16A16_UNORM:
1409 			const PlanarFormatDescription	desc	=
1412 				chanR|chanG|chanB|chanA,
1420 				//		Plane	Type	Offs	Size	Stride
1421 				{		0,		unorm,	0,		16,		8 },	// R
1422 				{		0,		unorm,	16,		16,		8 },	// G
1423 				{		0,		unorm,	32,		16,		8 },	// B
1424 				{		0,		unorm,	48,		16,		8 }		// A
1430 		case VK_FORMAT_R16_UINT:
// NOTE(review): size field is 2 here, but sibling entries (R16_UNORM,
// R32_SFLOAT, S8_UINT) use BIT sizes — expected 16; confirm against upstream.
1432 			const PlanarFormatDescription	desc	=
1443 				//		Plane	Type	Offs	Size	Stride
1444 				{		0,		uint,	0,		2,		2 },	// R
1450 		case VK_FORMAT_R32_UINT:
// NOTE(review): size field is 4 here — by the bit-size convention used
// elsewhere in this table it should be 32; confirm against upstream.
1452 			const PlanarFormatDescription	desc	=
1463 				//		Plane	Type	Offs	Size	Stride
1464 				{		0,		uint,	0,		4,		4 },	// R
// R32_SFLOAT and D32_SFLOAT share one single-channel float description.
1470 		case VK_FORMAT_R32_SFLOAT:
1471 		case VK_FORMAT_D32_SFLOAT:
1473 			const PlanarFormatDescription	desc	=
1484 				//		Plane	Type	Offs	Size	Stride
1485 				{		0,		sfloat,	0,		32,		4 },	// R
1491 		case VK_FORMAT_D16_UNORM:
1493 			const PlanarFormatDescription	desc	=
1504 				//		Plane	Type	Offs	Size	Stride
1505 				{		0,		unorm,	0,		16,		2 },	// R
1511 		case VK_FORMAT_S8_UINT:
1513 			const PlanarFormatDescription	desc	=
1524 				//		Plane	Type	Offs	Size	Stride
1525 				{		0,		uint,	0,		8,		1 },	// R
1531 		case VK_FORMAT_R16G16B16A16_UINT:
1533 			const PlanarFormatDescription	desc	=
1536 				chanR|chanG|chanB|chanA,
1544 				//		Plane	Type	Offs	Size	Stride
1545 				{		0,		uint,	0,		16,		8 },	// R
1546 				{		0,		uint,	16,		16,		8 },	// G
1547 				{		0,		uint,	32,		16,		8 },	// B
1548 				{		0,		uint,	48,		16,		8 },	// A
1554 		case VK_FORMAT_R32G32B32A32_UINT:
1556 			const PlanarFormatDescription	desc	=
1559 				chanR|chanG|chanB|chanA,
1567 				//		Plane	Type	Offs	Size	Stride
1568 				{		0,		uint,	0,		32,		16 },	// R
1569 				{		0,		uint,	32,		32,		16 },	// G
1570 				{		0,		uint,	64,		32,		16 },	// B
1571 				{		0,		uint,	96,		32,		16 },	// A
1577 		case VK_FORMAT_R32G32B32A32_SFLOAT:
1579 			const PlanarFormatDescription	desc	=
1582 				chanR|chanG|chanB|chanA,
1590 				//		Plane	Type	Offs	Size	Stride
1591 				{		0,		sfloat,	0,		32,		16 },	// R
1592 				{		0,		sfloat,	32,		32,		16 },	// G
1593 				{		0,		sfloat,	64,		32,		16 },	// B
1594 				{		0,		sfloat,	96,		32,		16 },	// A
// Any core format not handled above is an internal error, not "unsupported".
1601 			TCU_THROW(InternalError, "Not implemented");
1605 PlanarFormatDescription getPlanarFormatDescription (VkFormat format)
1607 if (isYCbCrFormat(format))
1608 return getYCbCrPlanarFormatDescription(format);
1610 return getCorePlanarFormatDescription(format);
1613 int getPlaneCount (VkFormat format)
// Number of memory planes in a YCbCr format; DE_FATALs for non-YCbCr formats.
// Single-plane (interleaved or packed) formats:
1617 		case VK_FORMAT_G8B8G8R8_422_UNORM_KHR:
1618 		case VK_FORMAT_B8G8R8G8_422_UNORM_KHR:
1619 		case VK_FORMAT_R10X6_UNORM_PACK16_KHR:
1620 		case VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR:
1621 		case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR:
1622 		case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR:
1623 		case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR:
1624 		case VK_FORMAT_R12X4_UNORM_PACK16_KHR:
1625 		case VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR:
1626 		case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR:
1627 		case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR:
1628 		case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR:
1629 		case VK_FORMAT_G16B16G16R16_422_UNORM_KHR:
1630 		case VK_FORMAT_B16G16R16G16_422_UNORM_KHR:
// Two-plane formats (luma plane + interleaved chroma plane):
1633 		case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR:
1634 		case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR:
1635 		case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR:
1636 		case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR:
1637 		case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR:
1638 		case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR:
1639 		case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR:
1640 		case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR:
// Three-plane formats (separate G, B and R planes):
1643 		case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR:
1644 		case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR:
1645 		case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR:
1646 		case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR:
1647 		case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR:
1648 		case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR:
1649 		case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR:
1650 		case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR:
1651 		case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR:
1652 		case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR:
1653 		case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR:
1654 		case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR:
// Programmer error: caller must pass a YCbCr format.
1658 			DE_FATAL("Not YCbCr format");
1663 VkImageAspectFlagBits getPlaneAspect (deUint32 planeNdx)
1665 DE_ASSERT(de::inBounds(planeNdx, 0u, 3u));
1666 return (VkImageAspectFlagBits)(VK_IMAGE_ASPECT_PLANE_0_BIT_KHR << planeNdx);
1669 deUint32 getAspectPlaneNdx (VkImageAspectFlagBits flags)
// Inverse of getPlaneAspect(): maps a single plane-aspect bit to its plane index.
// `flags` must contain exactly one of the PLANE_n bits.
1673 		case VK_IMAGE_ASPECT_PLANE_0_BIT_KHR:	return 0;
1674 		case VK_IMAGE_ASPECT_PLANE_1_BIT_KHR:	return 1;
1675 		case VK_IMAGE_ASPECT_PLANE_2_BIT_KHR:	return 2;
// Anything else (combined bits, non-plane aspects) is a programmer error.
1677 			DE_FATAL("Invalid plane aspect");
1682 bool isChromaSubsampled (VkFormat format)
1686 case VK_FORMAT_G8B8G8R8_422_UNORM_KHR:
1687 case VK_FORMAT_B8G8R8G8_422_UNORM_KHR:
1688 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR:
1689 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR:
1690 case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR:
1691 case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR:
1692 case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR:
1693 case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR:
1694 case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR:
1695 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR:
1696 case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR:
1697 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR:
1698 case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR:
1699 case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR:
1700 case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR:
1701 case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR:
1702 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR:
1703 case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR:
1704 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR:
1705 case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR:
1706 case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR:
1707 case VK_FORMAT_G16B16G16R16_422_UNORM_KHR:
1708 case VK_FORMAT_B16G16R16G16_422_UNORM_KHR:
1709 case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR:
1710 case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR:
1711 case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR:
1712 case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR:
1713 case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR:
1721 bool isSupportedByFramework (VkFormat format)
// Tells whether the test framework itself can process the format.
// Extension/out-of-core formats are rejected up front.
1723 	if (format == VK_FORMAT_UNDEFINED || format > VK_CORE_FORMAT_LAST)
// 64-bit channel formats are listed but not yet handled by tcu (see todo).
1728 		case VK_FORMAT_R64_UINT:
1729 		case VK_FORMAT_R64_SINT:
1730 		case VK_FORMAT_R64_SFLOAT:
1731 		case VK_FORMAT_R64G64_UINT:
1732 		case VK_FORMAT_R64G64_SINT:
1733 		case VK_FORMAT_R64G64_SFLOAT:
1734 		case VK_FORMAT_R64G64B64_UINT:
1735 		case VK_FORMAT_R64G64B64_SINT:
1736 		case VK_FORMAT_R64G64B64_SFLOAT:
1737 		case VK_FORMAT_R64G64B64A64_UINT:
1738 		case VK_FORMAT_R64G64B64A64_SINT:
1739 		case VK_FORMAT_R64G64B64A64_SFLOAT:
1740 			// \todo [2016-12-01 pyry] Support 64-bit channel types
// BC formats are also listed separately below (elided result —
// presumably unsupported by the framework at this point; TODO confirm).
1743 		case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
1744 		case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
1745 		case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
1746 		case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
1747 		case VK_FORMAT_BC2_UNORM_BLOCK:
1748 		case VK_FORMAT_BC2_SRGB_BLOCK:
1749 		case VK_FORMAT_BC3_UNORM_BLOCK:
1750 		case VK_FORMAT_BC3_SRGB_BLOCK:
1751 		case VK_FORMAT_BC4_UNORM_BLOCK:
1752 		case VK_FORMAT_BC4_SNORM_BLOCK:
1753 		case VK_FORMAT_BC5_UNORM_BLOCK:
1754 		case VK_FORMAT_BC5_SNORM_BLOCK:
1755 		case VK_FORMAT_BC6H_UFLOAT_BLOCK:
1756 		case VK_FORMAT_BC6H_SFLOAT_BLOCK:
1757 		case VK_FORMAT_BC7_UNORM_BLOCK:
1758 		case VK_FORMAT_BC7_SRGB_BLOCK:
1766 void checkImageSupport (const InstanceInterface& vki, VkPhysicalDevice physicalDevice, const VkImageCreateInfo& imageCreateInfo)
// Throws tcu::NotSupportedError unless the device supports an image with the
// given create parameters (format/type/tiling/usage/flags, sample count and
// array layer count). Used by tests to skip rather than fail.
1768 	VkImageFormatProperties	imageFormatProperties;
// Non-VK_SUCCESS (non-zero) result means the format/usage combination itself
// is unsupported.
1770 	if (vki.getPhysicalDeviceImageFormatProperties(physicalDevice, imageCreateInfo.format, imageCreateInfo.imageType,
1771 												   imageCreateInfo.tiling, imageCreateInfo.usage, imageCreateInfo.flags,
1772 												   &imageFormatProperties))
1774 		TCU_THROW(NotSupportedError, "Image format not supported.");
// The requested sample count must be one of the supported bits.
1776 	if (((VkSampleCountFlagBits)imageFormatProperties.sampleCounts & imageCreateInfo.samples) == 0)
1778 		TCU_THROW(NotSupportedError, "Sample count not supported.");
// The requested layer count must not exceed the device limit.
1780 	if (imageFormatProperties.maxArrayLayers < imageCreateInfo.arrayLayers)
1782 		TCU_THROW(NotSupportedError, "Layer count not supported.");
/*--------------------------------------------------------------------*//*!
 * \brief Map a tcu::TextureFormat onto the equivalent Vulkan VkFormat.
 *
 * The (channel order, channel type) pair is packed into a single int and
 * matched against the supported combinations below.
 *
 * \throws InternalError if the combination has no matching VkFormat.
 *//*--------------------------------------------------------------------*/
1786 VkFormat mapTextureFormat (const tcu::TextureFormat& format)
// Both enums must fit in 16 bits each so that (order, type) can be packed
// losslessly into one int for the switch below.
1788 DE_STATIC_ASSERT(tcu::TextureFormat::CHANNELORDER_LAST < (1<<16));
1789 DE_STATIC_ASSERT(tcu::TextureFormat::CHANNELTYPE_LAST < (1<<16));
// PACK_FMT combines (order, type) into a single switchable constant;
// FMT_CASE is shorthand for case labels over tcu::TextureFormat enumerators.
1791 #define PACK_FMT(ORDER, TYPE) ((int(ORDER) << 16) | int(TYPE))
1792 #define FMT_CASE(ORDER, TYPE) PACK_FMT(tcu::TextureFormat::ORDER, tcu::TextureFormat::TYPE)
1794 // update this mapping if VkFormat changes
1795 DE_STATIC_ASSERT(VK_CORE_FORMAT_LAST == 185);
1797 switch (PACK_FMT(format.order, format.type))
// Packed small-channel formats.
1799 case FMT_CASE(RG, UNORM_BYTE_44): return VK_FORMAT_R4G4_UNORM_PACK8;
1800 case FMT_CASE(RGB, UNORM_SHORT_565): return VK_FORMAT_R5G6B5_UNORM_PACK16;
1801 case FMT_CASE(RGBA, UNORM_SHORT_4444): return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
1802 case FMT_CASE(RGBA, UNORM_SHORT_5551): return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
1804 case FMT_CASE(BGR, UNORM_SHORT_565): return VK_FORMAT_B5G6R5_UNORM_PACK16;
1805 case FMT_CASE(BGRA, UNORM_SHORT_4444): return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
1806 case FMT_CASE(BGRA, UNORM_SHORT_5551): return VK_FORMAT_B5G5R5A1_UNORM_PACK16;
1808 case FMT_CASE(ARGB, UNORM_SHORT_1555): return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
// 8-bit per-channel formats.
1810 case FMT_CASE(R, UNORM_INT8): return VK_FORMAT_R8_UNORM;
1811 case FMT_CASE(R, SNORM_INT8): return VK_FORMAT_R8_SNORM;
1812 case FMT_CASE(R, UNSIGNED_INT8): return VK_FORMAT_R8_UINT;
1813 case FMT_CASE(R, SIGNED_INT8): return VK_FORMAT_R8_SINT;
1814 case FMT_CASE(sR, UNORM_INT8): return VK_FORMAT_R8_SRGB;
1816 case FMT_CASE(RG, UNORM_INT8): return VK_FORMAT_R8G8_UNORM;
1817 case FMT_CASE(RG, SNORM_INT8): return VK_FORMAT_R8G8_SNORM;
1818 case FMT_CASE(RG, UNSIGNED_INT8): return VK_FORMAT_R8G8_UINT;
1819 case FMT_CASE(RG, SIGNED_INT8): return VK_FORMAT_R8G8_SINT;
1820 case FMT_CASE(sRG, UNORM_INT8): return VK_FORMAT_R8G8_SRGB;
1822 case FMT_CASE(RGB, UNORM_INT8): return VK_FORMAT_R8G8B8_UNORM;
1823 case FMT_CASE(RGB, SNORM_INT8): return VK_FORMAT_R8G8B8_SNORM;
1824 case FMT_CASE(RGB, UNSIGNED_INT8): return VK_FORMAT_R8G8B8_UINT;
1825 case FMT_CASE(RGB, SIGNED_INT8): return VK_FORMAT_R8G8B8_SINT;
1826 case FMT_CASE(sRGB, UNORM_INT8): return VK_FORMAT_R8G8B8_SRGB;
1828 case FMT_CASE(RGBA, UNORM_INT8): return VK_FORMAT_R8G8B8A8_UNORM;
1829 case FMT_CASE(RGBA, SNORM_INT8): return VK_FORMAT_R8G8B8A8_SNORM;
1830 case FMT_CASE(RGBA, UNSIGNED_INT8): return VK_FORMAT_R8G8B8A8_UINT;
1831 case FMT_CASE(RGBA, SIGNED_INT8): return VK_FORMAT_R8G8B8A8_SINT;
1832 case FMT_CASE(sRGBA, UNORM_INT8): return VK_FORMAT_R8G8B8A8_SRGB;
// 10-10-10-2 packed formats (RGBA order maps to the A2B10G10R10 family).
1834 case FMT_CASE(RGBA, UNORM_INT_1010102_REV): return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
1835 case FMT_CASE(RGBA, SNORM_INT_1010102_REV): return VK_FORMAT_A2B10G10R10_SNORM_PACK32;
1836 case FMT_CASE(RGBA, UNSIGNED_INT_1010102_REV): return VK_FORMAT_A2B10G10R10_UINT_PACK32;
1837 case FMT_CASE(RGBA, SIGNED_INT_1010102_REV): return VK_FORMAT_A2B10G10R10_SINT_PACK32;
// 16-bit per-channel formats.
1839 case FMT_CASE(R, UNORM_INT16): return VK_FORMAT_R16_UNORM;
1840 case FMT_CASE(R, SNORM_INT16): return VK_FORMAT_R16_SNORM;
1841 case FMT_CASE(R, UNSIGNED_INT16): return VK_FORMAT_R16_UINT;
1842 case FMT_CASE(R, SIGNED_INT16): return VK_FORMAT_R16_SINT;
1843 case FMT_CASE(R, HALF_FLOAT): return VK_FORMAT_R16_SFLOAT;
1845 case FMT_CASE(RG, UNORM_INT16): return VK_FORMAT_R16G16_UNORM;
1846 case FMT_CASE(RG, SNORM_INT16): return VK_FORMAT_R16G16_SNORM;
1847 case FMT_CASE(RG, UNSIGNED_INT16): return VK_FORMAT_R16G16_UINT;
1848 case FMT_CASE(RG, SIGNED_INT16): return VK_FORMAT_R16G16_SINT;
1849 case FMT_CASE(RG, HALF_FLOAT): return VK_FORMAT_R16G16_SFLOAT;
1851 case FMT_CASE(RGB, UNORM_INT16): return VK_FORMAT_R16G16B16_UNORM;
1852 case FMT_CASE(RGB, SNORM_INT16): return VK_FORMAT_R16G16B16_SNORM;
1853 case FMT_CASE(RGB, UNSIGNED_INT16): return VK_FORMAT_R16G16B16_UINT;
1854 case FMT_CASE(RGB, SIGNED_INT16): return VK_FORMAT_R16G16B16_SINT;
1855 case FMT_CASE(RGB, HALF_FLOAT): return VK_FORMAT_R16G16B16_SFLOAT;
1857 case FMT_CASE(RGBA, UNORM_INT16): return VK_FORMAT_R16G16B16A16_UNORM;
1858 case FMT_CASE(RGBA, SNORM_INT16): return VK_FORMAT_R16G16B16A16_SNORM;
1859 case FMT_CASE(RGBA, UNSIGNED_INT16): return VK_FORMAT_R16G16B16A16_UINT;
1860 case FMT_CASE(RGBA, SIGNED_INT16): return VK_FORMAT_R16G16B16A16_SINT;
1861 case FMT_CASE(RGBA, HALF_FLOAT): return VK_FORMAT_R16G16B16A16_SFLOAT;
// 32-bit per-channel formats.
1863 case FMT_CASE(R, UNSIGNED_INT32): return VK_FORMAT_R32_UINT;
1864 case FMT_CASE(R, SIGNED_INT32): return VK_FORMAT_R32_SINT;
1865 case FMT_CASE(R, FLOAT): return VK_FORMAT_R32_SFLOAT;
1867 case FMT_CASE(RG, UNSIGNED_INT32): return VK_FORMAT_R32G32_UINT;
1868 case FMT_CASE(RG, SIGNED_INT32): return VK_FORMAT_R32G32_SINT;
1869 case FMT_CASE(RG, FLOAT): return VK_FORMAT_R32G32_SFLOAT;
1871 case FMT_CASE(RGB, UNSIGNED_INT32): return VK_FORMAT_R32G32B32_UINT;
1872 case FMT_CASE(RGB, SIGNED_INT32): return VK_FORMAT_R32G32B32_SINT;
1873 case FMT_CASE(RGB, FLOAT): return VK_FORMAT_R32G32B32_SFLOAT;
1875 case FMT_CASE(RGBA, UNSIGNED_INT32): return VK_FORMAT_R32G32B32A32_UINT;
1876 case FMT_CASE(RGBA, SIGNED_INT32): return VK_FORMAT_R32G32B32A32_SINT;
1877 case FMT_CASE(RGBA, FLOAT): return VK_FORMAT_R32G32B32A32_SFLOAT;
// 64-bit float formats.
1879 case FMT_CASE(R, FLOAT64): return VK_FORMAT_R64_SFLOAT;
1880 case FMT_CASE(RG, FLOAT64): return VK_FORMAT_R64G64_SFLOAT;
1881 case FMT_CASE(RGB, FLOAT64): return VK_FORMAT_R64G64B64_SFLOAT;
1882 case FMT_CASE(RGBA, FLOAT64): return VK_FORMAT_R64G64B64A64_SFLOAT;
// Packed shared-exponent / small-float formats.
1884 case FMT_CASE(RGB, UNSIGNED_INT_11F_11F_10F_REV): return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
1885 case FMT_CASE(RGB, UNSIGNED_INT_999_E5_REV): return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
// BGR-ordered 8-bit formats.
1887 case FMT_CASE(BGR, UNORM_INT8): return VK_FORMAT_B8G8R8_UNORM;
1888 case FMT_CASE(BGR, SNORM_INT8): return VK_FORMAT_B8G8R8_SNORM;
1889 case FMT_CASE(BGR, UNSIGNED_INT8): return VK_FORMAT_B8G8R8_UINT;
1890 case FMT_CASE(BGR, SIGNED_INT8): return VK_FORMAT_B8G8R8_SINT;
1891 case FMT_CASE(sBGR, UNORM_INT8): return VK_FORMAT_B8G8R8_SRGB;
1893 case FMT_CASE(BGRA, UNORM_INT8): return VK_FORMAT_B8G8R8A8_UNORM;
1894 case FMT_CASE(BGRA, SNORM_INT8): return VK_FORMAT_B8G8R8A8_SNORM;
1895 case FMT_CASE(BGRA, UNSIGNED_INT8): return VK_FORMAT_B8G8R8A8_UINT;
1896 case FMT_CASE(BGRA, SIGNED_INT8): return VK_FORMAT_B8G8R8A8_SINT;
1897 case FMT_CASE(sBGRA, UNORM_INT8): return VK_FORMAT_B8G8R8A8_SRGB;
// BGRA order maps to the A2R10G10B10 packed family.
1899 case FMT_CASE(BGRA, UNORM_INT_1010102_REV): return VK_FORMAT_A2R10G10B10_UNORM_PACK32;
1900 case FMT_CASE(BGRA, SNORM_INT_1010102_REV): return VK_FORMAT_A2R10G10B10_SNORM_PACK32;
1901 case FMT_CASE(BGRA, UNSIGNED_INT_1010102_REV): return VK_FORMAT_A2R10G10B10_UINT_PACK32;
1902 case FMT_CASE(BGRA, SIGNED_INT_1010102_REV): return VK_FORMAT_A2R10G10B10_SINT_PACK32;
// Depth, stencil and combined depth/stencil formats.
1904 case FMT_CASE(D, UNORM_INT16): return VK_FORMAT_D16_UNORM;
1905 case FMT_CASE(D, UNSIGNED_INT_24_8_REV): return VK_FORMAT_X8_D24_UNORM_PACK32;
1906 case FMT_CASE(D, FLOAT): return VK_FORMAT_D32_SFLOAT;
1908 case FMT_CASE(S, UNSIGNED_INT8): return VK_FORMAT_S8_UINT;
1910 case FMT_CASE(DS, UNSIGNED_INT_16_8_8): return VK_FORMAT_D16_UNORM_S8_UINT;
1911 case FMT_CASE(DS, UNSIGNED_INT_24_8_REV): return VK_FORMAT_D24_UNORM_S8_UINT;
1912 case FMT_CASE(DS, FLOAT_UNSIGNED_INT_24_8_REV): return VK_FORMAT_D32_SFLOAT_S8_UINT;
// 10- and 12-bit-in-16 formats (KHR, from the sampler YCbCr conversion
// extension) that still map onto plain texture formats.
1915 case FMT_CASE(R, UNORM_SHORT_10): return VK_FORMAT_R10X6_UNORM_PACK16_KHR;
1916 case FMT_CASE(RG, UNORM_SHORT_10): return VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR;
1917 case FMT_CASE(RGBA, UNORM_SHORT_10): return VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR;
1919 case FMT_CASE(R, UNORM_SHORT_12): return VK_FORMAT_R12X4_UNORM_PACK16_KHR;
1920 case FMT_CASE(RG, UNORM_SHORT_12): return VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR;
1921 case FMT_CASE(RGBA, UNORM_SHORT_12): return VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR;
1924 TCU_THROW(InternalError, "Unknown texture format");
/*--------------------------------------------------------------------*//*!
 * \brief Map a tcu::CompressedTexFormat onto the equivalent VkFormat.
 *
 * Covers the ETC2/EAC, ASTC and BC (S3TC/RGTC/BPTC) block-compressed
 * families.
 *
 * \throws InternalError if the compressed format has no Vulkan equivalent.
 *//*--------------------------------------------------------------------*/
1931 VkFormat mapCompressedTextureFormat (const tcu::CompressedTexFormat format)
1933 // update this mapping if CompressedTexFormat changes
1934 DE_STATIC_ASSERT(tcu::COMPRESSEDTEXFORMAT_LAST == 55);
// ETC2 / EAC formats.
1938 case tcu::COMPRESSEDTEXFORMAT_ETC2_RGB8: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
1939 case tcu::COMPRESSEDTEXFORMAT_ETC2_SRGB8: return VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK;
1940 case tcu::COMPRESSEDTEXFORMAT_ETC2_RGB8_PUNCHTHROUGH_ALPHA1: return VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
1941 case tcu::COMPRESSEDTEXFORMAT_ETC2_SRGB8_PUNCHTHROUGH_ALPHA1: return VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK;
1942 case tcu::COMPRESSEDTEXFORMAT_ETC2_EAC_RGBA8: return VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
1943 case tcu::COMPRESSEDTEXFORMAT_ETC2_EAC_SRGB8_ALPHA8: return VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK;
1945 case tcu::COMPRESSEDTEXFORMAT_EAC_R11: return VK_FORMAT_EAC_R11_UNORM_BLOCK;
1946 case tcu::COMPRESSEDTEXFORMAT_EAC_SIGNED_R11: return VK_FORMAT_EAC_R11_SNORM_BLOCK;
1947 case tcu::COMPRESSEDTEXFORMAT_EAC_RG11: return VK_FORMAT_EAC_R11G11_UNORM_BLOCK;
1948 case tcu::COMPRESSEDTEXFORMAT_EAC_SIGNED_RG11: return VK_FORMAT_EAC_R11G11_SNORM_BLOCK;
// ASTC formats, one UNORM + one SRGB entry per block footprint.
1950 case tcu::COMPRESSEDTEXFORMAT_ASTC_4x4_RGBA: return VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
1951 case tcu::COMPRESSEDTEXFORMAT_ASTC_4x4_SRGB8_ALPHA8: return VK_FORMAT_ASTC_4x4_SRGB_BLOCK;
1952 case tcu::COMPRESSEDTEXFORMAT_ASTC_5x4_RGBA: return VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
1953 case tcu::COMPRESSEDTEXFORMAT_ASTC_5x4_SRGB8_ALPHA8: return VK_FORMAT_ASTC_5x4_SRGB_BLOCK;
1954 case tcu::COMPRESSEDTEXFORMAT_ASTC_5x5_RGBA: return VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
1955 case tcu::COMPRESSEDTEXFORMAT_ASTC_5x5_SRGB8_ALPHA8: return VK_FORMAT_ASTC_5x5_SRGB_BLOCK;
1956 case tcu::COMPRESSEDTEXFORMAT_ASTC_6x5_RGBA: return VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
1957 case tcu::COMPRESSEDTEXFORMAT_ASTC_6x5_SRGB8_ALPHA8: return VK_FORMAT_ASTC_6x5_SRGB_BLOCK;
1958 case tcu::COMPRESSEDTEXFORMAT_ASTC_6x6_RGBA: return VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
1959 case tcu::COMPRESSEDTEXFORMAT_ASTC_6x6_SRGB8_ALPHA8: return VK_FORMAT_ASTC_6x6_SRGB_BLOCK;
1960 case tcu::COMPRESSEDTEXFORMAT_ASTC_8x5_RGBA: return VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
1961 case tcu::COMPRESSEDTEXFORMAT_ASTC_8x5_SRGB8_ALPHA8: return VK_FORMAT_ASTC_8x5_SRGB_BLOCK;
1962 case tcu::COMPRESSEDTEXFORMAT_ASTC_8x6_RGBA: return VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
1963 case tcu::COMPRESSEDTEXFORMAT_ASTC_8x6_SRGB8_ALPHA8: return VK_FORMAT_ASTC_8x6_SRGB_BLOCK;
1964 case tcu::COMPRESSEDTEXFORMAT_ASTC_8x8_RGBA: return VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
1965 case tcu::COMPRESSEDTEXFORMAT_ASTC_8x8_SRGB8_ALPHA8: return VK_FORMAT_ASTC_8x8_SRGB_BLOCK;
1966 case tcu::COMPRESSEDTEXFORMAT_ASTC_10x5_RGBA: return VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
1967 case tcu::COMPRESSEDTEXFORMAT_ASTC_10x5_SRGB8_ALPHA8: return VK_FORMAT_ASTC_10x5_SRGB_BLOCK;
1968 case tcu::COMPRESSEDTEXFORMAT_ASTC_10x6_RGBA: return VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
1969 case tcu::COMPRESSEDTEXFORMAT_ASTC_10x6_SRGB8_ALPHA8: return VK_FORMAT_ASTC_10x6_SRGB_BLOCK;
1970 case tcu::COMPRESSEDTEXFORMAT_ASTC_10x8_RGBA: return VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
1971 case tcu::COMPRESSEDTEXFORMAT_ASTC_10x8_SRGB8_ALPHA8: return VK_FORMAT_ASTC_10x8_SRGB_BLOCK;
1972 case tcu::COMPRESSEDTEXFORMAT_ASTC_10x10_RGBA: return VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
1973 case tcu::COMPRESSEDTEXFORMAT_ASTC_10x10_SRGB8_ALPHA8: return VK_FORMAT_ASTC_10x10_SRGB_BLOCK;
1974 case tcu::COMPRESSEDTEXFORMAT_ASTC_12x10_RGBA: return VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
1975 case tcu::COMPRESSEDTEXFORMAT_ASTC_12x10_SRGB8_ALPHA8: return VK_FORMAT_ASTC_12x10_SRGB_BLOCK;
1976 case tcu::COMPRESSEDTEXFORMAT_ASTC_12x12_RGBA: return VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
1977 case tcu::COMPRESSEDTEXFORMAT_ASTC_12x12_SRGB8_ALPHA8: return VK_FORMAT_ASTC_12x12_SRGB_BLOCK;
// BC (DXT/RGTC/BPTC) formats.
1979 case tcu::COMPRESSEDTEXFORMAT_BC1_RGB_UNORM_BLOCK: return VK_FORMAT_BC1_RGB_UNORM_BLOCK;
1980 case tcu::COMPRESSEDTEXFORMAT_BC1_RGB_SRGB_BLOCK: return VK_FORMAT_BC1_RGB_SRGB_BLOCK;
1981 case tcu::COMPRESSEDTEXFORMAT_BC1_RGBA_UNORM_BLOCK: return VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
1982 case tcu::COMPRESSEDTEXFORMAT_BC1_RGBA_SRGB_BLOCK: return VK_FORMAT_BC1_RGBA_SRGB_BLOCK;
1983 case tcu::COMPRESSEDTEXFORMAT_BC2_UNORM_BLOCK: return VK_FORMAT_BC2_UNORM_BLOCK;
1984 case tcu::COMPRESSEDTEXFORMAT_BC2_SRGB_BLOCK: return VK_FORMAT_BC2_SRGB_BLOCK;
1985 case tcu::COMPRESSEDTEXFORMAT_BC3_UNORM_BLOCK: return VK_FORMAT_BC3_UNORM_BLOCK;
1986 case tcu::COMPRESSEDTEXFORMAT_BC3_SRGB_BLOCK: return VK_FORMAT_BC3_SRGB_BLOCK;
1987 case tcu::COMPRESSEDTEXFORMAT_BC4_UNORM_BLOCK: return VK_FORMAT_BC4_UNORM_BLOCK;
1988 case tcu::COMPRESSEDTEXFORMAT_BC4_SNORM_BLOCK: return VK_FORMAT_BC4_SNORM_BLOCK;
1989 case tcu::COMPRESSEDTEXFORMAT_BC5_UNORM_BLOCK: return VK_FORMAT_BC5_UNORM_BLOCK;
1990 case tcu::COMPRESSEDTEXFORMAT_BC5_SNORM_BLOCK: return VK_FORMAT_BC5_SNORM_BLOCK;
1991 case tcu::COMPRESSEDTEXFORMAT_BC6H_UFLOAT_BLOCK: return VK_FORMAT_BC6H_UFLOAT_BLOCK;
1992 case tcu::COMPRESSEDTEXFORMAT_BC6H_SFLOAT_BLOCK: return VK_FORMAT_BC6H_SFLOAT_BLOCK;
1993 case tcu::COMPRESSEDTEXFORMAT_BC7_UNORM_BLOCK: return VK_FORMAT_BC7_UNORM_BLOCK;
1994 case tcu::COMPRESSEDTEXFORMAT_BC7_SRGB_BLOCK: return VK_FORMAT_BC7_SRGB_BLOCK;
1997 TCU_THROW(InternalError, "Unknown texture format");
// Never reached — TCU_THROW above throws; this return presumably exists
// only to satisfy compilers that warn about a missing return value.
1998 return VK_FORMAT_UNDEFINED;
/*--------------------------------------------------------------------*//*!
 * \brief Map a Vulkan VkFormat onto the equivalent tcu::TextureFormat.
 *
 * Inverse of mapTextureFormat() for formats that round-trip; note that
 * *SCALED formats are deliberately mapped to the same integer channel
 * types as the corresponding *_UINT / *_SINT formats, so the mapping is
 * not injective (see fullTextureFormatRoundTripSupported()).
 *
 * \throws InternalError for formats with no tcu::TextureFormat equivalent
 *         (e.g. compressed formats — see mapVkCompressedFormat()).
 *//*--------------------------------------------------------------------*/
2002 tcu::TextureFormat mapVkFormat (VkFormat format)
2004 using tcu::TextureFormat;
2006 // update this mapping if VkFormat changes
2007 DE_STATIC_ASSERT(VK_CORE_FORMAT_LAST == 185);
// Packed small-channel formats.
2011 case VK_FORMAT_R4G4_UNORM_PACK8: return TextureFormat(TextureFormat::RG, TextureFormat::UNORM_BYTE_44);
2012 case VK_FORMAT_R5G6B5_UNORM_PACK16: return TextureFormat(TextureFormat::RGB, TextureFormat::UNORM_SHORT_565);
2013 case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_SHORT_4444);
2014 case VK_FORMAT_R5G5B5A1_UNORM_PACK16: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_SHORT_5551);
2016 case VK_FORMAT_B5G6R5_UNORM_PACK16: return TextureFormat(TextureFormat::BGR, TextureFormat::UNORM_SHORT_565);
2017 case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return TextureFormat(TextureFormat::BGRA, TextureFormat::UNORM_SHORT_4444);
2018 case VK_FORMAT_B5G5R5A1_UNORM_PACK16: return TextureFormat(TextureFormat::BGRA, TextureFormat::UNORM_SHORT_5551);
2020 case VK_FORMAT_A1R5G5B5_UNORM_PACK16: return TextureFormat(TextureFormat::ARGB, TextureFormat::UNORM_SHORT_1555);
// 8-bit formats. USCALED/SSCALED intentionally alias the UINT/SINT
// channel types — tcu has no distinct scaled channel type.
2022 case VK_FORMAT_R8_UNORM: return TextureFormat(TextureFormat::R, TextureFormat::UNORM_INT8);
2023 case VK_FORMAT_R8_SNORM: return TextureFormat(TextureFormat::R, TextureFormat::SNORM_INT8);
2024 case VK_FORMAT_R8_USCALED: return TextureFormat(TextureFormat::R, TextureFormat::UNSIGNED_INT8);
2025 case VK_FORMAT_R8_SSCALED: return TextureFormat(TextureFormat::R, TextureFormat::SIGNED_INT8);
2026 case VK_FORMAT_R8_UINT: return TextureFormat(TextureFormat::R, TextureFormat::UNSIGNED_INT8);
2027 case VK_FORMAT_R8_SINT: return TextureFormat(TextureFormat::R, TextureFormat::SIGNED_INT8);
2028 case VK_FORMAT_R8_SRGB: return TextureFormat(TextureFormat::sR, TextureFormat::UNORM_INT8);
2030 case VK_FORMAT_R8G8_UNORM: return TextureFormat(TextureFormat::RG, TextureFormat::UNORM_INT8);
2031 case VK_FORMAT_R8G8_SNORM: return TextureFormat(TextureFormat::RG, TextureFormat::SNORM_INT8);
2032 case VK_FORMAT_R8G8_USCALED: return TextureFormat(TextureFormat::RG, TextureFormat::UNSIGNED_INT8);
2033 case VK_FORMAT_R8G8_SSCALED: return TextureFormat(TextureFormat::RG, TextureFormat::SIGNED_INT8);
2034 case VK_FORMAT_R8G8_UINT: return TextureFormat(TextureFormat::RG, TextureFormat::UNSIGNED_INT8);
2035 case VK_FORMAT_R8G8_SINT: return TextureFormat(TextureFormat::RG, TextureFormat::SIGNED_INT8);
2036 case VK_FORMAT_R8G8_SRGB: return TextureFormat(TextureFormat::sRG, TextureFormat::UNORM_INT8);
2038 case VK_FORMAT_R8G8B8_UNORM: return TextureFormat(TextureFormat::RGB, TextureFormat::UNORM_INT8);
2039 case VK_FORMAT_R8G8B8_SNORM: return TextureFormat(TextureFormat::RGB, TextureFormat::SNORM_INT8);
2040 case VK_FORMAT_R8G8B8_USCALED: return TextureFormat(TextureFormat::RGB, TextureFormat::UNSIGNED_INT8);
2041 case VK_FORMAT_R8G8B8_SSCALED: return TextureFormat(TextureFormat::RGB, TextureFormat::SIGNED_INT8);
2042 case VK_FORMAT_R8G8B8_UINT: return TextureFormat(TextureFormat::RGB, TextureFormat::UNSIGNED_INT8);
2043 case VK_FORMAT_R8G8B8_SINT: return TextureFormat(TextureFormat::RGB, TextureFormat::SIGNED_INT8);
2044 case VK_FORMAT_R8G8B8_SRGB: return TextureFormat(TextureFormat::sRGB, TextureFormat::UNORM_INT8);
2046 case VK_FORMAT_R8G8B8A8_UNORM: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8);
2047 case VK_FORMAT_R8G8B8A8_SNORM: return TextureFormat(TextureFormat::RGBA, TextureFormat::SNORM_INT8);
2048 case VK_FORMAT_R8G8B8A8_USCALED: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT8);
2049 case VK_FORMAT_R8G8B8A8_SSCALED: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT8);
2050 case VK_FORMAT_R8G8B8A8_UINT: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT8);
2051 case VK_FORMAT_R8G8B8A8_SINT: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT8);
2052 case VK_FORMAT_R8G8B8A8_SRGB: return TextureFormat(TextureFormat::sRGBA, TextureFormat::UNORM_INT8);
// 16-bit formats.
2054 case VK_FORMAT_R16_UNORM: return TextureFormat(TextureFormat::R, TextureFormat::UNORM_INT16);
2055 case VK_FORMAT_R16_SNORM: return TextureFormat(TextureFormat::R, TextureFormat::SNORM_INT16);
2056 case VK_FORMAT_R16_USCALED: return TextureFormat(TextureFormat::R, TextureFormat::UNSIGNED_INT16);
2057 case VK_FORMAT_R16_SSCALED: return TextureFormat(TextureFormat::R, TextureFormat::SIGNED_INT16);
2058 case VK_FORMAT_R16_UINT: return TextureFormat(TextureFormat::R, TextureFormat::UNSIGNED_INT16);
2059 case VK_FORMAT_R16_SINT: return TextureFormat(TextureFormat::R, TextureFormat::SIGNED_INT16);
2060 case VK_FORMAT_R16_SFLOAT: return TextureFormat(TextureFormat::R, TextureFormat::HALF_FLOAT);
2062 case VK_FORMAT_R16G16_UNORM: return TextureFormat(TextureFormat::RG, TextureFormat::UNORM_INT16);
2063 case VK_FORMAT_R16G16_SNORM: return TextureFormat(TextureFormat::RG, TextureFormat::SNORM_INT16);
2064 case VK_FORMAT_R16G16_USCALED: return TextureFormat(TextureFormat::RG, TextureFormat::UNSIGNED_INT16);
2065 case VK_FORMAT_R16G16_SSCALED: return TextureFormat(TextureFormat::RG, TextureFormat::SIGNED_INT16);
2066 case VK_FORMAT_R16G16_UINT: return TextureFormat(TextureFormat::RG, TextureFormat::UNSIGNED_INT16);
2067 case VK_FORMAT_R16G16_SINT: return TextureFormat(TextureFormat::RG, TextureFormat::SIGNED_INT16);
2068 case VK_FORMAT_R16G16_SFLOAT: return TextureFormat(TextureFormat::RG, TextureFormat::HALF_FLOAT);
2070 case VK_FORMAT_R16G16B16_UNORM: return TextureFormat(TextureFormat::RGB, TextureFormat::UNORM_INT16);
2071 case VK_FORMAT_R16G16B16_SNORM: return TextureFormat(TextureFormat::RGB, TextureFormat::SNORM_INT16);
2072 case VK_FORMAT_R16G16B16_USCALED: return TextureFormat(TextureFormat::RGB, TextureFormat::UNSIGNED_INT16);
2073 case VK_FORMAT_R16G16B16_SSCALED: return TextureFormat(TextureFormat::RGB, TextureFormat::SIGNED_INT16);
2074 case VK_FORMAT_R16G16B16_UINT: return TextureFormat(TextureFormat::RGB, TextureFormat::UNSIGNED_INT16);
2075 case VK_FORMAT_R16G16B16_SINT: return TextureFormat(TextureFormat::RGB, TextureFormat::SIGNED_INT16);
2076 case VK_FORMAT_R16G16B16_SFLOAT: return TextureFormat(TextureFormat::RGB, TextureFormat::HALF_FLOAT);
2078 case VK_FORMAT_R16G16B16A16_UNORM: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT16);
2079 case VK_FORMAT_R16G16B16A16_SNORM: return TextureFormat(TextureFormat::RGBA, TextureFormat::SNORM_INT16);
2080 case VK_FORMAT_R16G16B16A16_USCALED: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT16);
2081 case VK_FORMAT_R16G16B16A16_SSCALED: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT16);
2082 case VK_FORMAT_R16G16B16A16_UINT: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT16);
2083 case VK_FORMAT_R16G16B16A16_SINT: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT16);
2084 case VK_FORMAT_R16G16B16A16_SFLOAT: return TextureFormat(TextureFormat::RGBA, TextureFormat::HALF_FLOAT);
// 32-bit formats.
2086 case VK_FORMAT_R32_UINT: return TextureFormat(TextureFormat::R, TextureFormat::UNSIGNED_INT32);
2087 case VK_FORMAT_R32_SINT: return TextureFormat(TextureFormat::R, TextureFormat::SIGNED_INT32);
2088 case VK_FORMAT_R32_SFLOAT: return TextureFormat(TextureFormat::R, TextureFormat::FLOAT);
2090 case VK_FORMAT_R32G32_UINT: return TextureFormat(TextureFormat::RG, TextureFormat::UNSIGNED_INT32);
2091 case VK_FORMAT_R32G32_SINT: return TextureFormat(TextureFormat::RG, TextureFormat::SIGNED_INT32);
2092 case VK_FORMAT_R32G32_SFLOAT: return TextureFormat(TextureFormat::RG, TextureFormat::FLOAT);
2094 case VK_FORMAT_R32G32B32_UINT: return TextureFormat(TextureFormat::RGB, TextureFormat::UNSIGNED_INT32);
2095 case VK_FORMAT_R32G32B32_SINT: return TextureFormat(TextureFormat::RGB, TextureFormat::SIGNED_INT32);
2096 case VK_FORMAT_R32G32B32_SFLOAT: return TextureFormat(TextureFormat::RGB, TextureFormat::FLOAT);
2098 case VK_FORMAT_R32G32B32A32_UINT: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT32);
2099 case VK_FORMAT_R32G32B32A32_SINT: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT32);
2100 case VK_FORMAT_R32G32B32A32_SFLOAT: return TextureFormat(TextureFormat::RGBA, TextureFormat::FLOAT);
// 64-bit formats.
2102 case VK_FORMAT_R64_UINT: return TextureFormat(TextureFormat::R, TextureFormat::UNSIGNED_INT64);
2103 case VK_FORMAT_R64_SINT: return TextureFormat(TextureFormat::R, TextureFormat::SIGNED_INT64);
2104 case VK_FORMAT_R64_SFLOAT: return TextureFormat(TextureFormat::R, TextureFormat::FLOAT64);
2105 case VK_FORMAT_R64G64_SFLOAT: return TextureFormat(TextureFormat::RG, TextureFormat::FLOAT64);
2106 case VK_FORMAT_R64G64B64_SFLOAT: return TextureFormat(TextureFormat::RGB, TextureFormat::FLOAT64);
2107 case VK_FORMAT_R64G64B64A64_SFLOAT: return TextureFormat(TextureFormat::RGBA, TextureFormat::FLOAT64);
// Packed small-float / shared-exponent formats.
2109 case VK_FORMAT_B10G11R11_UFLOAT_PACK32: return TextureFormat(TextureFormat::RGB, TextureFormat::UNSIGNED_INT_11F_11F_10F_REV);
2110 case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: return TextureFormat(TextureFormat::RGB, TextureFormat::UNSIGNED_INT_999_E5_REV);
// BGR-ordered 8-bit formats.
2112 case VK_FORMAT_B8G8R8_UNORM: return TextureFormat(TextureFormat::BGR, TextureFormat::UNORM_INT8);
2113 case VK_FORMAT_B8G8R8_SNORM: return TextureFormat(TextureFormat::BGR, TextureFormat::SNORM_INT8);
2114 case VK_FORMAT_B8G8R8_USCALED: return TextureFormat(TextureFormat::BGR, TextureFormat::UNSIGNED_INT8);
2115 case VK_FORMAT_B8G8R8_SSCALED: return TextureFormat(TextureFormat::BGR, TextureFormat::SIGNED_INT8);
2116 case VK_FORMAT_B8G8R8_UINT: return TextureFormat(TextureFormat::BGR, TextureFormat::UNSIGNED_INT8);
2117 case VK_FORMAT_B8G8R8_SINT: return TextureFormat(TextureFormat::BGR, TextureFormat::SIGNED_INT8);
2118 case VK_FORMAT_B8G8R8_SRGB: return TextureFormat(TextureFormat::sBGR, TextureFormat::UNORM_INT8);
2120 case VK_FORMAT_B8G8R8A8_UNORM: return TextureFormat(TextureFormat::BGRA, TextureFormat::UNORM_INT8);
2121 case VK_FORMAT_B8G8R8A8_SNORM: return TextureFormat(TextureFormat::BGRA, TextureFormat::SNORM_INT8);
2122 case VK_FORMAT_B8G8R8A8_USCALED: return TextureFormat(TextureFormat::BGRA, TextureFormat::UNSIGNED_INT8);
2123 case VK_FORMAT_B8G8R8A8_SSCALED: return TextureFormat(TextureFormat::BGRA, TextureFormat::SIGNED_INT8);
2124 case VK_FORMAT_B8G8R8A8_UINT: return TextureFormat(TextureFormat::BGRA, TextureFormat::UNSIGNED_INT8);
2125 case VK_FORMAT_B8G8R8A8_SINT: return TextureFormat(TextureFormat::BGRA, TextureFormat::SIGNED_INT8);
2126 case VK_FORMAT_B8G8R8A8_SRGB: return TextureFormat(TextureFormat::sBGRA, TextureFormat::UNORM_INT8);
// Depth, stencil and combined depth/stencil formats.
2128 case VK_FORMAT_D16_UNORM: return TextureFormat(TextureFormat::D, TextureFormat::UNORM_INT16);
2129 case VK_FORMAT_X8_D24_UNORM_PACK32: return TextureFormat(TextureFormat::D, TextureFormat::UNSIGNED_INT_24_8_REV);
2130 case VK_FORMAT_D32_SFLOAT: return TextureFormat(TextureFormat::D, TextureFormat::FLOAT);
2132 case VK_FORMAT_S8_UINT: return TextureFormat(TextureFormat::S, TextureFormat::UNSIGNED_INT8);
2134 // \note There is no standard interleaved memory layout for DS formats; buffer-image copies
2135 // will always operate on either D or S aspect only. See Khronos bug 12998
2136 case VK_FORMAT_D16_UNORM_S8_UINT: return TextureFormat(TextureFormat::DS, TextureFormat::UNSIGNED_INT_16_8_8);
2137 case VK_FORMAT_D24_UNORM_S8_UINT: return TextureFormat(TextureFormat::DS, TextureFormat::UNSIGNED_INT_24_8_REV);
2138 case VK_FORMAT_D32_SFLOAT_S8_UINT: return TextureFormat(TextureFormat::DS, TextureFormat::FLOAT_UNSIGNED_INT_24_8_REV);
// A8B8G8R8 PACK32 formats only coincide with byte-array RGBA layouts on
// little-endian hosts; big-endian builds are rejected at compile time.
2140 #if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
2141 case VK_FORMAT_A8B8G8R8_UNORM_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8);
2142 case VK_FORMAT_A8B8G8R8_SNORM_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::SNORM_INT8);
2143 case VK_FORMAT_A8B8G8R8_USCALED_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT8);
2144 case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT8);
2145 case VK_FORMAT_A8B8G8R8_UINT_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT8);
2146 case VK_FORMAT_A8B8G8R8_SINT_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT8);
2147 case VK_FORMAT_A8B8G8R8_SRGB_PACK32: return TextureFormat(TextureFormat::sRGBA, TextureFormat::UNORM_INT8);
2149 # error "Big-endian not supported"
// 10-10-10-2 packed formats.
2152 case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return TextureFormat(TextureFormat::BGRA, TextureFormat::UNORM_INT_1010102_REV);
2153 case VK_FORMAT_A2R10G10B10_SNORM_PACK32: return TextureFormat(TextureFormat::BGRA, TextureFormat::SNORM_INT_1010102_REV);
2154 case VK_FORMAT_A2R10G10B10_USCALED_PACK32: return TextureFormat(TextureFormat::BGRA, TextureFormat::UNSIGNED_INT_1010102_REV);
2155 case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: return TextureFormat(TextureFormat::BGRA, TextureFormat::SIGNED_INT_1010102_REV);
2156 case VK_FORMAT_A2R10G10B10_UINT_PACK32: return TextureFormat(TextureFormat::BGRA, TextureFormat::UNSIGNED_INT_1010102_REV);
2157 case VK_FORMAT_A2R10G10B10_SINT_PACK32: return TextureFormat(TextureFormat::BGRA, TextureFormat::SIGNED_INT_1010102_REV);
2159 case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT_1010102_REV);
2160 case VK_FORMAT_A2B10G10R10_SNORM_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::SNORM_INT_1010102_REV);
2161 case VK_FORMAT_A2B10G10R10_USCALED_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT_1010102_REV);
2162 case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT_1010102_REV);
2163 case VK_FORMAT_A2B10G10R10_UINT_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT_1010102_REV);
2164 case VK_FORMAT_A2B10G10R10_SINT_PACK32: return TextureFormat(TextureFormat::RGBA, TextureFormat::SIGNED_INT_1010102_REV);
2166 // YCbCr formats that can be mapped
2167 case VK_FORMAT_R10X6_UNORM_PACK16_KHR: return TextureFormat(TextureFormat::R, TextureFormat::UNORM_SHORT_10);
2168 case VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR: return TextureFormat(TextureFormat::RG, TextureFormat::UNORM_SHORT_10);
2169 case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_SHORT_10);
2171 case VK_FORMAT_R12X4_UNORM_PACK16_KHR: return TextureFormat(TextureFormat::R, TextureFormat::UNORM_SHORT_12);
2172 case VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR: return TextureFormat(TextureFormat::RG, TextureFormat::UNORM_SHORT_12);
2173 case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR: return TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_SHORT_12);
2176 TCU_THROW(InternalError, "Unknown image format");
/*--------------------------------------------------------------------*//*!
 * \brief Map a block-compressed VkFormat onto tcu::CompressedTexFormat.
 *
 * Inverse of mapCompressedTextureFormat(). Covers the ETC2/EAC, ASTC and
 * BC families.
 *
 * \throws InternalError for formats with no CompressedTexFormat equivalent.
 *//*--------------------------------------------------------------------*/
2180 tcu::CompressedTexFormat mapVkCompressedFormat (VkFormat format)
2182 // update this mapping if VkFormat changes
2183 DE_STATIC_ASSERT(VK_CORE_FORMAT_LAST == 185);
// ETC2 / EAC formats.
2187 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ETC2_RGB8;
2188 case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ETC2_SRGB8;
2189 case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ETC2_RGB8_PUNCHTHROUGH_ALPHA1;
2190 case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ETC2_SRGB8_PUNCHTHROUGH_ALPHA1;
2191 case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ETC2_EAC_RGBA8;
2192 case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ETC2_EAC_SRGB8_ALPHA8;
2194 case VK_FORMAT_EAC_R11_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_EAC_R11;
2195 case VK_FORMAT_EAC_R11_SNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_EAC_SIGNED_R11;
2196 case VK_FORMAT_EAC_R11G11_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_EAC_RG11;
2197 case VK_FORMAT_EAC_R11G11_SNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_EAC_SIGNED_RG11;
// ASTC formats, one UNORM + one SRGB entry per block footprint.
2199 case VK_FORMAT_ASTC_4x4_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_4x4_RGBA;
2200 case VK_FORMAT_ASTC_4x4_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_4x4_SRGB8_ALPHA8;
2201 case VK_FORMAT_ASTC_5x4_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_5x4_RGBA;
2202 case VK_FORMAT_ASTC_5x4_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_5x4_SRGB8_ALPHA8;
2203 case VK_FORMAT_ASTC_5x5_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_5x5_RGBA;
2204 case VK_FORMAT_ASTC_5x5_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_5x5_SRGB8_ALPHA8;
2205 case VK_FORMAT_ASTC_6x5_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_6x5_RGBA;
2206 case VK_FORMAT_ASTC_6x5_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_6x5_SRGB8_ALPHA8;
2207 case VK_FORMAT_ASTC_6x6_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_6x6_RGBA;
2208 case VK_FORMAT_ASTC_6x6_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_6x6_SRGB8_ALPHA8;
2209 case VK_FORMAT_ASTC_8x5_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_8x5_RGBA;
2210 case VK_FORMAT_ASTC_8x5_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_8x5_SRGB8_ALPHA8;
2211 case VK_FORMAT_ASTC_8x6_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_8x6_RGBA;
2212 case VK_FORMAT_ASTC_8x6_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_8x6_SRGB8_ALPHA8;
2213 case VK_FORMAT_ASTC_8x8_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_8x8_RGBA;
2214 case VK_FORMAT_ASTC_8x8_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_8x8_SRGB8_ALPHA8;
2215 case VK_FORMAT_ASTC_10x5_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_10x5_RGBA;
2216 case VK_FORMAT_ASTC_10x5_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_10x5_SRGB8_ALPHA8;
2217 case VK_FORMAT_ASTC_10x6_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_10x6_RGBA;
2218 case VK_FORMAT_ASTC_10x6_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_10x6_SRGB8_ALPHA8;
2219 case VK_FORMAT_ASTC_10x8_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_10x8_RGBA;
2220 case VK_FORMAT_ASTC_10x8_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_10x8_SRGB8_ALPHA8;
2221 case VK_FORMAT_ASTC_10x10_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_10x10_RGBA;
2222 case VK_FORMAT_ASTC_10x10_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_10x10_SRGB8_ALPHA8;
2223 case VK_FORMAT_ASTC_12x10_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_12x10_RGBA;
2224 case VK_FORMAT_ASTC_12x10_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_12x10_SRGB8_ALPHA8;
2225 case VK_FORMAT_ASTC_12x12_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_12x12_RGBA;
2226 case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_ASTC_12x12_SRGB8_ALPHA8;
// BC (DXT/RGTC/BPTC) formats.
2228 case VK_FORMAT_BC1_RGB_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC1_RGB_UNORM_BLOCK;
2229 case VK_FORMAT_BC1_RGB_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC1_RGB_SRGB_BLOCK;
2230 case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC1_RGBA_UNORM_BLOCK;
2231 case VK_FORMAT_BC1_RGBA_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC1_RGBA_SRGB_BLOCK;
2232 case VK_FORMAT_BC2_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC2_UNORM_BLOCK;
2233 case VK_FORMAT_BC2_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC2_SRGB_BLOCK;
2234 case VK_FORMAT_BC3_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC3_UNORM_BLOCK;
2235 case VK_FORMAT_BC3_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC3_SRGB_BLOCK;
2236 case VK_FORMAT_BC4_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC4_UNORM_BLOCK;
2237 case VK_FORMAT_BC4_SNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC4_SNORM_BLOCK;
2238 case VK_FORMAT_BC5_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC5_UNORM_BLOCK;
2239 case VK_FORMAT_BC5_SNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC5_SNORM_BLOCK;
2240 case VK_FORMAT_BC6H_UFLOAT_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC6H_UFLOAT_BLOCK;
2241 case VK_FORMAT_BC6H_SFLOAT_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC6H_SFLOAT_BLOCK;
2242 case VK_FORMAT_BC7_UNORM_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC7_UNORM_BLOCK;
2243 case VK_FORMAT_BC7_SRGB_BLOCK: return tcu::COMPRESSEDTEXFORMAT_BC7_SRGB_BLOCK;
2246 TCU_THROW(InternalError, "Unknown image format");
// Never reached — TCU_THROW above throws; this return presumably exists
// only to satisfy compilers that warn about a missing return value.
2247 return tcu::COMPRESSEDTEXFORMAT_LAST;
// \brief Return true if the given VkFormat is a *USCALED / *SSCALED format.
//
// File-local helper used by fullTextureFormatRoundTripSupported(): scaled
// formats have no distinct tcu channel type and therefore cannot round-trip
// through mapVkFormat() / mapTextureFormat().
2251 static bool isScaledFormat (VkFormat format)
2253 // update this mapping if VkFormat changes
2254 DE_STATIC_ASSERT(VK_CORE_FORMAT_LAST == 185);
2258 case VK_FORMAT_R8_USCALED:
2259 case VK_FORMAT_R8_SSCALED:
2260 case VK_FORMAT_R8G8_USCALED:
2261 case VK_FORMAT_R8G8_SSCALED:
2262 case VK_FORMAT_R8G8B8_USCALED:
2263 case VK_FORMAT_R8G8B8_SSCALED:
2264 case VK_FORMAT_R8G8B8A8_USCALED:
2265 case VK_FORMAT_R8G8B8A8_SSCALED:
2266 case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
2267 case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
2268 case VK_FORMAT_R16_USCALED:
2269 case VK_FORMAT_R16_SSCALED:
2270 case VK_FORMAT_R16G16_USCALED:
2271 case VK_FORMAT_R16G16_SSCALED:
2272 case VK_FORMAT_R16G16B16_USCALED:
2273 case VK_FORMAT_R16G16B16_SSCALED:
2274 case VK_FORMAT_R16G16B16A16_USCALED:
2275 case VK_FORMAT_R16G16B16A16_SSCALED:
2276 case VK_FORMAT_B8G8R8_USCALED:
2277 case VK_FORMAT_B8G8R8_SSCALED:
2278 case VK_FORMAT_B8G8R8A8_USCALED:
2279 case VK_FORMAT_B8G8R8A8_SSCALED:
2280 case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
2281 case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
2289 static bool fullTextureFormatRoundTripSupported (VkFormat format)
2291 if (isScaledFormat(format))
2293 // *SCALED formats get mapped to correspoding (u)int formats since
2294 // accessing them through (float) getPixel/setPixel has same behavior
2295 // as in shader access in Vulkan.
2296 // Unfortunately full round-trip between tcu::TextureFormat and VkFormat
2297 // for most SCALED formats is not supported though.
2299 const tcu::TextureFormat tcuFormat = mapVkFormat(format);
2301 switch (tcuFormat.type)
2303 case tcu::TextureFormat::UNSIGNED_INT8:
2304 case tcu::TextureFormat::UNSIGNED_INT16:
2305 case tcu::TextureFormat::UNSIGNED_INT32:
2306 case tcu::TextureFormat::SIGNED_INT8:
2307 case tcu::TextureFormat::SIGNED_INT16:
2308 case tcu::TextureFormat::SIGNED_INT32:
2309 case tcu::TextureFormat::UNSIGNED_INT_1010102_REV:
2310 case tcu::TextureFormat::SIGNED_INT_1010102_REV:
2321 case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
2322 case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
2323 case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
2324 case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
2325 case VK_FORMAT_A8B8G8R8_UINT_PACK32:
2326 case VK_FORMAT_A8B8G8R8_SINT_PACK32:
2327 case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
2328 return false; // These map to regular byte array formats
2334 return (format != VK_FORMAT_UNDEFINED);
2338 tcu::TextureFormat getChannelAccessFormat (tcu::TextureChannelClass type,
2339 deUint32 offsetBits,
2342 using tcu::TextureFormat;
2344 if (offsetBits == 0)
2346 static const TextureFormat::ChannelType s_size8[tcu::TEXTURECHANNELCLASS_LAST] =
2348 TextureFormat::SNORM_INT8, // snorm
2349 TextureFormat::UNORM_INT8, // unorm
2350 TextureFormat::SIGNED_INT8, // sint
2351 TextureFormat::UNSIGNED_INT8, // uint
2352 TextureFormat::CHANNELTYPE_LAST, // float
2354 static const TextureFormat::ChannelType s_size16[tcu::TEXTURECHANNELCLASS_LAST] =
2356 TextureFormat::SNORM_INT16, // snorm
2357 TextureFormat::UNORM_INT16, // unorm
2358 TextureFormat::SIGNED_INT16, // sint
2359 TextureFormat::UNSIGNED_INT16, // uint
2360 TextureFormat::HALF_FLOAT, // float
2362 static const TextureFormat::ChannelType s_size32[tcu::TEXTURECHANNELCLASS_LAST] =
2364 TextureFormat::SNORM_INT32, // snorm
2365 TextureFormat::UNORM_INT32, // unorm
2366 TextureFormat::SIGNED_INT32, // sint
2367 TextureFormat::UNSIGNED_INT32, // uint
2368 TextureFormat::FLOAT, // float
2371 TextureFormat::ChannelType chnType = TextureFormat::CHANNELTYPE_LAST;
2374 chnType = s_size8[type];
2375 else if (sizeBits == 16)
2376 chnType = s_size16[type];
2377 else if (sizeBits == 32)
2378 chnType = s_size32[type];
2380 if (chnType != TextureFormat::CHANNELTYPE_LAST)
2381 return TextureFormat(TextureFormat::R, chnType);
2385 if (type == tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT &&
2388 return TextureFormat(TextureFormat::R, TextureFormat::UNORM_SHORT_10);
2389 else if (type == tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT &&
2392 return TextureFormat(TextureFormat::R, TextureFormat::UNORM_SHORT_12);
2395 TCU_THROW(InternalError, "Channel access format is not supported");
2398 tcu::PixelBufferAccess getChannelAccess (const PlanarFormatDescription& formatInfo,
2399 const tcu::UVec2& size,
2400 const deUint32* planeRowPitches,
2401 void* const* planePtrs,
2402 deUint32 channelNdx)
2404 DE_ASSERT(formatInfo.hasChannelNdx(channelNdx));
2406 const deUint32 planeNdx = formatInfo.channels[channelNdx].planeNdx;
2407 const deUint32 planeOffsetBytes = formatInfo.channels[channelNdx].offsetBits / 8;
2408 const deUint32 valueOffsetBits = formatInfo.channels[channelNdx].offsetBits % 8;
2409 const deUint32 pixelStrideBytes = formatInfo.channels[channelNdx].strideBytes;
2411 DE_ASSERT(size.x() % formatInfo.planes[planeNdx].widthDivisor == 0);
2412 DE_ASSERT(size.y() % formatInfo.planes[planeNdx].heightDivisor == 0);
2414 deUint32 accessWidth = size.x() / formatInfo.planes[planeNdx].widthDivisor;
2415 const deUint32 accessHeight = size.y() / formatInfo.planes[planeNdx].heightDivisor;
2416 const deUint32 elementSizeBytes = formatInfo.planes[planeNdx].elementSizeBytes;
2418 const deUint32 rowPitch = planeRowPitches[planeNdx];
2420 if (pixelStrideBytes != elementSizeBytes)
2422 DE_ASSERT(elementSizeBytes % pixelStrideBytes == 0);
2423 accessWidth *= elementSizeBytes/pixelStrideBytes;
2426 return tcu::PixelBufferAccess(getChannelAccessFormat((tcu::TextureChannelClass)formatInfo.channels[channelNdx].type,
2428 formatInfo.channels[channelNdx].sizeBits),
2429 tcu::IVec3((int)accessWidth, (int)accessHeight, 1),
2430 tcu::IVec3((int)pixelStrideBytes, (int)rowPitch, 0),
2431 (deUint8*)planePtrs[planeNdx] + planeOffsetBytes);
2435 tcu::ConstPixelBufferAccess getChannelAccess (const PlanarFormatDescription& formatInfo,
2436 const tcu::UVec2& size,
2437 const deUint32* planeRowPitches,
2438 const void* const* planePtrs,
2439 deUint32 channelNdx)
2441 return getChannelAccess(formatInfo, size, planeRowPitches, const_cast<void* const*>(planePtrs), channelNdx);
2444 void imageUtilSelfTest (void)
2446 for (int formatNdx = 0; formatNdx < VK_CORE_FORMAT_LAST; formatNdx++)
2448 const VkFormat format = (VkFormat)formatNdx;
2450 if (format == VK_FORMAT_R64_UINT ||
2451 format == VK_FORMAT_R64_SINT ||
2452 format == VK_FORMAT_R64G64_UINT ||
2453 format == VK_FORMAT_R64G64_SINT ||
2454 format == VK_FORMAT_R64G64B64_UINT ||
2455 format == VK_FORMAT_R64G64B64_SINT ||
2456 format == VK_FORMAT_R64G64B64A64_UINT ||
2457 format == VK_FORMAT_R64G64B64A64_SINT)
2458 continue; // \todo [2015-12-05 pyry] Add framework support for (u)int64 channel type
2460 if (format != VK_FORMAT_UNDEFINED && !isCompressedFormat(format))
2462 const tcu::TextureFormat tcuFormat = mapVkFormat(format);
2463 const VkFormat remappedFormat = mapTextureFormat(tcuFormat);
2465 DE_TEST_ASSERT(isValid(tcuFormat));
2467 if (fullTextureFormatRoundTripSupported(format))
2468 DE_TEST_ASSERT(format == remappedFormat);
2472 for (int formatNdx = VK_FORMAT_G8B8G8R8_422_UNORM_KHR; formatNdx <= VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR; formatNdx++)
2474 const VkFormat format = (VkFormat)formatNdx;
2475 const PlanarFormatDescription& info = getPlanarFormatDescription(format);
2477 DE_TEST_ASSERT(isYCbCrFormat(format));
2478 DE_TEST_ASSERT(de::inRange<deUint8>(info.numPlanes, 1u, 3u));
2479 DE_TEST_ASSERT(info.numPlanes == getPlaneCount(format));
2483 struct CompressedFormatParameters
2486 deUint32 blockBytes;
2487 deUint32 blockWidth;
2488 deUint32 blockHeight;
2491 CompressedFormatParameters compressedFormatParameters[VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_BC1_RGB_UNORM_BLOCK + 1] =
2493 { VK_FORMAT_BC1_RGB_UNORM_BLOCK, 8, 4, 4 },
2494 { VK_FORMAT_BC1_RGB_SRGB_BLOCK, 8, 4, 4 },
2495 { VK_FORMAT_BC1_RGBA_UNORM_BLOCK, 8, 4, 4 },
2496 { VK_FORMAT_BC1_RGBA_SRGB_BLOCK, 8, 4, 4 },
2497 { VK_FORMAT_BC2_UNORM_BLOCK, 16, 4, 4 },
2498 { VK_FORMAT_BC2_SRGB_BLOCK, 16, 4, 4 },
2499 { VK_FORMAT_BC3_UNORM_BLOCK, 16, 4, 4 },
2500 { VK_FORMAT_BC3_SRGB_BLOCK, 16, 4, 4 },
2501 { VK_FORMAT_BC4_UNORM_BLOCK, 8, 4, 4 },
2502 { VK_FORMAT_BC4_SNORM_BLOCK, 8, 4, 4 },
2503 { VK_FORMAT_BC5_UNORM_BLOCK, 16, 4, 4 },
2504 { VK_FORMAT_BC5_SNORM_BLOCK, 16, 4, 4 },
2505 { VK_FORMAT_BC6H_UFLOAT_BLOCK, 16, 4, 4 },
2506 { VK_FORMAT_BC6H_SFLOAT_BLOCK, 16, 4, 4 },
2507 { VK_FORMAT_BC7_UNORM_BLOCK, 16, 4, 4 },
2508 { VK_FORMAT_BC7_SRGB_BLOCK, 16, 4, 4 },
2509 { VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, 8, 4, 4 },
2510 { VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, 8, 4, 4 },
2511 { VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, 8, 4, 4 },
2512 { VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, 8, 4, 4 },
2513 { VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, 16, 4, 4 },
2514 { VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, 16, 4, 4 },
2515 { VK_FORMAT_EAC_R11_UNORM_BLOCK, 8, 4, 4 },
2516 { VK_FORMAT_EAC_R11_SNORM_BLOCK, 8, 4, 4 },
2517 { VK_FORMAT_EAC_R11G11_UNORM_BLOCK, 16, 4, 4 },
2518 { VK_FORMAT_EAC_R11G11_SNORM_BLOCK, 16, 4, 4 },
2519 { VK_FORMAT_ASTC_4x4_UNORM_BLOCK, 16, 4, 4 },
2520 { VK_FORMAT_ASTC_4x4_SRGB_BLOCK, 16, 4, 4 },
2521 { VK_FORMAT_ASTC_5x4_UNORM_BLOCK, 16, 5, 4 },
2522 { VK_FORMAT_ASTC_5x4_SRGB_BLOCK, 16, 5, 4 },
2523 { VK_FORMAT_ASTC_5x5_UNORM_BLOCK, 16, 5, 5 },
2524 { VK_FORMAT_ASTC_5x5_SRGB_BLOCK, 16, 5, 5 },
2525 { VK_FORMAT_ASTC_6x5_UNORM_BLOCK, 16, 6, 5 },
2526 { VK_FORMAT_ASTC_6x5_SRGB_BLOCK, 16, 6, 5 },
2527 { VK_FORMAT_ASTC_6x6_UNORM_BLOCK, 16, 6, 6 },
2528 { VK_FORMAT_ASTC_6x6_SRGB_BLOCK, 16, 6, 6 },
2529 { VK_FORMAT_ASTC_8x5_UNORM_BLOCK, 16, 8, 5 },
2530 { VK_FORMAT_ASTC_8x5_SRGB_BLOCK, 16, 8, 5 },
2531 { VK_FORMAT_ASTC_8x6_UNORM_BLOCK, 16, 8, 6 },
2532 { VK_FORMAT_ASTC_8x6_SRGB_BLOCK, 16, 8, 6 },
2533 { VK_FORMAT_ASTC_8x8_UNORM_BLOCK, 16, 8, 8 },
2534 { VK_FORMAT_ASTC_8x8_SRGB_BLOCK, 16, 8, 8 },
2535 { VK_FORMAT_ASTC_10x5_UNORM_BLOCK, 16, 10, 5 },
2536 { VK_FORMAT_ASTC_10x5_SRGB_BLOCK, 16, 10, 5 },
2537 { VK_FORMAT_ASTC_10x6_UNORM_BLOCK, 16, 10, 6 },
2538 { VK_FORMAT_ASTC_10x6_SRGB_BLOCK, 16, 10, 6 },
2539 { VK_FORMAT_ASTC_10x8_UNORM_BLOCK, 16, 10, 8 },
2540 { VK_FORMAT_ASTC_10x8_SRGB_BLOCK, 16, 10, 8 },
2541 { VK_FORMAT_ASTC_10x10_UNORM_BLOCK, 16, 10, 10 },
2542 { VK_FORMAT_ASTC_10x10_SRGB_BLOCK, 16, 10, 10 },
2543 { VK_FORMAT_ASTC_12x10_UNORM_BLOCK, 16, 12, 10 },
2544 { VK_FORMAT_ASTC_12x10_SRGB_BLOCK, 16, 12, 10 },
2545 { VK_FORMAT_ASTC_12x12_UNORM_BLOCK, 16, 12, 12 },
2546 { VK_FORMAT_ASTC_12x12_SRGB_BLOCK, 16, 12, 12 }
2549 deUint32 getFormatComponentWidth (const VkFormat format, const deUint32 componentNdx)
2551 const tcu::TextureFormat tcuFormat (mapVkFormat(format));
2552 const deUint32 componentCount (tcu::getNumUsedChannels(tcuFormat.order));
2554 if (componentNdx >= componentCount)
2555 DE_FATAL("Component index out of range");
2558 switch (tcuFormat.type)
2560 case tcu::TextureFormat::UNORM_INT8:
2561 case tcu::TextureFormat::SNORM_INT8:
2562 case tcu::TextureFormat::UNSIGNED_INT8:
2563 case tcu::TextureFormat::SIGNED_INT8:
2566 case tcu::TextureFormat::UNORM_SHORT_12:
2569 case tcu::TextureFormat::UNORM_INT16:
2570 case tcu::TextureFormat::SNORM_INT16:
2571 case tcu::TextureFormat::UNSIGNED_INT16:
2572 case tcu::TextureFormat::SIGNED_INT16:
2575 case tcu::TextureFormat::UNORM_INT24:
2576 case tcu::TextureFormat::UNSIGNED_INT24:
2579 case tcu::TextureFormat::UNORM_INT32:
2580 case tcu::TextureFormat::SNORM_INT32:
2581 case tcu::TextureFormat::UNSIGNED_INT32:
2582 case tcu::TextureFormat::SIGNED_INT32:
2585 case tcu::TextureFormat::FLOAT64:
2589 case tcu::TextureFormat::UNORM_SHORT_4444:
2590 case tcu::TextureFormat::UNSIGNED_SHORT_4444:
2593 case tcu::TextureFormat::UNORM_SHORT_565:
2594 case tcu::TextureFormat::UNSIGNED_SHORT_565:
2595 return (componentNdx == 1 ? 6 : 5);
2597 case tcu::TextureFormat::UNSIGNED_INT_24_8:
2598 case tcu::TextureFormat::UNSIGNED_INT_24_8_REV:
2599 case tcu::TextureFormat::FLOAT_UNSIGNED_INT_24_8_REV:
2600 return (componentNdx == 0 ? 24 : 8);
2602 case tcu::TextureFormat::UNORM_SHORT_1555:
2603 return (componentNdx == 0 ? 1 : 5);
2605 case tcu::TextureFormat::UNORM_INT_1010102_REV:
2606 case tcu::TextureFormat::SNORM_INT_1010102_REV:
2607 case tcu::TextureFormat::UNSIGNED_INT_1010102_REV:
2608 case tcu::TextureFormat::SIGNED_INT_1010102_REV:
2609 return (componentNdx == 3 ? 2 : 10);
2612 DE_FATAL("Format unimplemented");
2619 float getRepresentableDiffUnorm (const VkFormat format, const deUint32 componentNdx)
2621 const deUint32 size (getFormatComponentWidth(format, componentNdx));
2623 return 1.0f / float((1 << (size)) - 1);
2626 float getRepresentableDiffSnorm (const VkFormat format, const deUint32 componentNdx)
2628 const deUint32 size (getFormatComponentWidth(format, componentNdx));
2630 return 1.0f / float((1 << (size - 1)) - 1);
2633 deUint32 getBlockSizeInBytes (const VkFormat compressedFormat)
2635 deUint32 formatNdx = static_cast<deUint32>(compressedFormat - VK_FORMAT_BC1_RGB_UNORM_BLOCK);
2637 DE_ASSERT(deInRange32(formatNdx, 0, DE_LENGTH_OF_ARRAY(compressedFormatParameters)));
2638 DE_ASSERT(compressedFormatParameters[formatNdx].format == compressedFormat);
2640 return compressedFormatParameters[formatNdx].blockBytes;
2643 deUint32 getBlockWidth (const VkFormat compressedFormat)
2645 deUint32 formatNdx = static_cast<deUint32>(compressedFormat - VK_FORMAT_BC1_RGB_UNORM_BLOCK);
2647 DE_ASSERT(deInRange32(formatNdx, 0, DE_LENGTH_OF_ARRAY(compressedFormatParameters)));
2648 DE_ASSERT(compressedFormatParameters[formatNdx].format == compressedFormat);
2650 return compressedFormatParameters[formatNdx].blockWidth;
2653 deUint32 getBlockHeight (const VkFormat compressedFormat)
2655 deUint32 formatNdx = static_cast<deUint32>(compressedFormat - VK_FORMAT_BC1_RGB_UNORM_BLOCK);
2657 DE_ASSERT(deInRange32(formatNdx, 0, DE_LENGTH_OF_ARRAY(compressedFormatParameters)));
2658 DE_ASSERT(compressedFormatParameters[formatNdx].format == compressedFormat);
2660 return compressedFormatParameters[formatNdx].blockHeight;
2663 VkFilter mapFilterMode (tcu::Sampler::FilterMode filterMode)
2665 DE_STATIC_ASSERT(tcu::Sampler::FILTERMODE_LAST == 6);
2669 case tcu::Sampler::NEAREST: return VK_FILTER_NEAREST;
2670 case tcu::Sampler::LINEAR: return VK_FILTER_LINEAR;
2671 case tcu::Sampler::NEAREST_MIPMAP_NEAREST: return VK_FILTER_NEAREST;
2672 case tcu::Sampler::NEAREST_MIPMAP_LINEAR: return VK_FILTER_NEAREST;
2673 case tcu::Sampler::LINEAR_MIPMAP_NEAREST: return VK_FILTER_LINEAR;
2674 case tcu::Sampler::LINEAR_MIPMAP_LINEAR: return VK_FILTER_LINEAR;
2676 DE_FATAL("Illegal filter mode");
2681 VkSamplerMipmapMode mapMipmapMode (tcu::Sampler::FilterMode filterMode)
2683 DE_STATIC_ASSERT(tcu::Sampler::FILTERMODE_LAST == 6);
2685 // \note VkSamplerCreateInfo doesn't have a flag for disabling mipmapping. Instead
2686 // minLod = 0 and maxLod = 0.25 should be used to match OpenGL NEAREST and LINEAR
2687 // filtering mode behavior.
2691 case tcu::Sampler::NEAREST: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
2692 case tcu::Sampler::LINEAR: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
2693 case tcu::Sampler::NEAREST_MIPMAP_NEAREST: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
2694 case tcu::Sampler::NEAREST_MIPMAP_LINEAR: return VK_SAMPLER_MIPMAP_MODE_LINEAR;
2695 case tcu::Sampler::LINEAR_MIPMAP_NEAREST: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
2696 case tcu::Sampler::LINEAR_MIPMAP_LINEAR: return VK_SAMPLER_MIPMAP_MODE_LINEAR;
2698 DE_FATAL("Illegal filter mode");
2699 return (VkSamplerMipmapMode)0;
2703 VkSamplerAddressMode mapWrapMode (tcu::Sampler::WrapMode wrapMode)
2707 case tcu::Sampler::CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
2708 case tcu::Sampler::CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
2709 case tcu::Sampler::REPEAT_GL: return VK_SAMPLER_ADDRESS_MODE_REPEAT;
2710 case tcu::Sampler::MIRRORED_REPEAT_GL: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
2711 case tcu::Sampler::MIRRORED_ONCE: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
2713 DE_FATAL("Wrap mode can't be mapped to Vulkan");
2714 return (vk::VkSamplerAddressMode)0;
2718 vk::VkCompareOp mapCompareMode (tcu::Sampler::CompareMode mode)
2722 case tcu::Sampler::COMPAREMODE_NONE: return vk::VK_COMPARE_OP_NEVER;
2723 case tcu::Sampler::COMPAREMODE_LESS: return vk::VK_COMPARE_OP_LESS;
2724 case tcu::Sampler::COMPAREMODE_LESS_OR_EQUAL: return vk::VK_COMPARE_OP_LESS_OR_EQUAL;
2725 case tcu::Sampler::COMPAREMODE_GREATER: return vk::VK_COMPARE_OP_GREATER;
2726 case tcu::Sampler::COMPAREMODE_GREATER_OR_EQUAL: return vk::VK_COMPARE_OP_GREATER_OR_EQUAL;
2727 case tcu::Sampler::COMPAREMODE_EQUAL: return vk::VK_COMPARE_OP_EQUAL;
2728 case tcu::Sampler::COMPAREMODE_NOT_EQUAL: return vk::VK_COMPARE_OP_NOT_EQUAL;
2729 case tcu::Sampler::COMPAREMODE_ALWAYS: return vk::VK_COMPARE_OP_ALWAYS;
2730 case tcu::Sampler::COMPAREMODE_NEVER: return vk::VK_COMPARE_OP_NEVER;
2732 DE_FATAL("Illegal compare mode");
2733 return (vk::VkCompareOp)0;
2737 static VkBorderColor mapBorderColor (tcu::TextureChannelClass channelClass, const rr::GenericVec4& color)
2739 if (channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
2741 const tcu::UVec4 uColor = color.get<deUint32>();
2743 if (uColor == tcu::UVec4(0, 0, 0, 0)) return VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
2744 else if (uColor == tcu::UVec4(0, 0, 0, 1)) return VK_BORDER_COLOR_INT_OPAQUE_BLACK;
2745 else if (uColor == tcu::UVec4(1, 1, 1, 1)) return VK_BORDER_COLOR_INT_OPAQUE_WHITE;
2747 else if (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)
2749 const tcu::IVec4 sColor = color.get<deInt32>();
2751 if (sColor == tcu::IVec4(0, 0, 0, 0)) return VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
2752 else if (sColor == tcu::IVec4(0, 0, 0, 1)) return VK_BORDER_COLOR_INT_OPAQUE_BLACK;
2753 else if (sColor == tcu::IVec4(1, 1, 1, 1)) return VK_BORDER_COLOR_INT_OPAQUE_WHITE;
2757 const tcu::Vec4 fColor = color.get<float>();
2759 if (fColor == tcu::Vec4(0.0f, 0.0f, 0.0f, 0.0f)) return VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK;
2760 else if (fColor == tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f)) return VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
2761 else if (fColor == tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f)) return VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
2764 DE_FATAL("Unsupported border color");
2765 return VK_BORDER_COLOR_LAST;
2768 VkSamplerCreateInfo mapSampler (const tcu::Sampler& sampler, const tcu::TextureFormat& format, float minLod, float maxLod, bool unnormal)
2770 const bool compareEnabled = (sampler.compare != tcu::Sampler::COMPAREMODE_NONE);
2771 const VkCompareOp compareOp = (compareEnabled) ? (mapCompareMode(sampler.compare)) : (VK_COMPARE_OP_ALWAYS);
2772 const VkBorderColor borderColor = mapBorderColor(getTextureChannelClass(format.type), sampler.borderColor);
2773 const bool isMipmapEnabled = (sampler.minFilter != tcu::Sampler::NEAREST && sampler.minFilter != tcu::Sampler::LINEAR);
2775 const VkSamplerCreateInfo createInfo =
2777 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
2779 (VkSamplerCreateFlags)0,
2780 mapFilterMode(sampler.magFilter), // magFilter
2781 mapFilterMode(sampler.minFilter), // minFilter
2782 mapMipmapMode(sampler.minFilter), // mipMode
2783 mapWrapMode(sampler.wrapS), // addressU
2784 mapWrapMode(sampler.wrapT), // addressV
2785 mapWrapMode(sampler.wrapR), // addressW
2787 VK_FALSE, // anisotropyEnable
2788 1.0f, // maxAnisotropy
2789 (VkBool32)(compareEnabled ? VK_TRUE : VK_FALSE), // compareEnable
2790 compareOp, // compareOp
2791 (isMipmapEnabled ? minLod : 0.0f), // minLod
2792 (isMipmapEnabled ? maxLod : (unnormal ? 0.0f : 0.25f)), // maxLod
2793 borderColor, // borderColor
2794 (VkBool32)(sampler.normalizedCoords ? VK_FALSE : VK_TRUE), // unnormalizedCoords
2800 tcu::Sampler mapVkSampler (const VkSamplerCreateInfo& samplerCreateInfo)
2802 // \note minLod & maxLod are not supported by tcu::Sampler. LOD must be clamped
2803 // before passing it to tcu::Texture*::sample*()
2805 tcu::Sampler::ReductionMode reductionMode = tcu::Sampler::WEIGHTED_AVERAGE;
2807 void const *pNext = samplerCreateInfo.pNext;
2808 while (pNext != DE_NULL)
2810 const VkStructureType nextType = *reinterpret_cast<const VkStructureType*>(pNext);
2813 case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT:
2815 const VkSamplerReductionModeCreateInfo reductionModeCreateInfo = *reinterpret_cast<const VkSamplerReductionModeCreateInfo*>(pNext);
2816 reductionMode = mapVkSamplerReductionMode(reductionModeCreateInfo.reductionMode);
2817 pNext = reinterpret_cast<const VkSamplerReductionModeCreateInfo*>(pNext)->pNext;
2820 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
2821 pNext = reinterpret_cast<const VkSamplerYcbcrConversionInfo*>(pNext)->pNext;
2824 TCU_FAIL("Unrecognized sType in chained sampler create info");
2830 tcu::Sampler sampler(mapVkSamplerAddressMode(samplerCreateInfo.addressModeU),
2831 mapVkSamplerAddressMode(samplerCreateInfo.addressModeV),
2832 mapVkSamplerAddressMode(samplerCreateInfo.addressModeW),
2833 mapVkMinTexFilter(samplerCreateInfo.minFilter, samplerCreateInfo.mipmapMode),
2834 mapVkMagTexFilter(samplerCreateInfo.magFilter),
2836 !samplerCreateInfo.unnormalizedCoordinates,
2837 samplerCreateInfo.compareEnable ? mapVkSamplerCompareOp(samplerCreateInfo.compareOp)
2838 : tcu::Sampler::COMPAREMODE_NONE,
2840 tcu::Vec4(0.0f, 0.0f, 0.0f, 0.0f),
2842 tcu::Sampler::MODE_DEPTH,
2845 if (samplerCreateInfo.anisotropyEnable)
2846 TCU_THROW(InternalError, "Anisotropic filtering is not supported by tcu::Sampler");
2848 switch (samplerCreateInfo.borderColor)
2850 case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
2851 sampler.borderColor = tcu::UVec4(0,0,0,1);
2853 case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
2854 sampler.borderColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
2856 case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
2857 sampler.borderColor = tcu::UVec4(1, 1, 1, 1);
2859 case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
2860 sampler.borderColor = tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f);
2862 case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
2863 sampler.borderColor = tcu::UVec4(0,0,0,0);
2865 case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
2866 sampler.borderColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 0.0f);
2877 tcu::Sampler::CompareMode mapVkSamplerCompareOp (VkCompareOp compareOp)
2881 case VK_COMPARE_OP_NEVER: return tcu::Sampler::COMPAREMODE_NEVER;
2882 case VK_COMPARE_OP_LESS: return tcu::Sampler::COMPAREMODE_LESS;
2883 case VK_COMPARE_OP_EQUAL: return tcu::Sampler::COMPAREMODE_EQUAL;
2884 case VK_COMPARE_OP_LESS_OR_EQUAL: return tcu::Sampler::COMPAREMODE_LESS_OR_EQUAL;
2885 case VK_COMPARE_OP_GREATER: return tcu::Sampler::COMPAREMODE_GREATER;
2886 case VK_COMPARE_OP_NOT_EQUAL: return tcu::Sampler::COMPAREMODE_NOT_EQUAL;
2887 case VK_COMPARE_OP_GREATER_OR_EQUAL: return tcu::Sampler::COMPAREMODE_GREATER_OR_EQUAL;
2888 case VK_COMPARE_OP_ALWAYS: return tcu::Sampler::COMPAREMODE_ALWAYS;
2894 return tcu::Sampler::COMPAREMODE_LAST;
2897 tcu::Sampler::WrapMode mapVkSamplerAddressMode (VkSamplerAddressMode addressMode)
2899 switch (addressMode)
2901 case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE: return tcu::Sampler::CLAMP_TO_EDGE;
2902 case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER: return tcu::Sampler::CLAMP_TO_BORDER;
2903 case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT: return tcu::Sampler::MIRRORED_REPEAT_GL;
2904 case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE: return tcu::Sampler::MIRRORED_ONCE;
2905 case VK_SAMPLER_ADDRESS_MODE_REPEAT: return tcu::Sampler::REPEAT_GL;
2911 return tcu::Sampler::WRAPMODE_LAST;
2914 tcu::Sampler::ReductionMode mapVkSamplerReductionMode (VkSamplerReductionMode reductionMode)
2916 switch (reductionMode)
2918 case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE: return tcu::Sampler::WEIGHTED_AVERAGE;
2919 case VK_SAMPLER_REDUCTION_MODE_MIN: return tcu::Sampler::MIN;
2920 case VK_SAMPLER_REDUCTION_MODE_MAX: return tcu::Sampler::MAX;
2926 return tcu::Sampler::REDUCTIONMODE_LAST;
2929 tcu::Sampler::FilterMode mapVkMinTexFilter (VkFilter filter, VkSamplerMipmapMode mipMode)
2933 case VK_FILTER_LINEAR:
2936 case VK_SAMPLER_MIPMAP_MODE_LINEAR: return tcu::Sampler::LINEAR_MIPMAP_LINEAR;
2937 case VK_SAMPLER_MIPMAP_MODE_NEAREST: return tcu::Sampler::LINEAR_MIPMAP_NEAREST;
2943 case VK_FILTER_NEAREST:
2946 case VK_SAMPLER_MIPMAP_MODE_LINEAR: return tcu::Sampler::NEAREST_MIPMAP_LINEAR;
2947 case VK_SAMPLER_MIPMAP_MODE_NEAREST: return tcu::Sampler::NEAREST_MIPMAP_NEAREST;
2958 return tcu::Sampler::FILTERMODE_LAST;
2961 tcu::Sampler::FilterMode mapVkMagTexFilter (VkFilter filter)
2965 case VK_FILTER_LINEAR: return tcu::Sampler::LINEAR;
2966 case VK_FILTER_NEAREST: return tcu::Sampler::NEAREST;
2972 return tcu::Sampler::FILTERMODE_LAST;
2975 //! Get a format the matches the layout in buffer memory used for a
2976 //! buffer<->image copy on a depth/stencil format.
2977 tcu::TextureFormat getDepthCopyFormat (VkFormat combinedFormat)
2979 switch (combinedFormat)
2981 case VK_FORMAT_D16_UNORM:
2982 case VK_FORMAT_X8_D24_UNORM_PACK32:
2983 case VK_FORMAT_D32_SFLOAT:
2984 return mapVkFormat(combinedFormat);
2986 case VK_FORMAT_D16_UNORM_S8_UINT:
2987 return mapVkFormat(VK_FORMAT_D16_UNORM);
2988 case VK_FORMAT_D24_UNORM_S8_UINT:
2989 return mapVkFormat(VK_FORMAT_X8_D24_UNORM_PACK32);
2990 case VK_FORMAT_D32_SFLOAT_S8_UINT:
2991 return mapVkFormat(VK_FORMAT_D32_SFLOAT);
2993 case VK_FORMAT_S8_UINT:
2995 DE_FATAL("Unexpected depth/stencil format");
2996 return tcu::TextureFormat();
3000 //! Get a format the matches the layout in buffer memory used for a
3001 //! buffer<->image copy on a depth/stencil format.
3002 tcu::TextureFormat getStencilCopyFormat (VkFormat combinedFormat)
3004 switch (combinedFormat)
3006 case VK_FORMAT_D16_UNORM_S8_UINT:
3007 case VK_FORMAT_D24_UNORM_S8_UINT:
3008 case VK_FORMAT_D32_SFLOAT_S8_UINT:
3009 case VK_FORMAT_S8_UINT:
3010 return mapVkFormat(VK_FORMAT_S8_UINT);
3012 case VK_FORMAT_D16_UNORM:
3013 case VK_FORMAT_X8_D24_UNORM_PACK32:
3014 case VK_FORMAT_D32_SFLOAT:
3016 DE_FATAL("Unexpected depth/stencil format");
3017 return tcu::TextureFormat();
3021 VkImageAspectFlags getImageAspectFlags (const tcu::TextureFormat textureFormat)
3023 VkImageAspectFlags imageAspectFlags = 0;
3025 if (tcu::hasDepthComponent(textureFormat.order))
3026 imageAspectFlags |= VK_IMAGE_ASPECT_DEPTH_BIT;
3028 if (tcu::hasStencilComponent(textureFormat.order))
3029 imageAspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
3031 if (imageAspectFlags == 0)
3032 imageAspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
3034 return imageAspectFlags;
3037 VkExtent3D mipLevelExtents (const VkExtent3D& baseExtents, const deUint32 mipLevel)
3041 result.width = std::max(baseExtents.width >> mipLevel, 1u);
3042 result.height = std::max(baseExtents.height >> mipLevel, 1u);
3043 result.depth = std::max(baseExtents.depth >> mipLevel, 1u);
3048 tcu::UVec3 alignedDivide (const VkExtent3D& extent, const VkExtent3D& divisor)
3052 result.x() = extent.width / divisor.width + ((extent.width % divisor.width != 0) ? 1u : 0u);
3053 result.y() = extent.height / divisor.height + ((extent.height % divisor.height != 0) ? 1u : 0u);
3054 result.z() = extent.depth / divisor.depth + ((extent.depth % divisor.depth != 0) ? 1u : 0u);
3059 void copyBufferToImage (const DeviceInterface& vk,
3060 const VkCommandBuffer& cmdBuffer,
3061 const VkBuffer& buffer,
3062 VkDeviceSize bufferSize,
3063 const std::vector<VkBufferImageCopy>& copyRegions,
3064 VkImageAspectFlags imageAspectFlags,
3066 deUint32 arrayLayers,
3068 VkImageLayout destImageLayout,
3069 VkPipelineStageFlags destImageDstStageFlags)
3071 // Barriers for copying buffer to image
3072 const VkBufferMemoryBarrier preBufferBarrier =
3074 VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
3075 DE_NULL, // const void* pNext;
3076 VK_ACCESS_HOST_WRITE_BIT, // VkAccessFlags srcAccessMask;
3077 VK_ACCESS_TRANSFER_READ_BIT, // VkAccessFlags dstAccessMask;
3078 VK_QUEUE_FAMILY_IGNORED, // deUint32 srcQueueFamilyIndex;
3079 VK_QUEUE_FAMILY_IGNORED, // deUint32 dstQueueFamilyIndex;
3080 buffer, // VkBuffer buffer;
3081 0u, // VkDeviceSize offset;
3082 bufferSize // VkDeviceSize size;
3085 const VkImageMemoryBarrier preImageBarrier =
3087 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3088 DE_NULL, // const void* pNext;
3089 0u, // VkAccessFlags srcAccessMask;
3090 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3091 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout;
3092 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout;
3093 VK_QUEUE_FAMILY_IGNORED, // deUint32 srcQueueFamilyIndex;
3094 VK_QUEUE_FAMILY_IGNORED, // deUint32 dstQueueFamilyIndex;
3095 destImage, // VkImage image;
3096 { // VkImageSubresourceRange subresourceRange;
3097 imageAspectFlags, // VkImageAspectFlags aspect;
3098 0u, // deUint32 baseMipLevel;
3099 mipLevels, // deUint32 mipLevels;
3100 0u, // deUint32 baseArraySlice;
3101 arrayLayers // deUint32 arraySize;
3105 const VkImageMemoryBarrier postImageBarrier =
3107 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3108 DE_NULL, // const void* pNext;
3109 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
3110 VK_ACCESS_SHADER_READ_BIT, // VkAccessFlags dstAccessMask;
3111 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout;
3112 destImageLayout, // VkImageLayout newLayout;
3113 VK_QUEUE_FAMILY_IGNORED, // deUint32 srcQueueFamilyIndex;
3114 VK_QUEUE_FAMILY_IGNORED, // deUint32 dstQueueFamilyIndex;
3115 destImage, // VkImage image;
3116 { // VkImageSubresourceRange subresourceRange;
3117 imageAspectFlags, // VkImageAspectFlags aspect;
3118 0u, // deUint32 baseMipLevel;
3119 mipLevels, // deUint32 mipLevels;
3120 0u, // deUint32 baseArraySlice;
3121 arrayLayers // deUint32 arraySize;
3125 // Copy buffer to image
3126 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &preBufferBarrier, 1, &preImageBarrier);
3127 vk.cmdCopyBufferToImage(cmdBuffer, buffer, destImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)copyRegions.size(), copyRegions.data());
3128 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, destImageDstStageFlags, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3131 void copyBufferToImage (const DeviceInterface& vk,
3134 deUint32 queueFamilyIndex,
3135 const VkBuffer& buffer,
3136 VkDeviceSize bufferSize,
3137 const std::vector<VkBufferImageCopy>& copyRegions,
3138 const VkSemaphore* waitSemaphore,
3139 VkImageAspectFlags imageAspectFlags,
3141 deUint32 arrayLayers,
3143 VkImageLayout destImageLayout,
3144 VkPipelineStageFlags destImageDstStageFlags)
3146 Move<VkCommandPool> cmdPool = createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
3147 Move<VkCommandBuffer> cmdBuffer = allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
3148 Move<VkFence> fence = createFence(vk, device);
3150 const VkCommandBufferBeginInfo cmdBufferBeginInfo =
3152 VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, // VkStructureType sType;
3153 DE_NULL, // const void* pNext;
3154 VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, // VkCommandBufferUsageFlags flags;
3155 (const VkCommandBufferInheritanceInfo*)DE_NULL,
3158 VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
3159 copyBufferToImage(vk, *cmdBuffer, buffer, bufferSize, copyRegions, imageAspectFlags, mipLevels, arrayLayers, destImage, destImageLayout, destImageDstStageFlags);
3160 VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
3162 const VkPipelineStageFlags pipelineStageFlags = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
3164 const VkSubmitInfo submitInfo =
3166 VK_STRUCTURE_TYPE_SUBMIT_INFO, // VkStructureType sType;
3167 DE_NULL, // const void* pNext;
3168 waitSemaphore ? 1u : 0u, // deUint32 waitSemaphoreCount;
3169 waitSemaphore, // const VkSemaphore* pWaitSemaphores;
3170 &pipelineStageFlags, // const VkPipelineStageFlags* pWaitDstStageMask;
3171 1u, // deUint32 commandBufferCount;
3172 &cmdBuffer.get(), // const VkCommandBuffer* pCommandBuffers;
3173 0u, // deUint32 signalSemaphoreCount;
3174 DE_NULL // const VkSemaphore* pSignalSemaphores;
3179 VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
3180 VK_CHECK(vk.waitForFences(device, 1, &fence.get(), true, ~(0ull) /* infinity */));
3184 VK_CHECK(vk.deviceWaitIdle(device));
// Record commands that read back a single-mip, numLayers-layer image into a buffer:
//   1) image barrier: oldLayout/srcAccessMask -> TRANSFER_SRC_OPTIMAL / TRANSFER_READ
//      (barrierAspect selects the aspects transitioned; copyAspect selects what is copied)
//   2) vkCmdCopyImageToBuffer of mip level 0, layers [0, numLayers)
//   3) buffer barrier TRANSFER_WRITE -> HOST_READ so the host can map and read the result.
// The caller submits 'cmdBuffer' and waits.
// NOTE(review): the declaration lines for the image/buffer/size/numLayers parameters are
// elided from this view of the file — confirm against the full source.
3189 void copyImageToBuffer (const DeviceInterface& vk,
3190 VkCommandBuffer cmdBuffer,
3194 VkAccessFlags srcAccessMask,
3195 VkImageLayout oldLayout,
3197 VkImageAspectFlags barrierAspect,
3198 VkImageAspectFlags copyAspect)
3200 const VkImageMemoryBarrier imageBarrier =
3202 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3203 DE_NULL, // const void* pNext;
3204 srcAccessMask, // VkAccessFlags srcAccessMask;
3205 VK_ACCESS_TRANSFER_READ_BIT, // VkAccessFlags dstAccessMask;
3206 oldLayout, // VkImageLayout oldLayout;
3207 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout;
3208 VK_QUEUE_FAMILY_IGNORED, // deUint32 srcQueueFamilyIndex;
3209 VK_QUEUE_FAMILY_IGNORED, // deUint32 destQueueFamilyIndex;
3210 image, // VkImage image;
3211 makeImageSubresourceRange(barrierAspect, 0u, 1u, 0, numLayers) // VkImageSubresourceRange subresourceRange;
3214 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
3215 0u, DE_NULL, 0u, DE_NULL, 1u, &imageBarrier);
3217 const VkImageSubresourceLayers subresource =
3219 copyAspect, // VkImageAspectFlags aspectMask;
3220 0u, // deUint32 mipLevel;
3221 0u, // deUint32 baseArrayLayer;
3222 numLayers // deUint32 layerCount;
3225 const VkBufferImageCopy region =
3227 0ull, // VkDeviceSize bufferOffset;
3228 0u, // deUint32 bufferRowLength;
3229 0u, // deUint32 bufferImageHeight;
3230 subresource, // VkImageSubresourceLayers imageSubresource;
3231 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
3232 makeExtent3D(size.x(), size.y(), 1u) // VkExtent3D imageExtent;
// Fixed mojibake: '&reg' had been mis-decoded into the '®' character, turning the
// address-of expression '&region' into the invalid token '®ion'.
3235 vk.cmdCopyImageToBuffer(cmdBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer, 1u, &region);
3237 const VkBufferMemoryBarrier bufferBarrier =
3239 VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
3240 DE_NULL, // const void* pNext;
3241 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
3242 VK_ACCESS_HOST_READ_BIT, // VkAccessFlags dstAccessMask;
3243 VK_QUEUE_FAMILY_IGNORED, // deUint32 srcQueueFamilyIndex;
3244 VK_QUEUE_FAMILY_IGNORED, // deUint32 dstQueueFamilyIndex;
3245 buffer, // VkBuffer buffer;
3246 0ull, // VkDeviceSize offset;
3247 VK_WHOLE_SIZE // VkDeviceSize size;
3250 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u,
3251 0u, DE_NULL, 1u, &bufferBarrier, 0u, DE_NULL);
// Clear a single-mip, single-layer color image to 'clearColor' on 'queue', then
// transition it from TRANSFER_DST_OPTIMAL to 'newLayout'. Builds, submits, and waits
// on its own transient command buffer, so the image is fully cleared on return.
// NOTE(review): the image parameter line and the dstStageFlags argument of the second
// pipeline barrier are elided from this view — confirm against the full source.
3254 void clearColorImage (const DeviceInterface& vk,
3255 const VkDevice device,
3256 const VkQueue queue,
3257 deUint32 queueFamilyIndex,
3259 tcu::Vec4 clearColor,
3260 VkImageLayout oldLayout,
3261 VkImageLayout newLayout,
3262 VkPipelineStageFlags dstStageFlags)
3264 Move<VkCommandPool> cmdPool = createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
3265 Move<VkCommandBuffer> cmdBuffer = allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
3267 const VkClearColorValue clearColorValue = makeClearValueColor(clearColor).color;
// Whole image: mip 0, layer 0 only.
3269 const VkImageSubresourceRange subresourceRange =
3271 VK_IMAGE_ASPECT_COLOR_BIT, // VkImageAspectFlags aspectMask
3272 0u, // deUint32 baseMipLevel
3273 1u, // deUint32 levelCount
3274 0u, // deUint32 baseArrayLayer
3275 1u, // deUint32 layerCount
// oldLayout -> TRANSFER_DST_OPTIMAL; no prior writes to wait on (srcAccessMask = 0).
3278 const VkImageMemoryBarrier preImageBarrier =
3280 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3281 DE_NULL, // const void* pNext;
3282 0u, // VkAccessFlags srcAccessMask;
3283 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3284 oldLayout, // VkImageLayout oldLayout;
3285 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout;
3286 queueFamilyIndex, // deUint32 srcQueueFamilyIndex;
3287 queueFamilyIndex, // deUint32 dstQueueFamilyIndex;
3288 image, // VkImage image;
3289 subresourceRange // VkImageSubresourceRange subresourceRange;
// TRANSFER_DST_OPTIMAL -> newLayout; makes the clear visible to subsequent shader writes.
3292 const VkImageMemoryBarrier postImageBarrier =
3294 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3295 DE_NULL, // const void* pNext;
3296 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
3297 VK_ACCESS_SHADER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3298 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout;
3299 newLayout, // VkImageLayout newLayout;
3300 queueFamilyIndex, // deUint32 srcQueueFamilyIndex;
3301 queueFamilyIndex, // deUint32 dstQueueFamilyIndex;
3302 image, // VkImage image;
3303 subresourceRange // VkImageSubresourceRange subresourceRange;
3306 beginCommandBuffer(vk, *cmdBuffer);
3307 vk.cmdPipelineBarrier(*cmdBuffer,
3308 VK_PIPELINE_STAGE_HOST_BIT,
3309 VK_PIPELINE_STAGE_TRANSFER_BIT,
3310 (VkDependencyFlags)0,
3311 0, (const VkMemoryBarrier*)DE_NULL,
3312 0, (const VkBufferMemoryBarrier*)DE_NULL,
3313 1, &preImageBarrier);
3314 vk.cmdClearColorImage(*cmdBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clearColorValue, 1, &subresourceRange);
// presumably the elided second argument here is dstStageFlags — verify against the full file.
3315 vk.cmdPipelineBarrier(*cmdBuffer,
3316 VK_PIPELINE_STAGE_TRANSFER_BIT,
3318 (VkDependencyFlags)0,
3319 0, (const VkMemoryBarrier*)DE_NULL,
3320 0, (const VkBufferMemoryBarrier*)DE_NULL,
3321 1, &postImageBarrier);
3322 endCommandBuffer(vk, *cmdBuffer);
3324 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
// Build the list of VkBufferImageCopy regions covering one "color" of a chessboard
// pattern over an imageWidth x imageHeight image with tileSize x tileSize tiles.
// Tiles where (x + tileIdx) % 2 == y % 2 are skipped, so calling this with
// tileIdx = 0 and tileIdx = 1 yields the two complementary tile sets.
// NOTE(review): the tileIdx parameter line and the trailing 'return copyRegions;'
// are elided from this view of the file.
3327 std::vector<VkBufferImageCopy> generateChessboardCopyRegions (deUint32 tileSize,
3328 deUint32 imageWidth,
3329 deUint32 imageHeight,
3331 VkImageAspectFlags aspectMask)
3333 std::vector<VkBufferImageCopy> copyRegions;
3335 for (deUint32 x = 0; x < (deUint32)deFloatCeil((float)imageWidth / (float)tileSize); x++)
3336 for (deUint32 y = 0; y < (deUint32)deFloatCeil((float)imageHeight / (float)tileSize); y++)
3338 if ((x + tileIdx) % 2 == y % 2) continue;
// Edge tiles are clamped so the region never reads/writes outside the image.
3340 const deUint32 tileWidth = de::min(tileSize, imageWidth - tileSize * x);
3341 const deUint32 tileHeight = de::min(tileSize, imageHeight - tileSize * y);
// Fix: the offset must advance by the full tileSize stride. Using the clamped
// tileWidth/tileHeight here placed partial edge tiles at the wrong position
// (overlapping earlier tiles) whenever the image size was not a multiple of tileSize.
3343 const VkOffset3D offset =
3345 (deInt32)x * (deInt32)tileSize, // deInt32 x
3346 (deInt32)y * (deInt32)tileSize, // deInt32 y
3350 const VkExtent3D extent =
3352 tileWidth, // deUint32 width
3353 tileHeight, // deUint32 height
3354 1u // deUint32 depth
3357 const VkImageSubresourceLayers subresourceLayers =
3359 aspectMask, // VkImageAspectFlags aspectMask
3360 0u, // deUint32 mipLevel
3361 0u, // deUint32 baseArrayLayer
3362 1u, // deUint32 layerCount
3365 const VkBufferImageCopy copy =
3367 (VkDeviceSize)0, // VkDeviceSize bufferOffset
3368 0u, // deUint32 bufferRowLength
3369 0u, // deUint32 bufferImageHeight
3370 subresourceLayers, // VkImageSubresourceLayers imageSubresource
3371 offset, // VkOffset3D imageOffset
3372 extent // VkExtent3D imageExtent
3375 copyRegions.push_back(copy);
// Fill a color image with a chessboard of colorValue0/colorValue1 tiles
// (tileSize x tileSize each). Two host-visible staging buffers — one tile's worth
// of pixels per color — are copied into the chessboard positions produced by
// generateChessboardCopyRegions, then the image is transitioned to 'newLayout'.
// Submits and waits internally, so the pattern is in place on return.
// NOTE(review): the image/format/tileSize parameter lines are elided from this view.
3381 void initColorImageChessboardPattern (const DeviceInterface& vk,
3382 const VkDevice device,
3383 const VkQueue queue,
3384 deUint32 queueFamilyIndex,
3385 Allocator& allocator,
3388 tcu::Vec4 colorValue0,
3389 tcu::Vec4 colorValue1,
3390 deUint32 imageWidth,
3391 deUint32 imageHeight,
3393 VkImageLayout oldLayout,
3394 VkImageLayout newLayout,
3395 VkPipelineStageFlags dstStageFlags)
3397 Move<VkCommandPool> cmdPool = createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
3398 Move<VkCommandBuffer> cmdBuffer = allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
3399 const tcu::TextureFormat tcuFormat = mapVkFormat(format);
3400 const tcu::Vec4 colorValues[] = { colorValue0, colorValue1 };
// One full tile of pixels per staging buffer.
3401 const deUint32 bufferSize = tileSize * tileSize * tcuFormat.getPixelSize();
3403 const VkImageSubresourceRange subresourceRange =
3405 VK_IMAGE_ASPECT_COLOR_BIT, // VkImageAspectFlags aspectMask
3406 0u, // deUint32 baseMipLevel
3407 1u, // deUint32 levelCount
3408 0u, // deUint32 baseArrayLayer
3409 1u // deUint32 layerCount
3412 const VkImageMemoryBarrier preImageBarrier =
3414 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3415 DE_NULL, // const void* pNext;
3416 0u, // VkAccessFlags srcAccessMask;
3417 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3418 oldLayout, // VkImageLayout oldLayout;
3419 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout;
3420 queueFamilyIndex, // deUint32 srcQueueFamilyIndex;
3421 queueFamilyIndex, // deUint32 dstQueueFamilyIndex;
3422 image, // VkImage image;
3423 subresourceRange // VkImageSubresourceRange subresourceRange;
3426 const VkImageMemoryBarrier postImageBarrier =
3428 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3429 DE_NULL, // const void* pNext;
3430 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
3431 VK_ACCESS_SHADER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3432 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout;
3433 newLayout, // VkImageLayout newLayout;
3434 queueFamilyIndex, // deUint32 srcQueueFamilyIndex;
3435 queueFamilyIndex, // deUint32 dstQueueFamilyIndex;
3436 image, // VkImage image;
3437 subresourceRange // VkImageSubresourceRange subresourceRange;
3440 // Create staging buffers for both color values
3441 Move<VkBuffer> buffers[2];
3442 de::MovePtr<Allocation> bufferAllocs[2];
3444 const VkBufferCreateInfo bufferParams =
3446 VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType
3447 DE_NULL, // const void* pNext
3448 0u, // VkBufferCreateFlags flags
3449 (VkDeviceSize)bufferSize, // VkDeviceSize size
3450 VK_BUFFER_USAGE_TRANSFER_SRC_BIT, // VkBufferUsageFlags usage
3451 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode
3452 0u, // deUint32 queueFamilyIndexCount
3453 DE_NULL // const deUint32* pQueueFamilyIndices
// Fill each staging buffer with a solid tile of its color value.
3456 for (deUint32 bufferIdx = 0; bufferIdx < 2; bufferIdx++)
3458 buffers[bufferIdx] = createBuffer(vk, device, &bufferParams);
3459 bufferAllocs[bufferIdx] = allocator.allocate(getBufferMemoryRequirements(vk, device, *buffers[bufferIdx]), MemoryRequirement::HostVisible);
3460 VK_CHECK(vk.bindBufferMemory(device, *buffers[bufferIdx], bufferAllocs[bufferIdx]->getMemory(), bufferAllocs[bufferIdx]->getOffset()));
3462 deUint32* dstPtr = (deUint32*)bufferAllocs[bufferIdx]->getHostPtr();
3463 tcu::PixelBufferAccess access (tcuFormat, tileSize, tileSize, 1, dstPtr);
3465 for (deUint32 x = 0; x < tileSize; x++)
3466 for (deUint32 y = 0; y < tileSize; y++)
3467 access.setPixel(colorValues[bufferIdx], x, y, 0);
3469 flushAlloc(vk, device, *bufferAllocs[bufferIdx]);
3472 beginCommandBuffer(vk, *cmdBuffer);
3473 vk.cmdPipelineBarrier(*cmdBuffer,
3474 VK_PIPELINE_STAGE_HOST_BIT,
3475 VK_PIPELINE_STAGE_TRANSFER_BIT,
3476 (VkDependencyFlags)0,
3477 0, (const VkMemoryBarrier*)DE_NULL,
3478 0, (const VkBufferMemoryBarrier*)DE_NULL,
3479 1, &preImageBarrier);
// Each pass copies one color's tile into its half of the chessboard.
3481 for (deUint32 bufferIdx = 0; bufferIdx < 2; bufferIdx++)
3483 std::vector<VkBufferImageCopy> copyRegions = generateChessboardCopyRegions(tileSize, imageWidth, imageHeight, bufferIdx, VK_IMAGE_ASPECT_COLOR_BIT);
3485 vk.cmdCopyBufferToImage(*cmdBuffer, *buffers[bufferIdx], image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)copyRegions.size(), copyRegions.data());
// presumably the elided second argument here is dstStageFlags — verify against the full file.
3488 vk.cmdPipelineBarrier(*cmdBuffer,
3489 VK_PIPELINE_STAGE_TRANSFER_BIT,
3491 (VkDependencyFlags)0,
3492 0, (const VkMemoryBarrier*)DE_NULL,
3493 0, (const VkBufferMemoryBarrier*)DE_NULL,
3494 1, &postImageBarrier);
3496 endCommandBuffer(vk, *cmdBuffer);
3498 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
// Record commands that read back a combined depth/stencil image into two separate
// buffers (depth aspect into 'depthBuffer', stencil aspect into 'stencilBuffer'):
//   1) image barrier covering both aspects: oldLayout -> TRANSFER_SRC_OPTIMAL
//   2) one vkCmdCopyImageToBuffer per aspect (mip 0, layers [0, numLayers))
//   3) buffer barriers TRANSFER_WRITE -> HOST_READ on both buffers.
// The caller submits 'cmdBuffer' and waits.
// NOTE(review): the image/size/numLayers parameter lines are elided from this view.
3501 void copyDepthStencilImageToBuffers (const DeviceInterface& vk,
3502 VkCommandBuffer cmdBuffer,
3504 VkBuffer depthBuffer,
3505 VkBuffer stencilBuffer,
3507 VkAccessFlags srcAccessMask,
3508 VkImageLayout oldLayout,
3511 const VkImageAspectFlags aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
3512 const VkImageMemoryBarrier imageBarrier =
3514 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3515 DE_NULL, // const void* pNext;
3516 srcAccessMask, // VkAccessFlags srcAccessMask;
3517 VK_ACCESS_TRANSFER_READ_BIT, // VkAccessFlags dstAccessMask;
3518 oldLayout, // VkImageLayout oldLayout;
3519 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout;
3520 VK_QUEUE_FAMILY_IGNORED, // deUint32 srcQueueFamilyIndex;
3521 VK_QUEUE_FAMILY_IGNORED, // deUint32 destQueueFamilyIndex;
3522 image, // VkImage image;
3523 makeImageSubresourceRange(aspect, 0u, 1u, 0, numLayers) // VkImageSubresourceRange subresourceRange;
3526 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
3527 0u, DE_NULL, 0u, DE_NULL, 1u, &imageBarrier);
3529 const VkImageSubresourceLayers subresourceDepth =
3531 VK_IMAGE_ASPECT_DEPTH_BIT, // VkImageAspectFlags aspectMask;
3532 0u, // deUint32 mipLevel;
3533 0u, // deUint32 baseArrayLayer;
3534 numLayers // deUint32 layerCount;
3537 const VkBufferImageCopy regionDepth =
3539 0ull, // VkDeviceSize bufferOffset;
3540 0u, // deUint32 bufferRowLength;
3541 0u, // deUint32 bufferImageHeight;
3542 subresourceDepth, // VkImageSubresourceLayers imageSubresource;
3543 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
3544 makeExtent3D(size.x(), size.y(), 1u) // VkExtent3D imageExtent;
3547 const VkImageSubresourceLayers subresourceStencil =
3549 VK_IMAGE_ASPECT_STENCIL_BIT, // VkImageAspectFlags aspectMask;
3550 0u, // deUint32 mipLevel;
3551 0u, // deUint32 baseArrayLayer;
3552 numLayers // deUint32 layerCount;
3555 const VkBufferImageCopy regionStencil =
3557 0ull, // VkDeviceSize bufferOffset;
3558 0u, // deUint32 bufferRowLength;
3559 0u, // deUint32 bufferImageHeight;
3560 subresourceStencil, // VkImageSubresourceLayers imageSubresource;
3561 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
3562 makeExtent3D(size.x(), size.y(), 1u) // VkExtent3D imageExtent;
// Fixed mojibake: '&reg' had been mis-decoded into the '®' character, corrupting
// the address-of expressions '&regionDepth' / '&regionStencil' below.
3565 vk.cmdCopyImageToBuffer(cmdBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, depthBuffer, 1u, &regionDepth);
3566 vk.cmdCopyImageToBuffer(cmdBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, stencilBuffer, 1u, &regionStencil);
3568 const VkBufferMemoryBarrier bufferBarriers[] =
3571 VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
3572 DE_NULL, // const void* pNext;
3573 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
3574 VK_ACCESS_HOST_READ_BIT, // VkAccessFlags dstAccessMask;
3575 VK_QUEUE_FAMILY_IGNORED, // deUint32 srcQueueFamilyIndex;
3576 VK_QUEUE_FAMILY_IGNORED, // deUint32 dstQueueFamilyIndex;
3577 depthBuffer, // VkBuffer buffer;
3578 0ull, // VkDeviceSize offset;
3579 VK_WHOLE_SIZE // VkDeviceSize size;
3582 VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, // VkStructureType sType;
3583 DE_NULL, // const void* pNext;
3584 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
3585 VK_ACCESS_HOST_READ_BIT, // VkAccessFlags dstAccessMask;
3586 VK_QUEUE_FAMILY_IGNORED, // deUint32 srcQueueFamilyIndex;
3587 VK_QUEUE_FAMILY_IGNORED, // deUint32 dstQueueFamilyIndex;
3588 stencilBuffer, // VkBuffer buffer;
3589 0ull, // VkDeviceSize offset;
3590 VK_WHOLE_SIZE // VkDeviceSize size;
3594 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u,
3595 0u, DE_NULL, 2u, bufferBarriers, 0u, DE_NULL);
// Clear both aspects of a single-mip, single-layer depth/stencil image to
// depthValue/stencilValue on 'queue', then transition it from TRANSFER_DST_OPTIMAL
// to 'newLayout'. Builds, submits, and waits on its own transient command buffer.
// NOTE(review): the image and depthValue parameter lines are elided from this view.
3598 void clearDepthStencilImage (const DeviceInterface& vk,
3599 const VkDevice device,
3600 const VkQueue queue,
3601 deUint32 queueFamilyIndex,
3604 deUint32 stencilValue,
3605 VkImageLayout oldLayout,
3606 VkImageLayout newLayout,
3607 VkPipelineStageFlags dstStageFlags)
3609 Move<VkCommandPool> cmdPool = createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
3610 Move<VkCommandBuffer> cmdBuffer = allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
3612 const VkClearDepthStencilValue clearValue = makeClearValueDepthStencil(depthValue, stencilValue).depthStencil;
// Both aspects cleared together; mip 0, layer 0 only.
3614 const VkImageSubresourceRange subresourceRange =
3616 VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, // VkImageAspectFlags aspectMask
3617 0u, // deUint32 baseMipLevel
3618 1u, // deUint32 levelCount
3619 0u, // deUint32 baseArrayLayer
3620 1u // deUint32 layerCount
3623 const VkImageMemoryBarrier preImageBarrier =
3625 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3626 DE_NULL, // const void* pNext;
3627 0u, // VkAccessFlags srcAccessMask;
3628 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3629 oldLayout, // VkImageLayout oldLayout;
3630 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout;
3631 queueFamilyIndex, // deUint32 srcQueueFamilyIndex;
3632 queueFamilyIndex, // deUint32 dstQueueFamilyIndex;
3633 image, // VkImage image;
3634 subresourceRange // VkImageSubresourceRange subresourceRange;
3637 const VkImageMemoryBarrier postImageBarrier =
3639 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3640 DE_NULL, // const void* pNext;
3641 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
3642 VK_ACCESS_SHADER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3643 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout;
3644 newLayout, // VkImageLayout newLayout;
3645 queueFamilyIndex, // deUint32 srcQueueFamilyIndex;
3646 queueFamilyIndex, // deUint32 dstQueueFamilyIndex;
3647 image, // VkImage image;
3648 subresourceRange // VkImageSubresourceRange subresourceRange;
3651 beginCommandBuffer(vk, *cmdBuffer);
3652 vk.cmdPipelineBarrier(*cmdBuffer,
3653 VK_PIPELINE_STAGE_HOST_BIT,
3654 VK_PIPELINE_STAGE_TRANSFER_BIT,
3655 (VkDependencyFlags)0,
3656 0, (const VkMemoryBarrier*)DE_NULL,
3657 0, (const VkBufferMemoryBarrier*)DE_NULL,
3658 1, &preImageBarrier);
3659 vk.cmdClearDepthStencilImage(*cmdBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clearValue, 1, &subresourceRange);
// presumably the elided second argument here is dstStageFlags — verify against the full file.
3660 vk.cmdPipelineBarrier(*cmdBuffer,
3661 VK_PIPELINE_STAGE_TRANSFER_BIT,
3663 (VkDependencyFlags)0,
3664 0, (const VkMemoryBarrier*)DE_NULL,
3665 0, (const VkBufferMemoryBarrier*)DE_NULL,
3666 1, &postImageBarrier);
3667 endCommandBuffer(vk, *cmdBuffer);
3669 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
// Fill a depth/stencil image with a chessboard pattern of two depth values and two
// stencil values. For each of the two "colors" a pair of host-visible staging buffers
// (one depth, one stencil) holds a single solid tile, which is copied into the
// chessboard positions per aspect. Submits and waits internally.
// NOTE(review): the image/format/tileSize/depthValue0/depthValue1 parameter lines
// are elided from this view of the file.
3672 void initDepthStencilImageChessboardPattern (const DeviceInterface& vk,
3673 const VkDevice device,
3674 const VkQueue queue,
3675 deUint32 queueFamilyIndex,
3676 Allocator& allocator,
3681 deUint32 stencilValue0,
3682 deUint32 stencilValue1,
3683 deUint32 imageWidth,
3684 deUint32 imageHeight,
3686 VkImageLayout oldLayout,
3687 VkImageLayout newLayout,
3688 VkPipelineStageFlags dstStageFlags)
3690 Move<VkCommandPool> cmdPool = createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queueFamilyIndex);
3691 Move<VkCommandBuffer> cmdBuffer = allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY);
// Depth staging assumes 4 bytes per texel; stencil staging 1 byte per texel.
3693 const deUint32 depthBufferSize = tileSize * tileSize * 4;
3694 const deUint32 stencilBufferSize = tileSize * tileSize;
3695 const float depthValues[] = { depthValue0, depthValue1 };
3696 const deUint32 stencilValues[] = { stencilValue0, stencilValue1 };
3697 const tcu::TextureFormat tcuFormat = mapVkFormat(format);
3699 const VkImageSubresourceRange subresourceRange =
3701 VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, // VkImageAspectFlags aspectMask
3702 0u, // deUint32 baseMipLevel
3703 1u, // deUint32 levelCount
3704 0u, // deUint32 baseArrayLayer
3705 1u // deUint32 layerCount
3708 const VkImageMemoryBarrier preImageBarrier =
3710 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3711 DE_NULL, // const void* pNext;
3712 0u, // VkAccessFlags srcAccessMask;
3713 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3714 oldLayout, // VkImageLayout oldLayout;
3715 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout;
3716 queueFamilyIndex, // deUint32 srcQueueFamilyIndex;
3717 queueFamilyIndex, // deUint32 dstQueueFamilyIndex;
3718 image, // VkImage image;
3719 subresourceRange // VkImageSubresourceRange subresourceRange;
3722 const VkImageMemoryBarrier postImageBarrier =
3724 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType sType;
3725 DE_NULL, // const void* pNext;
3726 VK_ACCESS_TRANSFER_WRITE_BIT, // VkAccessFlags srcAccessMask;
3727 VK_ACCESS_SHADER_WRITE_BIT, // VkAccessFlags dstAccessMask;
3728 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout;
3729 newLayout, // VkImageLayout newLayout;
3730 queueFamilyIndex, // deUint32 srcQueueFamilyIndex;
3731 queueFamilyIndex, // deUint32 dstQueueFamilyIndex;
3732 image, // VkImage image;
3733 subresourceRange // VkImageSubresourceRange subresourceRange;
3736 // Create staging buffers for depth and stencil values
3737 Move<VkBuffer> depthBuffers[2];
3738 de::MovePtr<Allocation> depthBufferAllocs[2];
3739 Move<VkBuffer> stencilBuffers[2];
3740 de::MovePtr<Allocation> stencilBufferAllocs[2];
3742 const VkBufferCreateInfo depthBufferParams =
3744 VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType
3745 DE_NULL, // const void* pNext
3746 0u, // VkBufferCreateFlags flags
3747 (VkDeviceSize)depthBufferSize, // VkDeviceSize size
3748 VK_BUFFER_USAGE_TRANSFER_SRC_BIT, // VkBufferUsageFlags usage
3749 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode
3750 0u, // deUint32 queueFamilyIndexCount
3751 DE_NULL // const deUint32* pQueueFamilyIndices
3754 const VkBufferCreateInfo stencilBufferParams =
3756 VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType
3757 DE_NULL, // const void* pNext
3758 0u, // VkBufferCreateFlags flags
3759 (VkDeviceSize)stencilBufferSize, // VkDeviceSize size
3760 VK_BUFFER_USAGE_TRANSFER_SRC_BIT, // VkBufferUsageFlags usage
3761 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode
3762 0u, // deUint32 queueFamilyIndexCount
3763 DE_NULL // const deUint32* pQueueFamilyIndices
// Fill each depth/stencil staging pair with a solid tile of its value.
3766 for (deUint32 bufferIdx = 0; bufferIdx < 2; bufferIdx++)
3768 depthBuffers[bufferIdx] = createBuffer(vk, device, &depthBufferParams);
3769 depthBufferAllocs[bufferIdx] = allocator.allocate(getBufferMemoryRequirements(vk, device, *depthBuffers[bufferIdx]), MemoryRequirement::HostVisible);
3770 VK_CHECK(vk.bindBufferMemory(device, *depthBuffers[bufferIdx], depthBufferAllocs[bufferIdx]->getMemory(), depthBufferAllocs[bufferIdx]->getOffset()));
3771 stencilBuffers[bufferIdx] = createBuffer(vk, device, &stencilBufferParams);
3772 stencilBufferAllocs[bufferIdx] = allocator.allocate(getBufferMemoryRequirements(vk, device, *stencilBuffers[bufferIdx]), MemoryRequirement::HostVisible);
3773 VK_CHECK(vk.bindBufferMemory(device, *stencilBuffers[bufferIdx], stencilBufferAllocs[bufferIdx]->getMemory(), stencilBufferAllocs[bufferIdx]->getOffset()));
3775 deUint32* depthPtr = (deUint32*)depthBufferAllocs[bufferIdx]->getHostPtr();
3776 deUint32* stencilPtr = (deUint32*)stencilBufferAllocs[bufferIdx]->getHostPtr();
// D24_UNORM_S8_UINT depth is packed, so go through PixelBufferAccess::setPixDepth;
// D32_SFLOAT_S8_UINT depth is plain 32-bit floats and can be written directly.
3778 if (format == VK_FORMAT_D24_UNORM_S8_UINT)
3780 tcu::PixelBufferAccess access(tcuFormat, tileSize, tileSize, 1, depthPtr);
3782 for (deUint32 x = 0; x < tileSize; x++)
3783 for (deUint32 y = 0; y < tileSize; y++)
3784 access.setPixDepth(depthValues[bufferIdx], x, y, 0);
3788 DE_ASSERT(format == VK_FORMAT_D32_SFLOAT_S8_UINT);
3790 for (deUint32 i = 0; i < tileSize * tileSize; i++)
3791 ((float*)depthPtr)[i] = depthValues[bufferIdx];
3794 deMemset(stencilPtr, stencilValues[bufferIdx], stencilBufferSize);
3795 flushAlloc(vk, device, *depthBufferAllocs[bufferIdx]);
3796 flushAlloc(vk, device, *stencilBufferAllocs[bufferIdx]);
3799 beginCommandBuffer(vk, *cmdBuffer);
3800 vk.cmdPipelineBarrier(*cmdBuffer,
3801 VK_PIPELINE_STAGE_HOST_BIT,
3802 VK_PIPELINE_STAGE_TRANSFER_BIT,
3803 (VkDependencyFlags)0,
3804 0, (const VkMemoryBarrier*)DE_NULL,
3805 0, (const VkBufferMemoryBarrier*)DE_NULL,
3806 1, &preImageBarrier);
// Depth and stencil aspects are uploaded with separate region lists per color.
3808 for (deUint32 bufferIdx = 0; bufferIdx < 2; bufferIdx++)
3810 std::vector<VkBufferImageCopy> copyRegionsDepth = generateChessboardCopyRegions(tileSize, imageWidth, imageHeight, bufferIdx, VK_IMAGE_ASPECT_DEPTH_BIT);
3811 std::vector<VkBufferImageCopy> copyRegionsStencil = generateChessboardCopyRegions(tileSize, imageWidth, imageHeight, bufferIdx, VK_IMAGE_ASPECT_STENCIL_BIT);
3813 vk.cmdCopyBufferToImage(*cmdBuffer, *depthBuffers[bufferIdx], image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)copyRegionsDepth.size(), copyRegionsDepth.data());
3814 vk.cmdCopyBufferToImage(*cmdBuffer, *stencilBuffers[bufferIdx], image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)copyRegionsStencil.size(), copyRegionsStencil.data());
// presumably the elided second argument here is dstStageFlags — verify against the full file.
3817 vk.cmdPipelineBarrier(*cmdBuffer,
3818 VK_PIPELINE_STAGE_TRANSFER_BIT,
3820 (VkDependencyFlags)0,
3821 0, (const VkMemoryBarrier*)DE_NULL,
3822 0, (const VkBufferMemoryBarrier*)DE_NULL,
3823 1, &postImageBarrier);
3825 endCommandBuffer(vk, *cmdBuffer);
3827 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
// Allocate device memory for every tile of a sparse (residency) image and bind it
// with one vkQueueBindSparse call that signals 'signalSemaphore' on completion:
//   - per-layer, per-mip residency binds below imageMipTailFirstLod (one allocation
//     per sparse block of size memoryRequirements.alignment)
//   - opaque binds for the mip tail and, if present, the metadata aspect
//     (one tail per layer unless SINGLE_MIPTAIL is set).
// All allocations are appended to 'allocations' so the caller keeps them alive.
// Throws NotSupportedError when the format, aspect, memory type, or address-space
// limit rules out sparse use.
// NOTE(review): the device/destImage/queue parameter lines are elided from this view.
3830 void allocateAndBindSparseImage (const DeviceInterface& vk,
3832 const VkPhysicalDevice physicalDevice,
3833 const InstanceInterface& instance,
3834 const VkImageCreateInfo& imageCreateInfo,
3835 const VkSemaphore& signalSemaphore,
3837 Allocator& allocator,
3838 std::vector<de::SharedPtr<Allocation> >& allocations,
3839 tcu::TextureFormat format,
3842 const VkImageAspectFlags imageAspectFlags = getImageAspectFlags(format);
3843 const VkPhysicalDeviceProperties deviceProperties = getPhysicalDeviceProperties(instance, physicalDevice);
3844 const VkPhysicalDeviceMemoryProperties deviceMemoryProperties = getPhysicalDeviceMemoryProperties(instance, physicalDevice);
3845 deUint32 sparseMemoryReqCount = 0;
3847 // Check if the image format supports sparse operations
3848 if (!checkSparseImageFormatSupport(physicalDevice, instance, imageCreateInfo))
3849 TCU_THROW(NotSupportedError, "The image format does not support sparse operations.")
3851 vk.getImageSparseMemoryRequirements(device, destImage, &sparseMemoryReqCount, DE_NULL);
3853 DE_ASSERT(sparseMemoryReqCount != 0);
3855 std::vector<VkSparseImageMemoryRequirements> sparseImageMemoryRequirements;
3856 sparseImageMemoryRequirements.resize(sparseMemoryReqCount);
3858 vk.getImageSparseMemoryRequirements(device, destImage, &sparseMemoryReqCount, &sparseImageMemoryRequirements[0]);
3860 const deUint32 noMatchFound = ~((deUint32)0);
// Find the requirements entry whose aspects exactly match the image's aspects.
3862 deUint32 aspectIndex = noMatchFound;
3863 for (deUint32 memoryReqNdx = 0; memoryReqNdx < sparseMemoryReqCount; ++memoryReqNdx)
3865 if (sparseImageMemoryRequirements[memoryReqNdx].formatProperties.aspectMask == imageAspectFlags)
3867 aspectIndex = memoryReqNdx;
// Metadata aspect is optional; remembered separately for an extra opaque bind below.
3872 deUint32 metadataAspectIndex = noMatchFound;
3873 for (deUint32 memoryReqNdx = 0; memoryReqNdx < sparseMemoryReqCount; ++memoryReqNdx)
3875 if (sparseImageMemoryRequirements[memoryReqNdx].formatProperties.aspectMask & VK_IMAGE_ASPECT_METADATA_BIT)
3877 metadataAspectIndex = memoryReqNdx;
3882 if (aspectIndex == noMatchFound)
3883 TCU_THROW(NotSupportedError, "Required image aspect not supported.");
3885 const VkMemoryRequirements memoryRequirements = getImageMemoryRequirements(vk, device, destImage);
3887 deUint32 memoryType = noMatchFound;
3888 for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < deviceMemoryProperties.memoryTypeCount; ++memoryTypeNdx)
3890 if ((memoryRequirements.memoryTypeBits & (1u << memoryTypeNdx)) != 0 &&
3891 MemoryRequirement::Any.matchesHeap(deviceMemoryProperties.memoryTypes[memoryTypeNdx].propertyFlags))
3893 memoryType = memoryTypeNdx;
3898 if (memoryType == noMatchFound)
3899 TCU_THROW(NotSupportedError, "No matching memory type found.");
3901 if (memoryRequirements.size > deviceProperties.limits.sparseAddressSpaceSize)
3902 TCU_THROW(NotSupportedError, "Required memory size for sparse resource exceeds device limits.");
3904 const VkSparseImageMemoryRequirements aspectRequirements = sparseImageMemoryRequirements[aspectIndex];
3905 const VkExtent3D imageGranularity = aspectRequirements.formatProperties.imageGranularity;
3907 std::vector<VkSparseImageMemoryBind> imageResidencyMemoryBinds;
3908 std::vector<VkSparseMemoryBind> imageMipTailMemoryBinds;
3910 for (deUint32 layerNdx = 0; layerNdx < imageCreateInfo.arrayLayers; ++layerNdx)
3912 for (deUint32 mipLevelNdx = 0; mipLevelNdx < aspectRequirements.imageMipTailFirstLod; ++mipLevelNdx)
3914 const VkExtent3D mipExtent = mipLevelExtents(imageCreateInfo.extent, mipLevelNdx);
3915 const tcu::UVec3 numSparseBinds = alignedDivide(mipExtent, imageGranularity);
// Edge blocks use the remainder extent; interior blocks use the full granularity.
3916 const tcu::UVec3 lastBlockExtent = tcu::UVec3(mipExtent.width % imageGranularity.width ? mipExtent.width % imageGranularity.width : imageGranularity.width,
3917 mipExtent.height % imageGranularity.height ? mipExtent.height % imageGranularity.height : imageGranularity.height,
3918 mipExtent.depth % imageGranularity.depth ? mipExtent.depth % imageGranularity.depth : imageGranularity.depth );
3920 for (deUint32 z = 0; z < numSparseBinds.z(); ++z)
3921 for (deUint32 y = 0; y < numSparseBinds.y(); ++y)
3922 for (deUint32 x = 0; x < numSparseBinds.x(); ++x)
3924 const VkMemoryRequirements allocRequirements =
3926 // 28.7.5 alignment shows the block size in bytes
3927 memoryRequirements.alignment, // VkDeviceSize size;
3928 memoryRequirements.alignment, // VkDeviceSize alignment;
3929 memoryRequirements.memoryTypeBits, // uint32_t memoryTypeBits;
3932 de::SharedPtr<Allocation> allocation(allocator.allocate(allocRequirements, MemoryRequirement::Any).release());
3933 allocations.push_back(allocation);
3936 offset.x = x*imageGranularity.width;
3937 offset.y = y*imageGranularity.height;
3938 offset.z = z*imageGranularity.depth;
3941 extent.width = (x == numSparseBinds.x() - 1) ? lastBlockExtent.x() : imageGranularity.width;
3942 extent.height = (y == numSparseBinds.y() - 1) ? lastBlockExtent.y() : imageGranularity.height;
3943 extent.depth = (z == numSparseBinds.z() - 1) ? lastBlockExtent.z() : imageGranularity.depth;
3945 const VkSparseImageMemoryBind imageMemoryBind =
3948 imageAspectFlags, // VkImageAspectFlags aspectMask;
3949 mipLevelNdx, // uint32_t mipLevel;
3950 layerNdx, // uint32_t arrayLayer;
3951 }, // VkImageSubresource subresource;
3952 offset, // VkOffset3D offset;
3953 extent, // VkExtent3D extent;
3954 allocation->getMemory(), // VkDeviceMemory memory;
3955 allocation->getOffset(), // VkDeviceSize memoryOffset;
3956 0u, // VkSparseMemoryBindFlags flags;
3959 imageResidencyMemoryBinds.push_back(imageMemoryBind);
3963 // Handle MIP tail. There are two cases to consider here:
3965 // 1) VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT is requested by the driver: each layer needs a separate tail.
3966 // 2) otherwise: only one tail is needed.
3967 if (aspectRequirements.imageMipTailSize > 0)
3969 if (layerNdx == 0 || (aspectRequirements.formatProperties.flags & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) == 0)
3971 const VkMemoryRequirements allocRequirements =
3973 aspectRequirements.imageMipTailSize, // VkDeviceSize size;
3974 memoryRequirements.alignment, // VkDeviceSize alignment;
3975 memoryRequirements.memoryTypeBits, // uint32_t memoryTypeBits;
3978 const de::SharedPtr<Allocation> allocation(allocator.allocate(allocRequirements, MemoryRequirement::Any).release());
3980 const VkSparseMemoryBind imageMipTailMemoryBind =
3982 aspectRequirements.imageMipTailOffset + layerNdx * aspectRequirements.imageMipTailStride, // VkDeviceSize resourceOffset;
3983 aspectRequirements.imageMipTailSize, // VkDeviceSize size;
3984 allocation->getMemory(), // VkDeviceMemory memory;
3985 allocation->getOffset(), // VkDeviceSize memoryOffset;
3986 0u, // VkSparseMemoryBindFlags flags;
3989 allocations.push_back(allocation);
3991 imageMipTailMemoryBinds.push_back(imageMipTailMemoryBind);
3995 // Handle Metadata. Similarly to MIP tail in aspectRequirements, there are two cases to consider here:
3997 // 1) VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT is requested by the driver: each layer needs a separate tail.
3999 if (metadataAspectIndex != noMatchFound)
4001 const VkSparseImageMemoryRequirements metadataAspectRequirements = sparseImageMemoryRequirements[metadataAspectIndex];
4003 if (layerNdx == 0 || (metadataAspectRequirements.formatProperties.flags & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) == 0)
4005 const VkMemoryRequirements metadataAllocRequirements =
4007 metadataAspectRequirements.imageMipTailSize, // VkDeviceSize size;
4008 memoryRequirements.alignment, // VkDeviceSize alignment;
4009 memoryRequirements.memoryTypeBits, // uint32_t memoryTypeBits;
4011 const de::SharedPtr<Allocation> metadataAllocation(allocator.allocate(metadataAllocRequirements, MemoryRequirement::Any).release());
4013 const VkSparseMemoryBind metadataMipTailMemoryBind =
4015 metadataAspectRequirements.imageMipTailOffset +
4016 layerNdx * metadataAspectRequirements.imageMipTailStride, // VkDeviceSize resourceOffset;
4017 metadataAspectRequirements.imageMipTailSize, // VkDeviceSize size;
4018 metadataAllocation->getMemory(), // VkDeviceMemory memory;
4019 metadataAllocation->getOffset(), // VkDeviceSize memoryOffset;
4020 VK_SPARSE_MEMORY_BIND_METADATA_BIT // VkSparseMemoryBindFlags flags;
4023 allocations.push_back(metadataAllocation);
4025 imageMipTailMemoryBinds.push_back(metadataMipTailMemoryBind);
// Bind info starts empty; the residency/opaque arrays are attached only if non-empty.
4030 VkBindSparseInfo bindSparseInfo =
4032 VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, //VkStructureType sType;
4033 DE_NULL, //const void* pNext;
4034 0u, //deUint32 waitSemaphoreCount;
4035 DE_NULL, //const VkSemaphore* pWaitSemaphores;
4036 0u, //deUint32 bufferBindCount;
4037 DE_NULL, //const VkSparseBufferMemoryBindInfo* pBufferBinds;
4038 0u, //deUint32 imageOpaqueBindCount;
4039 DE_NULL, //const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
4040 0u, //deUint32 imageBindCount;
4041 DE_NULL, //const VkSparseImageMemoryBindInfo* pImageBinds;
4042 1u, //deUint32 signalSemaphoreCount;
4043 &signalSemaphore //const VkSemaphore* pSignalSemaphores;
4046 VkSparseImageMemoryBindInfo imageResidencyBindInfo;
4047 VkSparseImageOpaqueMemoryBindInfo imageMipTailBindInfo;
4049 if (imageResidencyMemoryBinds.size() > 0)
4051 imageResidencyBindInfo.image = destImage;
4052 imageResidencyBindInfo.bindCount = static_cast<deUint32>(imageResidencyMemoryBinds.size());
4053 imageResidencyBindInfo.pBinds = &imageResidencyMemoryBinds[0];
4055 bindSparseInfo.imageBindCount = 1u;
4056 bindSparseInfo.pImageBinds = &imageResidencyBindInfo;
4059 if (imageMipTailMemoryBinds.size() > 0)
4061 imageMipTailBindInfo.image = destImage;
4062 imageMipTailBindInfo.bindCount = static_cast<deUint32>(imageMipTailMemoryBinds.size());
4063 imageMipTailBindInfo.pBinds = &imageMipTailMemoryBinds[0];
4065 bindSparseInfo.imageOpaqueBindCount = 1u;
4066 bindSparseInfo.pImageOpaqueBinds = &imageMipTailBindInfo;
// Single bind call; completion is observed via 'signalSemaphore' (no fence here).
4069 VK_CHECK(vk.queueBindSparse(queue, 1u, &bindSparseInfo, DE_NULL));
4072 bool checkSparseImageFormatSupport (const VkPhysicalDevice physicalDevice,
4073 const InstanceInterface& instance,
4074 const VkImageCreateInfo& imageCreateInfo)
4076 const std::vector<VkSparseImageFormatProperties> sparseImageFormatPropVec =
4077 getPhysicalDeviceSparseImageFormatProperties(instance, physicalDevice, imageCreateInfo.format, imageCreateInfo.imageType, imageCreateInfo.samples, imageCreateInfo.usage, imageCreateInfo.tiling);
4079 return (sparseImageFormatPropVec.size() != 0);