Use correct input attachment index in descriptor limits tests
[platform/upstream/VK-GL-CTS.git] / external/vulkancts/modules/vulkan/binding_model/vktBindingValveMutableTests.cpp
1 /*-------------------------------------------------------------------------
2  * Vulkan Conformance Tests
3  * ------------------------
4  *
5  * Copyright (c) 2021 The Khronos Group Inc.
6  * Copyright (c) 2021 Valve Corporation.
7  *
8  * Licensed under the Apache License, Version 2.0 (the "License");
9  * you may not use this file except in compliance with the License.
10  * You may obtain a copy of the License at
11  *
12  *      http://www.apache.org/licenses/LICENSE-2.0
13  *
14  * Unless required by applicable law or agreed to in writing, software
15  * distributed under the License is distributed on an "AS IS" BASIS,
16  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17  * See the License for the specific language governing permissions and
18  * limitations under the License.
19  *
20  *//*!
21  * \file
22  * \brief Tests for VK_VALVE_mutable_descriptor_type.
23  *//*--------------------------------------------------------------------*/
24 #include "vktBindingValveMutableTests.hpp"
25 #include "vktTestCase.hpp"
26
27 #include "vkDefs.hpp"
28 #include "vkRefUtil.hpp"
29 #include "vkQueryUtil.hpp"
30 #include "vkImageWithMemory.hpp"
31 #include "vkBufferWithMemory.hpp"
32 #include "vkTypeUtil.hpp"
33 #include "vkObjUtil.hpp"
34 #include "vkBarrierUtil.hpp"
35 #include "vkCmdUtil.hpp"
36 #include "vkBuilderUtil.hpp"
37 #include "vkRayTracingUtil.hpp"
38
39 #include "deUniquePtr.hpp"
40 #include "deSTLUtil.hpp"
41 #include "deStringUtil.hpp"
42
43 #include <vector>
44 #include <algorithm>
45 #include <iterator>
46 #include <set>
47 #include <sstream>
48 #include <limits>
49
50 namespace vkt
51 {
52 namespace BindingModel
53 {
54
55 namespace
56 {
57
58 using namespace vk;
59
60 deUint32 getDescriptorNumericValue (deUint32 iteration, deUint32 bindingIdx, deUint32 descriptorIdx = 0u)
61 {
62         // When assigning numeric values for the descriptor contents, each descriptor will get the value 0x5aIIBBDD, where II is an
63         // octet containing the iteration index, BB is an octet containing the binding index and DD is the descriptor index inside that binding.
64         constexpr deUint32 kNumericValueBase = 0x5a000000u;
65
66         return (kNumericValueBase | ((iteration & 0xFFu) << 16) | ((bindingIdx & 0xFFu) << 8) | (descriptorIdx & 0xFFu));
67 }
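// Example: getDescriptorNumericValue(2u, 3u, 1u) returns 0x5a020301u
// (iteration 2 in the II octet, binding 3 in the BB octet, descriptor 1 in the DD octet).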
68
69 deUint16 getAccelerationStructureOffsetX (deUint32 descriptorNumericValue)
70 {
71         // Keep the lowest 16 bits (binding and descriptor idx) as the offset.
72         return static_cast<deUint16>(descriptorNumericValue);
73 }
74
75 // Value that will be stored in the output buffer to signal that descriptor values were read successfully.
76 deUint32 getExpectedOutputBufferValue ()
77 {
78         return 2u;
79 }
80
81 // This value will be stored in an image to be sampled when checking descriptors containing samplers alone.
82 deUint32 getExternalSampledImageValue ()
83 {
84         return 0x41322314u;
85 }
86
87 // Value that will be ORed with the descriptor value before writing.
88 deUint32 getStoredValueMask ()
89 {
90         return 0xFF000000u;
91 }
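// Example: a descriptor initialized to 0x5a020301u and written back through a writable descriptor
// ends up containing (0x5a020301u | 0xFF000000u) == 0xFF020301u.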
92
93 VkFormat getDescriptorImageFormat ()
94 {
95         return VK_FORMAT_R32_UINT;
96 }
97
98 VkExtent3D getDefaultExtent ()
99 {
100         return makeExtent3D(1u, 1u, 1u);
101 }
102
103 // Convert value to hexadecimal.
104 std::string toHex (deUint32 val)
105 {
106         std::ostringstream s;
107         s << "0x" << std::hex << val << "u";
108         return s.str();
109 }
110
111 // Returns the list of descriptor types that cannot be part of a mutable descriptor.
112 std::vector<VkDescriptorType> getForbiddenMutableTypes ()
113 {
114         return std::vector<VkDescriptorType>
115                 {
116                         VK_DESCRIPTOR_TYPE_MUTABLE_VALVE,
117                         VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
118                         VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
119                         VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
120                 };
121 }
122
123 // Returns the list of descriptor types that are mandatory for the extension.
124 std::vector<VkDescriptorType> getMandatoryMutableTypes ()
125 {
126         return std::vector<VkDescriptorType>
127                 {
128                         VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
129                         VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
130                         VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
131                         VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
132                         VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
133                         VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
134                 };
135 }
136
137 // This helps quickly transform a vector of descriptor types into a bitmask, which makes it easier to check some conditions.
138 enum DescriptorTypeFlagBits
139 {
140         DTFB_SAMPLER                    = (1 << 0),
141         DTFB_COMBINED_IMAGE_SAMPLER     = (1 << 1),
142         DTFB_SAMPLED_IMAGE              = (1 << 2),
143         DTFB_STORAGE_IMAGE              = (1 << 3),
144         DTFB_UNIFORM_TEXEL_BUFFER       = (1 << 4),
145         DTFB_STORAGE_TEXEL_BUFFER       = (1 << 5),
146         DTFB_UNIFORM_BUFFER             = (1 << 6),
147         DTFB_STORAGE_BUFFER             = (1 << 7),
148         DTFB_UNIFORM_BUFFER_DYNAMIC     = (1 << 8),
149         DTFB_STORAGE_BUFFER_DYNAMIC     = (1 << 9),
150         DTFB_INPUT_ATTACHMENT           = (1 << 10),
151         DTFB_INLINE_UNIFORM_BLOCK_EXT   = (1 << 11),
152         DTFB_ACCELERATION_STRUCTURE_KHR = (1 << 12),
153         DTFB_ACCELERATION_STRUCTURE_NV  = (1 << 13),
154         DTFB_MUTABLE_VALVE              = (1 << 14),
155 };
156
157 using DescriptorTypeFlags = deUint32;
158
159 // Convert type to its corresponding flag bit.
160 DescriptorTypeFlagBits toDescriptorTypeFlagBit (VkDescriptorType descriptorType)
161 {
162         switch (descriptorType)
163         {
164         case VK_DESCRIPTOR_TYPE_SAMPLER:                        return DTFB_SAMPLER;
165         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:         return DTFB_COMBINED_IMAGE_SAMPLER;
166         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:                  return DTFB_SAMPLED_IMAGE;
167         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:                  return DTFB_STORAGE_IMAGE;
168         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:           return DTFB_UNIFORM_TEXEL_BUFFER;
169         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:           return DTFB_STORAGE_TEXEL_BUFFER;
170         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:                 return DTFB_UNIFORM_BUFFER;
171         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:                 return DTFB_STORAGE_BUFFER;
172         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:         return DTFB_UNIFORM_BUFFER_DYNAMIC;
173         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:         return DTFB_STORAGE_BUFFER_DYNAMIC;
174         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:               return DTFB_INPUT_ATTACHMENT;
175         case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:       return DTFB_INLINE_UNIFORM_BLOCK_EXT;
176         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:     return DTFB_ACCELERATION_STRUCTURE_KHR;
177         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:      return DTFB_ACCELERATION_STRUCTURE_NV;
178         case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:                  return DTFB_MUTABLE_VALVE;
179         default: break;
180         }
181
182         // Unreachable.
183         DE_ASSERT(false);
184         return DTFB_SAMPLER;
185 }
186
187 // Convert vector of descriptor types to a bitfield.
188 DescriptorTypeFlags toDescriptorTypeFlags (const std::vector<VkDescriptorType>& types)
189 {
190         DescriptorTypeFlags result = 0u;
191         for (const auto& t : types)
192                 result |= toDescriptorTypeFlagBit(t);
193         return result;
194 }
195
196 // Convert bitfield to vector of descriptor types.
197 std::vector<VkDescriptorType> toDescriptorTypeVector (DescriptorTypeFlags bitfield)
198 {
199         std::vector<VkDescriptorType> result;
200
201         if (bitfield & DTFB_SAMPLER)                     result.push_back(VK_DESCRIPTOR_TYPE_SAMPLER);
202         if (bitfield & DTFB_COMBINED_IMAGE_SAMPLER)      result.push_back(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
203         if (bitfield & DTFB_SAMPLED_IMAGE)               result.push_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
204         if (bitfield & DTFB_STORAGE_IMAGE)               result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
205         if (bitfield & DTFB_UNIFORM_TEXEL_BUFFER)        result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
206         if (bitfield & DTFB_STORAGE_TEXEL_BUFFER)        result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
207         if (bitfield & DTFB_UNIFORM_BUFFER)              result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
208         if (bitfield & DTFB_STORAGE_BUFFER)              result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
209         if (bitfield & DTFB_UNIFORM_BUFFER_DYNAMIC)      result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
210         if (bitfield & DTFB_STORAGE_BUFFER_DYNAMIC)      result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
211         if (bitfield & DTFB_INPUT_ATTACHMENT)            result.push_back(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
212         if (bitfield & DTFB_INLINE_UNIFORM_BLOCK_EXT)    result.push_back(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
213         if (bitfield & DTFB_ACCELERATION_STRUCTURE_KHR)  result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
214         if (bitfield & DTFB_ACCELERATION_STRUCTURE_NV)   result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
215         if (bitfield & DTFB_MUTABLE_VALVE)               result.push_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE);
216
217         return result;
218 }
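// Example: toDescriptorTypeFlags({VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_DESCRIPTOR_TYPE_SAMPLER})
// returns (DTFB_SAMPLER | DTFB_STORAGE_IMAGE), and feeding that bitfield back to
// toDescriptorTypeVector() yields the same two types, ordered as declared in DescriptorTypeFlagBits.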
219
220 // How to create the source set when copying descriptors from another set.
221 // * MUTABLE means to transform bindings into mutable bindings.
222 // * NONMUTABLE means to transform bindings into non-mutable bindings.
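// * NO_SOURCE means no source set is used.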
223 enum class SourceSetStrategy
224 {
225         MUTABLE = 0,
226         NONMUTABLE,
227         NO_SOURCE,
228 };
229
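// How to declare mutable descriptor type lists when creating the descriptor pool.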
230 enum class PoolMutableStrategy
231 {
232         KEEP_TYPES = 0,
233         EXPAND_TYPES,
234         NO_TYPES,
235 };
236
237 // Type of information that's present in VkWriteDescriptorSet.
238 enum class WriteType
239 {
240         IMAGE_INFO = 0,
241         BUFFER_INFO,
242         BUFFER_VIEW,
243         ACCELERATION_STRUCTURE_INFO,
244 };
245
246 struct WriteInfo
247 {
248         WriteType writeType;
249         union
250         {
251                 VkDescriptorImageInfo                           imageInfo;
252                 VkDescriptorBufferInfo                          bufferInfo;
253                 VkBufferView                                    bufferView;
254                 VkWriteDescriptorSetAccelerationStructureKHR    asInfo;
255         };
256
257         explicit WriteInfo (const VkDescriptorImageInfo& info_)
258                 : writeType(WriteType::IMAGE_INFO)
259                 , imageInfo(info_)
260         {}
261
262         explicit WriteInfo (const VkDescriptorBufferInfo& info_)
263                 : writeType(WriteType::BUFFER_INFO)
264                 , bufferInfo(info_)
265         {}
266
267         explicit WriteInfo (VkBufferView view_)
268                 : writeType(WriteType::BUFFER_VIEW)
269                 , bufferView(view_)
270         {}
271
272         explicit WriteInfo (const VkWriteDescriptorSetAccelerationStructureKHR& asInfo_)
273                 : writeType(WriteType::ACCELERATION_STRUCTURE_INFO)
274                 , asInfo(asInfo_)
275         {}
276 };
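// Example: constructing a WriteInfo from a VkDescriptorImageInfo tags the union as WriteType::IMAGE_INFO;
// Resource::makeWriteInfo() below builds the appropriate variant for each resource type.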
277
278 // Resource backing up a single binding.
279 enum class ResourceType
280 {
281         SAMPLER = 0,
282         IMAGE,
283         COMBINED_IMAGE_SAMPLER,
284         BUFFER,
285         BUFFER_VIEW,
286         ACCELERATION_STRUCTURE,
287 };
288
289 // Type of resource backing up a particular descriptor type.
290 ResourceType toResourceType (VkDescriptorType descriptorType)
291 {
292         ResourceType resourceType = ResourceType::SAMPLER;
293         switch (descriptorType)
294         {
295         case VK_DESCRIPTOR_TYPE_SAMPLER:
296                 resourceType = ResourceType::SAMPLER;
297                 break;
298
299         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
300                 resourceType = ResourceType::COMBINED_IMAGE_SAMPLER;
301                 break;
302
303         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
304         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
305         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
306                 resourceType = ResourceType::IMAGE;
307                 break;
308
309         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
310         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
311                 resourceType = ResourceType::BUFFER_VIEW;
312                 break;
313
314         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
315         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
316                 resourceType = ResourceType::BUFFER;
317                 break;
318
319         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
320                 resourceType = ResourceType::ACCELERATION_STRUCTURE;
321                 break;
322
323         default:
324                 DE_ASSERT(false);
325                 break;
326         }
327
328         return resourceType;
329 }
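// Note VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE and VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
// all map to ResourceType::IMAGE, so they are backed by the same kind of image resource.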
330
331 bool isShaderWritable (VkDescriptorType descriptorType)
332 {
333         return (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
334                 descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
335 }
336
337 Move<VkSampler> makeDefaultSampler (const DeviceInterface& vkd, VkDevice device)
338 {
339         const VkSamplerCreateInfo samplerCreateInfo = {
340                 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,  //  VkStructureType                     sType;
341                 nullptr,                                //  const void*                         pNext;
342                 0u,                                     //  VkSamplerCreateFlags        flags;
343                 VK_FILTER_NEAREST,                      //  VkFilter                            magFilter;
344                 VK_FILTER_NEAREST,                      //  VkFilter                            minFilter;
345                 VK_SAMPLER_MIPMAP_MODE_NEAREST,         //  VkSamplerMipmapMode         mipmapMode;
346                 VK_SAMPLER_ADDRESS_MODE_REPEAT,         //  VkSamplerAddressMode        addressModeU;
347                 VK_SAMPLER_ADDRESS_MODE_REPEAT,         //  VkSamplerAddressMode        addressModeV;
348                 VK_SAMPLER_ADDRESS_MODE_REPEAT,         //  VkSamplerAddressMode        addressModeW;
349                 0.f,                                    //  float                                       mipLodBias;
350                 VK_FALSE,                               //  VkBool32                            anisotropyEnable;
351                 1.f,                                    //  float                                       maxAnisotropy;
352                 VK_FALSE,                               //  VkBool32                            compareEnable;
353                 VK_COMPARE_OP_ALWAYS,                   //  VkCompareOp                         compareOp;
354                 0.f,                                    //  float                                       minLod;
355                 0.f,                                    //  float                                       maxLod;
356                 VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,  //  VkBorderColor                       borderColor;
357                 VK_FALSE,                               //  VkBool32                            unnormalizedCoordinates;
358         };
359
360         return createSampler(vkd, device, &samplerCreateInfo);
361 }
362
363 de::MovePtr<ImageWithMemory> makeDefaultImage (const DeviceInterface& vkd, VkDevice device, Allocator& alloc)
364 {
365         const auto              extent     = makeExtent3D(1u, 1u, 1u);
366         const VkImageUsageFlags usageFlags = (
367                 VK_IMAGE_USAGE_SAMPLED_BIT
368                 | VK_IMAGE_USAGE_STORAGE_BIT
369                 | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
370                 | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
371                 | VK_IMAGE_USAGE_TRANSFER_SRC_BIT
372                 | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
373
374         const VkImageCreateInfo imageCreateInfo = {
375                 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,    //  VkStructureType                     sType;
376                 nullptr,                                //  const void*                         pNext;
377                 0u,                                     //  VkImageCreateFlags          flags;
378                 VK_IMAGE_TYPE_2D,                       //  VkImageType                         imageType;
379                 getDescriptorImageFormat(),             //  VkFormat                            format;
380                 extent,                                 //  VkExtent3D                          extent;
381                 1u,                                     //  deUint32                            mipLevels;
382                 1u,                                     //  deUint32                            arrayLayers;
383                 VK_SAMPLE_COUNT_1_BIT,                  //  VkSampleCountFlagBits       samples;
384                 VK_IMAGE_TILING_OPTIMAL,                //  VkImageTiling                       tiling;
385                 usageFlags,                             //  VkImageUsageFlags           usage;
386                 VK_SHARING_MODE_EXCLUSIVE,              //  VkSharingMode                       sharingMode;
387                 0u,                                     //  deUint32                            queueFamilyIndexCount;
388                 nullptr,                                //  const deUint32*                     pQueueFamilyIndices;
389                 VK_IMAGE_LAYOUT_UNDEFINED,              //  VkImageLayout                       initialLayout;
390         };
391         return de::MovePtr<ImageWithMemory>(new ImageWithMemory(vkd, device, alloc, imageCreateInfo, MemoryRequirement::Any));
392 }
393
394 Move<VkImageView> makeDefaultImageView (const DeviceInterface& vkd, VkDevice device, VkImage image)
395 {
396         const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
397         return makeImageView(vkd, device, image, VK_IMAGE_VIEW_TYPE_2D, getDescriptorImageFormat(), subresourceRange);
398 }
399
400 de::MovePtr<BufferWithMemory> makeDefaultBuffer (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 numElements = 1u)
401 {
402         const VkBufferUsageFlags bufferUsage = (
403                 VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
404                 | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT
405                 | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
406                 | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
407                 | VK_BUFFER_USAGE_TRANSFER_SRC_BIT
408                 | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
409
410         const auto bufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numElements));
411
412         const auto bufferCreateInfo = makeBufferCreateInfo(bufferSize, bufferUsage);
413
414         return de::MovePtr<BufferWithMemory>(new BufferWithMemory(vkd, device, alloc, bufferCreateInfo, MemoryRequirement::HostVisible));
415 }
416
417 Move<VkBufferView> makeDefaultBufferView (const DeviceInterface& vkd, VkDevice device, VkBuffer buffer)
418 {
419         const auto bufferOffset = static_cast<VkDeviceSize>(0);
420         const auto bufferSize   = static_cast<VkDeviceSize>(sizeof(deUint32));
421
422         return makeBufferView(vkd, device, buffer, getDescriptorImageFormat(), bufferOffset, bufferSize);
423 }
424
425 struct AccelerationStructureData
426 {
427         using TLASPtr = de::MovePtr<TopLevelAccelerationStructure>;
428         using BLASPtr = de::MovePtr<BottomLevelAccelerationStructure>;
429
430         TLASPtr tlas;
431         BLASPtr blas;
432
433         void swap (AccelerationStructureData& other)
434         {
435                 auto myTlasPtr = tlas.release();
436                 auto myBlasPtr = blas.release();
437
438                 auto otherTlasPtr = other.tlas.release();
439                 auto otherBlasPtr = other.blas.release();
440
441                 tlas = TLASPtr(otherTlasPtr);
442                 blas = BLASPtr(otherBlasPtr);
443
444                 other.tlas = TLASPtr(myTlasPtr);
445                 other.blas = BLASPtr(myBlasPtr);
446         }
447
448         AccelerationStructureData () : tlas(), blas() {}
449
450         AccelerationStructureData (AccelerationStructureData&& other)
451                 : AccelerationStructureData()
452         {
453                 swap(other);
454         }
455
456         AccelerationStructureData& operator= (AccelerationStructureData&& other)
457         {
458                 swap(other);
459                 return *this;
460         }
461 };
462
463 AccelerationStructureData makeDefaultAccelerationStructure (const DeviceInterface& vkd, VkDevice device, VkCommandBuffer cmdBuffer, Allocator& alloc, bool triangles, deUint16 offsetX)
464 {
465         AccelerationStructureData data;
466
467         // Geometry (triangle or pair of points) around (offsetX, 0) with depth 5.0.
468         const float middleX = static_cast<float>(offsetX);
469         const float leftX   = middleX - 0.5f;
470         const float rightX  = middleX + 0.5f;
471         const float topY    = 0.5f;
472         const float bottomY = -0.5f;
473         const float depth   = 5.0f;
474
475         std::vector<tcu::Vec3> vertices;
476
477         if (triangles)
478         {
479                 vertices.reserve(3u);
480                 vertices.emplace_back(middleX, topY, depth);
481                 vertices.emplace_back(rightX, bottomY, depth);
482                 vertices.emplace_back(leftX, bottomY, depth);
483         }
484         else
485         {
486                 vertices.reserve(2u);
487                 vertices.emplace_back(leftX, bottomY, depth);
488                 vertices.emplace_back(rightX, topY, depth);
489         }
490
491         data.tlas = makeTopLevelAccelerationStructure();
492         data.blas = makeBottomLevelAccelerationStructure();
493
494         VkGeometryInstanceFlagsKHR instanceFlags = 0u;
495         if (triangles)
496                 instanceFlags |= VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR;
497
498         data.blas->addGeometry(vertices, triangles, VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR);
499         data.blas->createAndBuild(vkd, device, cmdBuffer, alloc);
500
501         de::SharedPtr<BottomLevelAccelerationStructure> blasSharedPtr (data.blas.release());
502         data.tlas->setInstanceCount(1u);
503         data.tlas->addInstance(blasSharedPtr, identityMatrix3x4, 0u, 0xFFu, 0u, instanceFlags);
504         data.tlas->createAndBuild(vkd, device, cmdBuffer, alloc);
505
506         return data;
507 }
508
509 const auto kShaderAccess = (VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT);
510
511 struct Resource
512 {
513         VkDescriptorType              descriptorType;
514         ResourceType                  resourceType;
515         Move<VkSampler>               sampler;
516         de::MovePtr<ImageWithMemory>  imageWithMemory;
517         Move<VkImageView>             imageView;
518         de::MovePtr<BufferWithMemory> bufferWithMemory;
519         Move<VkBufferView>            bufferView;
520         AccelerationStructureData     asData;
521         deUint32                      initialValue;
522
523         Resource (VkDescriptorType descriptorType_, const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, bool useAABBs, deUint32 initialValue_, deUint32 numElements = 1u)
524                 : descriptorType        (descriptorType_)
525                 , resourceType      (toResourceType(descriptorType))
526                 , sampler           ()
527                 , imageWithMemory   ()
528                 , imageView         ()
529                 , bufferWithMemory  ()
530                 , bufferView        ()
531                 , asData            ()
532                 , initialValue      (initialValue_)
533         {
534                 if (numElements != 1u)
535                         DE_ASSERT(resourceType == ResourceType::BUFFER);
536
537                 switch (resourceType)
538                 {
539                 case ResourceType::SAMPLER:
540                         sampler = makeDefaultSampler(vkd, device);
541                         break;
542
543                 case ResourceType::IMAGE:
544                         imageWithMemory = makeDefaultImage(vkd, device, alloc);
545                         imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
546                         break;
547
548                 case ResourceType::COMBINED_IMAGE_SAMPLER:
549                         sampler         = makeDefaultSampler(vkd, device);
550                         imageWithMemory = makeDefaultImage(vkd, device, alloc);
551                         imageView       = makeDefaultImageView(vkd, device, imageWithMemory->get());
552                         break;
553
554                 case ResourceType::BUFFER:
555                         bufferWithMemory = makeDefaultBuffer(vkd, device, alloc, numElements);
556                         break;
557
558                 case ResourceType::BUFFER_VIEW:
559                         bufferWithMemory = makeDefaultBuffer(vkd, device, alloc);
560                         bufferView       = makeDefaultBufferView(vkd, device, bufferWithMemory->get());
561                         break;
562
563                 case ResourceType::ACCELERATION_STRUCTURE:
564                         {
565                                 const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
566                                 const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
567                                 const auto cmdBuffer    = cmdBufferPtr.get();
568                                 const bool triangles    = !useAABBs;
569
570                                 beginCommandBuffer(vkd, cmdBuffer);
571                                 asData = makeDefaultAccelerationStructure(vkd, device, cmdBuffer, alloc, triangles, getAccelerationStructureOffsetX(initialValue));
572                                 endCommandBuffer(vkd, cmdBuffer);
573                                 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
574                         }
575                         break;
576
577                 default:
578                         DE_ASSERT(false);
579                         break;
580                 }
581
582                 if (imageWithMemory || bufferWithMemory)
583                 {
584                         const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
585                         const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
586                         const auto cmdBuffer    = cmdBufferPtr.get();
587
588                         if (imageWithMemory)
589                         {
590                                 // Prepare staging buffer.
591                                 const auto               bufferSize        = static_cast<VkDeviceSize>(sizeof(initialValue));
592                                 const VkBufferUsageFlags bufferUsage       = (VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
593                                 const auto               stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);
594
595                                 BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
596                                 auto& bufferAlloc = stagingBuffer.getAllocation();
597                                 void* bufferData  = bufferAlloc.getHostPtr();
598
599                                 deMemcpy(bufferData, &initialValue, sizeof(initialValue));
600                                 flushAlloc(vkd, device, bufferAlloc);
601
602                                 beginCommandBuffer(vkd, cmdBuffer);
603
604                                 // Transition and copy image.
605                                 const auto copyRegion         = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
606                                                                                                                                         makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));
607
608                                 // Switch image to TRANSFER_DST_OPTIMAL before copying data to it.
609                                 const auto subresourceRange   = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
610
611                                 const auto preTransferBarrier = makeImageMemoryBarrier(
612                                         0u, VK_ACCESS_TRANSFER_WRITE_BIT,
613                                         VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
614                                         imageWithMemory->get(), subresourceRange);
615
616                                 vkd.cmdPipelineBarrier(
617                                         cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
618                                         0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);
619
620                                 // Copy data to image.
621                                 vkd.cmdCopyBufferToImage(cmdBuffer, stagingBuffer.get(), imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
622
623                                 // Switch image to the GENERAL layout before reading or writing to it from shaders.
624                                 const auto postTransferBarrier = makeImageMemoryBarrier(
625                                         VK_ACCESS_TRANSFER_WRITE_BIT, kShaderAccess,
626                                         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
627                                         imageWithMemory->get(), subresourceRange);
628
629                                 vkd.cmdPipelineBarrier(
630                                         cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
631                                         0u, nullptr, 0u, nullptr, 1u, &postTransferBarrier);
632
633                                 endCommandBuffer(vkd, cmdBuffer);
634                                 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
635                         }
636
637                         if (bufferWithMemory)
638                         {
639                                 auto& bufferAlloc = bufferWithMemory->getAllocation();
640                                 void* bufferData  = bufferAlloc.getHostPtr();
641
642                                 const std::vector<deUint32> bufferValues(numElements, initialValue);
643                                 deMemcpy(bufferData, bufferValues.data(), de::dataSize(bufferValues));
644                                 flushAlloc(vkd, device, bufferAlloc);
645
646                                 beginCommandBuffer(vkd, cmdBuffer);
647
648                                 // Make sure host writes happen before shader reads/writes. Note: this barrier should not be needed in theory, as vkQueueSubmit already makes host writes visible to the device.
649                                 const auto hostToShaderBarrier = makeMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, kShaderAccess);
650
651                                 vkd.cmdPipelineBarrier(
652                                         cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
653                                         1u, &hostToShaderBarrier, 0u, nullptr, 0u, nullptr);
654
655                                 endCommandBuffer(vkd, cmdBuffer);
656                                 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
657                         }
658                 }
659         }
660
661         // Disallow copying: the struct owns move-only Vulkan objects and allocations.
662         Resource (const Resource&) = delete;
663
664         // Make it movable.
665         Resource (Resource&& other) noexcept
666                 : descriptorType        (other.descriptorType)
667                 , resourceType      (other.resourceType)
668                 , sampler           (other.sampler)
669                 , imageWithMemory   (other.imageWithMemory.release())
670                 , imageView         (other.imageView)
671                 , bufferWithMemory  (other.bufferWithMemory.release())
672                 , bufferView        (other.bufferView)
673                 , asData                        (std::move(other.asData))
674                 , initialValue      (other.initialValue)
675         {}
676
677         ~Resource ()
678         {}
679
680         WriteInfo makeWriteInfo () const
681         {
682                 using WriteInfoPtr = de::MovePtr<WriteInfo>;
683
684                 WriteInfoPtr writeInfo;
685
686                 switch (resourceType)
687                 {
688                 case ResourceType::SAMPLER:
689                         {
690                                 const VkDescriptorImageInfo imageInfo = { sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_UNDEFINED };
691                                 writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
692                         }
693                         break;
694
695                 case ResourceType::IMAGE:
696                         {
697                                 const VkDescriptorImageInfo imageInfo = { DE_NULL, imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
698                                 writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
699                         }
700                         break;
701
702                 case ResourceType::COMBINED_IMAGE_SAMPLER:
703                         {
704                                 const VkDescriptorImageInfo imageInfo = { sampler.get(), imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
705                                 writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
706                         }
707                         break;
708
709                 case ResourceType::BUFFER:
710                         {
711                                 const VkDescriptorBufferInfo bufferInfo = { bufferWithMemory->get(), 0ull, static_cast<VkDeviceSize>(sizeof(deUint32)) };
712                                 writeInfo = WriteInfoPtr (new WriteInfo(bufferInfo));
713                         }
714                         break;
715
716                 case ResourceType::BUFFER_VIEW:
717                         writeInfo = WriteInfoPtr (new WriteInfo(bufferView.get()));
718                         break;
719
720                 case ResourceType::ACCELERATION_STRUCTURE:
721                         {
722                                 VkWriteDescriptorSetAccelerationStructureKHR asWrite = initVulkanStructure();
723                                 asWrite.accelerationStructureCount = 1u;
724                                 asWrite.pAccelerationStructures    = asData.tlas.get()->getPtr();
725                                 writeInfo = WriteInfoPtr (new WriteInfo(asWrite));
726                         }
727                         break;
728
729                 default:
730                         DE_ASSERT(false);
731                         break;
732                 }
733
734                 return *writeInfo;
735         }
736
737         tcu::Maybe<deUint32> getStoredValue (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 position = 0u) const
738         {
739                 if (position != 0u)
740                         DE_ASSERT(static_cast<bool>(bufferWithMemory));
741
742                 if (imageWithMemory || bufferWithMemory)
743                 {
744                         // Command pool and buffer.
745                         const auto cmdPool      = makeCommandPool(vkd, device, qIndex);
746                         const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
747                         const auto cmdBuffer    = cmdBufferPtr.get();
748
749                         if (imageWithMemory)
750                         {
751                                 // Prepare staging buffer.
752                                 deUint32                 result;
753                                 const auto               bufferSize        = static_cast<VkDeviceSize>(sizeof(result));
754                                 const VkBufferUsageFlags bufferUsage       = (VK_BUFFER_USAGE_TRANSFER_DST_BIT);
755                                 const auto               stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);
756
757                                 BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
758                                 auto& bufferAlloc = stagingBuffer.getAllocation();
759                                 void* bufferData  = bufferAlloc.getHostPtr();
760
761                                 // Copy image value to staging buffer.
762                                 beginCommandBuffer(vkd, cmdBuffer);
763
764                                 // Make sure shader accesses happen before transfers and prepare image for transfer.
765                                 const auto colorResourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
766
767                                 const auto preTransferBarrier = makeImageMemoryBarrier(
768                                         kShaderAccess, VK_ACCESS_TRANSFER_READ_BIT,
769                                         VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
770                                         imageWithMemory->get(), colorResourceRange);
771
772                                 vkd.cmdPipelineBarrier(
773                                         cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
774                                         0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);
775
776                                 // Copy image contents to staging buffer.
777                                 const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
778                                                                                                                         makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));
779                                 vkd.cmdCopyImageToBuffer(cmdBuffer, imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, stagingBuffer.get(), 1u, &copyRegion);
780
781                                 // Make sure writes are visible from the host.
782                                 const auto postTransferBarrier = makeMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT);
783                                 vkd.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u, 1u, &postTransferBarrier, 0u, nullptr, 0u, nullptr);
784
785                                 endCommandBuffer(vkd, cmdBuffer);
786                                 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
787
788                                 // Get value from staging buffer.
789                                 invalidateAlloc(vkd, device, bufferAlloc);
790                                 deMemcpy(&result, bufferData, sizeof(result));
791                                 return tcu::just(result);
792                         }
793
794                         if (bufferWithMemory)
795                         {
796                                 auto&       bufferAlloc = bufferWithMemory->getAllocation();
797                                 auto        bufferData  = reinterpret_cast<const char*>(bufferAlloc.getHostPtr());
798                                 deUint32    result;
799
800                                 // Make sure shader writes are visible from the host.
801                                 beginCommandBuffer(vkd, cmdBuffer);
802
803                                 const auto shaderToHostBarrier = makeMemoryBarrier(kShaderAccess, VK_ACCESS_HOST_READ_BIT);
804                                 vkd.cmdPipelineBarrier(
805                                         cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u,
806                                         1u, &shaderToHostBarrier, 0u, nullptr, 0u, nullptr);
807
808                                 endCommandBuffer(vkd, cmdBuffer);
809                                 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
810
811                                 invalidateAlloc(vkd, device, bufferAlloc);
812                                 deMemcpy(&result, bufferData + sizeof(deUint32) * static_cast<size_t>(position), sizeof(result));
813                                 return tcu::just(result);
814                         }
815                 }
816
817                 return tcu::Nothing;
818         }
819 };
820
821 struct BindingInterface
822 {
823         virtual ~BindingInterface () {}
824
825         // Maximum number of types used by the binding, which is also the minimum number of iterations needed to test all its mutable types.
826         virtual deUint32 maxTypes () const = 0;
827
828         // Types that will be used by the binding at a given iteration.
829         virtual std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const = 0;
830
831         // Binding's main type.
832         virtual VkDescriptorType mainType () const = 0;
833
834         // Binding's list of mutable types, if present.
835         virtual std::vector<VkDescriptorType> mutableTypes () const = 0;
836
837         // Descriptor count in the binding.
838         virtual size_t size () const = 0;
839
840         // Is the binding an array binding?
841         virtual bool isArray () const = 0;
842
843         // Is the binding an unbounded array?
844         virtual bool isUnbounded () const = 0;
845
846         // Will the binding use different descriptor types in a given iteration?
847         virtual bool needsAliasing (deUint32 iteration) const
848         {
849                 const auto                 typesVec = typesAtIteration(iteration);
850                 std::set<VkDescriptorType> descTypes(begin(typesVec), end(typesVec));
851                 return (descTypes.size() > 1u);
852         }
853
854         // Will the binding need aliasing on any iteration up to a given number?
855         virtual bool needsAliasingUpTo (deUint32 numIterations) const
856         {
857                 std::vector<bool> needsAliasingFlags;
858                 needsAliasingFlags.reserve(numIterations);
859
860                 for (deUint32 iter = 0u; iter < numIterations; ++iter)
861                         needsAliasingFlags.push_back(needsAliasing(iter));
862
863                 return std::any_of(begin(needsAliasingFlags), end(needsAliasingFlags), [] (bool f) { return f; });
864         }
865
866 private:
867         virtual bool hasDescriptorType (deUint32 iteration, VkDescriptorType descriptorType) const
868         {
869                 const auto typesVec = typesAtIteration(iteration);
870                 return (std::find(begin(typesVec), end(typesVec), descriptorType) != end(typesVec));
871         }
872
873 public:
874         // Convert this binding to a mutable or non-mutable equivalent, returning the new binding.
875         virtual de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const = 0;
876         virtual de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const = 0;
877
878         // Create resources needed to back up this binding.
879         virtual std::vector<Resource> createResources (
880                 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
881                 deUint32 iteration, bool useAABBs, deUint32 baseValue) const = 0;
882
883         // Get GLSL binding declarations. Note: no array size means no array; a negative size means an unbounded array.
884         virtual std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const = 0;
885
886         // Get GLSL statements to check this binding.
887         virtual std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const = 0;
888 };
889
890 // Represents a single binding that will be used in a test.
891 class SingleBinding : public BindingInterface
892 {
893 private:
894         VkDescriptorType              type;             // The descriptor type.
895         std::vector<VkDescriptorType> mutableTypesVec;  // The types that will be used for each iteration of a test if mutable.
896
897 public:
898         SingleBinding (VkDescriptorType type_, std::vector<VkDescriptorType> mutableTypes_)
899                 : type              (type_)
900                 , mutableTypesVec   (std::move(mutableTypes_))
901         {
902                 static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
903                 const auto        kBeginForbidden        = begin(kForbiddenMutableTypes);
904                 const auto        kEndForbidden          = end(kForbiddenMutableTypes);
905
906                 // Avoid unused-variable warnings in release builds, where the asserts below compile out.
907                 DE_UNREF(kBeginForbidden);
908                 DE_UNREF(kEndForbidden);
909
910                 if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
911                 {
912                         DE_ASSERT(mutableTypesVec.empty());
913                 }
914                 else
915                 {
916                         DE_ASSERT(!mutableTypesVec.empty());
917                         DE_ASSERT(std::none_of(begin(mutableTypesVec), end(mutableTypesVec),
918                                                [&kBeginForbidden, &kEndForbidden] (VkDescriptorType t) -> bool {
919                                                        return std::find(kBeginForbidden, kEndForbidden, t) != kEndForbidden;
920                                                }));
921                 }
922         }
923
924         deUint32 maxTypes () const override
925         {
926                 if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
927                         return 1u;
928                 const auto vecSize = mutableTypesVec.size();
929                 DE_ASSERT(vecSize <= std::numeric_limits<deUint32>::max());
930                 return static_cast<deUint32>(vecSize);
931         }
932
933         VkDescriptorType typeAtIteration (deUint32 iteration) const
934         {
935                 return typesAtIteration(iteration)[0];
936         }
937
938         std::vector<VkDescriptorType> usedTypes () const
939         {
940                 if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
941                         return std::vector<VkDescriptorType>(1u, type);
942                 return mutableTypesVec;
943         }
944
945         std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
946         {
947                 const auto typesVec = usedTypes();
948                 return std::vector<VkDescriptorType>(1u, typesVec[static_cast<size_t>(iteration) % typesVec.size()]);
949         }
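        // Example: a mutable binding with usedTypes() == {A, B, C} uses type A on iterations 0, 3, 6...,
        // type B on iterations 1, 4, 7... and type C on iterations 2, 5, 8...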
950
951         VkDescriptorType mainType () const override
952         {
953                 return type;
954         }
955
956         std::vector<VkDescriptorType> mutableTypes () const override
957         {
958                 return mutableTypesVec;
959         }
960
961         size_t size () const override
962         {
963                 return size_t{1u};
964         }
965
966         bool isArray () const override
967         {
968                 return false;
969         }
970
971         bool isUnbounded () const override
972         {
973                 return false;
974         }
975
976         de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
977         {
978                 DE_UNREF(iteration);
979
980                 static const auto kMandatoryMutableTypeFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
981                 if (type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
982                 {
983                         const auto descFlags = toDescriptorTypeFlags(mutableTypesVec);
984                         return de::MovePtr<BindingInterface>(new SingleBinding(type, toDescriptorTypeVector(descFlags)));
985                 }
986
987                 // Make sure it's not a forbidden mutable type.
988                 static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
989                 DE_ASSERT(std::find(begin(kForbiddenMutableTypes), end(kForbiddenMutableTypes), type) == end(kForbiddenMutableTypes));
990
991                 // Convert the binding to mutable using a wider set of descriptor types if possible, including the binding type.
992                 const auto descFlags = (kMandatoryMutableTypeFlags | toDescriptorTypeFlagBit(type));
993
994                 return de::MovePtr<BindingInterface>(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, toDescriptorTypeVector(descFlags)));
995         }
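        // Example: calling toMutable() on a VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER binding returns a
        // VK_DESCRIPTOR_TYPE_MUTABLE_VALVE binding whose mutable type list is the mandatory mutable types
        // plus VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER itself.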
996
997         de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
998         {
999                 return de::MovePtr<BindingInterface>(new SingleBinding(typeAtIteration(iteration), std::vector<VkDescriptorType>()));
1000         }
1001
1002         std::vector<Resource> createResources (
1003                 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
1004                 deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
1005         {
1006                 const auto descriptorType = typeAtIteration(iteration);
1007
1008                 std::vector<Resource> resources;
1009                 resources.emplace_back(descriptorType, vkd, device, alloc, qIndex, queue, useAABBs, baseValue);
1010                 return resources;
1011         }
1012
1013         std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
1014         {
1015                 const auto         descriptorType = typeAtIteration(iteration);
1016                 const std::string  arraySuffix    = ((static_cast<bool>(arraySize)) ? ((arraySize.get() < 0) ? "[]" : ("[" + de::toString(arraySize.get()) + "]")) : "");
1017                 const std::string  layoutAttribs  = "set=" + de::toString(setNum) + ", binding=" + de::toString(bindingNum);
1018                 const std::string  bindingSuffix  = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
1019                 const std::string  nameSuffix     = bindingSuffix + arraySuffix;
1020                 std::ostringstream declarations;
1021
1022                 declarations << "layout (";
1023
1024                 switch (descriptorType)
1025                 {
1026                 case VK_DESCRIPTOR_TYPE_SAMPLER:
1027                         declarations << layoutAttribs << ") uniform sampler sampler" << nameSuffix;
1028                         break;
1029
1030                 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1031                         declarations << layoutAttribs << ") uniform usampler2D combinedSampler" << nameSuffix;
1032                         break;
1033
1034                 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1035                         declarations << layoutAttribs << ") uniform utexture2D sampledImage" << nameSuffix;
1036                         break;
1037
1038                 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1039                         declarations << layoutAttribs << ") uniform uboBlock" << bindingSuffix << " { uint val; } ubo" << nameSuffix;
1040                         break;
1041
1042                 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1043                         declarations << layoutAttribs << ") buffer sboBlock" << bindingSuffix << " { uint val; } ssbo" << nameSuffix;
1044                         break;
1045
1046                 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1047                         declarations << layoutAttribs << ") uniform utextureBuffer uniformTexel" << nameSuffix;
1048                         break;
1049
1050                 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1051                         declarations << layoutAttribs << ", r32ui) uniform uimageBuffer storageTexel" << nameSuffix;
1052                         break;
1053
1054                 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1055                         declarations << layoutAttribs << ", r32ui) uniform uimage2D storageImage" << nameSuffix;
1056                         break;
1057
1058                 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1059                         declarations << layoutAttribs << ", input_attachment_index=" << inputAttachmentIdx << ") uniform usubpassInput inputAttachment" << nameSuffix;
1060                         break;
1061
1062                 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
1063                         declarations << layoutAttribs << ") uniform accelerationStructureEXT accelerationStructure" << nameSuffix;
1064                         break;
1065
1066                 default:
1067                         DE_ASSERT(false);
1068                         break;
1069                 }
1070
1071                 declarations << ";\n";
1072
1073                 return declarations.str();
1074         }
1075
1076         std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
1077         {
1078                 const auto        descriptorType = typeAtIteration(iteration);
1079                 const std::string bindingSuffix  = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
1080
1081                 std::string indexSuffix;
1082                 if (arrayIndex)
1083                 {
1084                         indexSuffix = de::toString(arrayIndex.get());
1085                         if (usePushConstants)
1086                                 indexSuffix += " + pc.zero";
1087                         indexSuffix = "[" + indexSuffix + "]";
1088                 }
1089
1090                 const std::string nameSuffix         = bindingSuffix + indexSuffix;
1091                 const std::string baseValue          = toHex(baseValue_);
1092                 const std::string externalImageValue = toHex(getExternalSampledImageValue());
1093                 const std::string mask               = toHex(getStoredValueMask());
1094
1095                 std::ostringstream checks;
1096
1097                 // Note: all of these depend on an external anyError uint variable.
1098                 switch (descriptorType)
1099                 {
1100                 case VK_DESCRIPTOR_TYPE_SAMPLER:
1101                         // Note this depends on an "externalSampledImage" binding.
1102                         checks << "    {\n";
1103                         checks << "      uint readValue = texture(usampler2D(externalSampledImage, sampler" << nameSuffix << "), vec2(0, 0)).r;\n";
1104                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1105                         checks << "      anyError |= ((readValue == " << externalImageValue << ") ? 0u : 1u);\n";
1106                         //checks << "      anyError = readValue;\n";
1107                         checks << "    }\n";
1108                         break;
1109
1110                 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1111                         checks << "    {\n";
1112                         checks << "      uint readValue = texture(combinedSampler" << nameSuffix << ", vec2(0, 0)).r;\n";
1113                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1114                         checks << "      anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1115                         //checks << "      anyError = readValue;\n";
1116                         checks << "    }\n";
1117                         break;
1118
1119                 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1120                         // Note this depends on an "externalSampler" binding.
1121                         checks << "    {\n";
1122                         checks << "      uint readValue = texture(usampler2D(sampledImage" << nameSuffix << ", externalSampler), vec2(0, 0)).r;\n";
1123                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1124                         checks << "      anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1125                         //checks << "      anyError = readValue;\n";
1126                         checks << "    }\n";
1127                         break;
1128
1129                 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1130                         checks << "    {\n";
1131                         checks << "      uint readValue = ubo" << nameSuffix << ".val;\n";
1132                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1133                         checks << "      anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1134                         //checks << "      anyError = readValue;\n";
1135                         checks << "    }\n";
1136                         break;
1137
1138                 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1139                         checks << "    {\n";
1140                         checks << "      uint readValue = ssbo" << nameSuffix << ".val;\n";
1141                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1142                         checks << "      anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1143                         //checks << "      anyError = readValue;\n";
1144                         // Check writes.
1145                         checks << "      ssbo" << nameSuffix << ".val = (readValue | " << mask << ");\n";
1146                         checks << "    }\n";
1147                         break;
1148
1149                 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1150                         checks << "    {\n";
1151                         checks << "      uint readValue = texelFetch(uniformTexel" << nameSuffix << ", 0).x;\n";
1152                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1153                         checks << "      anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1154                         //checks << "      anyError = readValue;\n";
1155                         checks << "    }\n";
1156                         break;
1157
1158                 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1159                         checks << "    {\n";
1160                         checks << "      uint readValue = imageLoad(storageTexel" << nameSuffix << ", 0).x;\n";
1161                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1162                         checks << "      anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1163                         //checks << "      anyError = readValue;\n";
1164                         checks << "      readValue |= " << mask << ";\n";
1165                         // Check writes.
1166                         checks << "      imageStore(storageTexel" << nameSuffix << ", 0, uvec4(readValue, 0, 0, 0));\n";
1167                         checks << "    }\n";
1168                         break;
1169
1170                 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1171                         checks << "    {\n";
1172                         checks << "      uint readValue = imageLoad(storageImage" << nameSuffix << ", ivec2(0, 0)).x;\n";
1173                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1174                         checks << "      anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1175                         //checks << "      anyError = readValue;\n";
1176                         checks << "      readValue |= " << mask << ";\n";
1177                         // Check writes.
1178                         checks << "      imageStore(storageImage" << nameSuffix << ", ivec2(0, 0), uvec4(readValue, 0, 0, 0));\n";
1179                         checks << "    }\n";
1180                         break;
1181
1182                 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1183                         checks << "    {\n";
1184                         checks << "      uint readValue = subpassLoad(inputAttachment" << nameSuffix << ").x;\n";
1185                         checks << "      debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1186                         checks << "      anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1187                         //checks << "      anyError = readValue;\n";
1188                         checks << "    }\n";
1189                         break;
1190
1191                 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
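			// Shoot a ray query from the X offset encoded in the expected descriptor value; finding any candidate
			// intersection while traversing the acceleration structure counts as a successful read.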
1192                         checks << "    {\n";
1193                         checks << "      const uint cullMask = 0xFF;\n";
1194                         checks << "      const vec3 origin = vec3(" << getAccelerationStructureOffsetX(baseValue_) << ".0, 0.0, 0.0);\n";
1195                         checks << "      const vec3 direction = vec3(0.0, 0.0, 1.0);\n";
1196                         checks << "      const float tmin = 1.0;\n";
1197                         checks << "      const float tmax = 10.0;\n";
1198                         checks << "      uint candidateFound = 0u;\n";
1199                         checks << "      rayQueryEXT rq;\n";
1200                         checks << "      rayQueryInitializeEXT(rq, accelerationStructure" << nameSuffix << ", gl_RayFlagsNoneEXT, cullMask, origin, tmin, direction, tmax);\n";
1201                         checks << "      while (rayQueryProceedEXT(rq)) {\n";
1202                         checks << "        const uint candidateType = rayQueryGetIntersectionTypeEXT(rq, false);\n";
1203                         checks << "        if (candidateType == gl_RayQueryCandidateIntersectionTriangleEXT || candidateType == gl_RayQueryCandidateIntersectionAABBEXT) {\n";
1204                         checks << "          candidateFound = 1u;\n";
1205                         checks << "        }\n";
1206                         checks << "      }\n";
1207                         checks << "      anyError |= ((candidateFound == 1u) ? 0u : 1u);\n";
1208                         checks << "    }\n";
1209                         break;
1210
1211                 default:
1212                         DE_ASSERT(false);
1213                         break;
1214                 }
1215
1216                 return checks.str();
1217         }
1218 };
1219
1220 // Represents an array of bindings. Individual bindings are stored as SingleBindings because each one of them may take a different
1221 // type in each iteration (i.e. they can all have different descriptor type vectors).
1222 class ArrayBinding : public BindingInterface
1223 {
1224 private:
1225         bool                       unbounded;
1226         std::vector<SingleBinding> bindings;
1227
1228 public:
1229         ArrayBinding (bool unbounded_, std::vector<SingleBinding> bindings_)
1230                 : unbounded (unbounded_)
1231                 , bindings  (std::move(bindings_))
1232         {
1233                 // We need to check that all single bindings have the same effective type, even if their mutable descriptor type lists are ordered differently.
1234                 DE_ASSERT(!bindings.empty());
1235
1236                 std::set<VkDescriptorType>    basicTypes;
1237                 std::set<DescriptorTypeFlags> bindingTypes;
1238
1239                 for (const auto& b : bindings)
1240                 {
1241                         basicTypes.insert(b.mainType());
1242                         bindingTypes.insert(toDescriptorTypeFlags(b.usedTypes()));
1243                 }
1244
1245                 DE_ASSERT(basicTypes.size() == 1u);
1246                 DE_ASSERT(bindingTypes.size() == 1u);
1247
1248                 // For release builds.
1249                 DE_UNREF(basicTypes);
1250                 DE_UNREF(bindingTypes);
1251         }
1252
1253         deUint32 maxTypes () const override
1254         {
1255                 // Each binding may have the same effective type but a different number of iterations due to repeated types.
1256                 std::vector<size_t> bindingSizes;
1257                 bindingSizes.reserve(bindings.size());
1258
1259                 std::transform(begin(bindings), end(bindings), std::back_inserter(bindingSizes),
1260                                [] (const SingleBinding& b) { return b.usedTypes().size(); });
1261
1262                 const auto maxElement = std::max_element(begin(bindingSizes), end(bindingSizes));
1263                 DE_ASSERT(maxElement != end(bindingSizes));
1264                 DE_ASSERT(*maxElement <= std::numeric_limits<deUint32>::max());
1265                 return static_cast<deUint32>(*maxElement);
1266         }
1267
1268         std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
1269         {
1270                 std::vector<VkDescriptorType> result;
1271                 result.reserve(bindings.size());
1272
1273                 for (const auto& b : bindings)
1274                         result.push_back(b.typeAtIteration(iteration));
1275
1276                 return result;
1277         }
1278
1279         VkDescriptorType mainType () const override
1280         {
1281                 return bindings[0].mainType();
1282         }
1283
1284         std::vector<VkDescriptorType> mutableTypes () const override
1285         {
1286                 return bindings[0].mutableTypes();
1287         }
1288
1289         size_t size () const override
1290         {
1291                 return bindings.size();
1292         }
1293
1294         bool isArray () const override
1295         {
1296                 return true;
1297         }
1298
1299         bool isUnbounded () const override
1300         {
1301                 return unbounded;
1302         }
1303
1304         de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
1305         {
1306                 // Replicate the first binding once converted, as all are equivalent.
1307                 const auto                       firstBindingPtr = bindings[0].toMutable(iteration);
1308                 const auto                       firstBinding    = *dynamic_cast<SingleBinding*>(firstBindingPtr.get());
1309                 const std::vector<SingleBinding> newBindings     (bindings.size(), firstBinding);
1310
1311                 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1312         }
1313
1314         de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
1315         {
1316                 // Make sure this binding can be converted to nonmutable for a given iteration.
1317                 DE_ASSERT(!needsAliasing(iteration));
1318
1319                 // We could call toNonMutable() on each SingleBinding, but the result would be the same.
1320                 const auto                       descType       = bindings[0].typeAtIteration(iteration);
1321                 const SingleBinding              firstBinding   (descType, std::vector<VkDescriptorType>());
1322                 const std::vector<SingleBinding> newBindings    (bindings.size(), firstBinding);
1323
1324                 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1325         }
1326
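	// Creates one resource per array element; element i uses (baseValue + i) as its expected numeric value.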
1327         std::vector<Resource> createResources (
1328                 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
1329                 deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
1330         {
1331                 std::vector<Resource> resources;
1332                 const auto            numBindings = static_cast<deUint32>(bindings.size());
1333
1334                 for (deUint32 i = 0u; i < numBindings; ++i)
1335                 {
1336                         auto resourceVec = bindings[i].createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, baseValue + i);
1337                         resources.emplace_back(std::move(resourceVec[0]));
1338                 }
1339
1340                 return resources;
1341         }
1342
1343         // We will ignore the array size parameter.
1344         std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
1345         {
1346                 const auto descriptorCount = bindings.size();
1347                 const auto arraySizeVal    = (isUnbounded() ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(descriptorCount)));
1348
1349                 DE_UNREF(arraySize);
1350                 DE_ASSERT(descriptorCount < static_cast<size_t>(std::numeric_limits<deInt32>::max()));
1351
1352                 // Maybe a single declaration is enough.
1353                 if (!needsAliasing(iteration))
1354                         return bindings[0].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1355
1356                 // Aliasing needed. Avoid reusing types.
1357                 const auto                 descriptorTypes = typesAtIteration(iteration);
1358                 std::set<VkDescriptorType> usedTypes;
1359                 std::ostringstream         declarations;
1360
1361                 for (size_t descriptorIdx = 0u; descriptorIdx < descriptorCount; ++descriptorIdx)
1362                 {
1363                         const auto& descriptorType = descriptorTypes[descriptorIdx];
1364                         if (usedTypes.count(descriptorType) > 0)
1365                                 continue;
1366
1367                         usedTypes.insert(descriptorType);
1368                         declarations << bindings[descriptorIdx].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1369                 }
1370
1371                 return declarations.str();
1372         }
1373
1374         std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
1375         {
1376                 DE_ASSERT(!arrayIndex);
1377                 DE_UNREF(arrayIndex); // For release builds.
1378
1379                 std::ostringstream checks;
1380                 const auto         numDescriptors = static_cast<deUint32>(bindings.size());
1381
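		// Generate one check block per array element, passing the element index down so the GLSL code indexes into
		// the binding array and compares against a per-element base value.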
1382                 for (deUint32 descriptorIdx = 0u; descriptorIdx < numDescriptors; ++descriptorIdx)
1383                 {
1384                         const auto& binding = bindings[descriptorIdx];
1385                         checks << binding.glslCheckStatements(iteration, setNum, bindingNum, baseValue_ + descriptorIdx, tcu::just(descriptorIdx), usePushConstants);
1386                 }
1387
1388                 return checks.str();
1389         }
1390 };
1391
1392 class DescriptorSet;
1393
1394 using DescriptorSetPtr = de::SharedPtr<DescriptorSet>;
1395
1396 class DescriptorSet
1397 {
1398 public:
1399         using BindingInterfacePtr   = de::MovePtr<BindingInterface>;
1400         using BindingPtrVector      = std::vector<BindingInterfacePtr>;
1401
1402 private:
1403         BindingPtrVector bindings;
1404
1405 public:
1406         explicit DescriptorSet (BindingPtrVector& bindings_)
1407                 : bindings(std::move(bindings_))
1408         {
1409                 DE_ASSERT(!bindings.empty());
1410         }
1411
1412         size_t numBindings () const
1413         {
1414                 return bindings.size();
1415         }
1416
1417         const BindingInterface* getBinding (size_t bindingIdx) const
1418         {
1419                 return bindings.at(bindingIdx).get();
1420         }
1421
1422         // Maximum number of descriptor types used by any binding in the set.
1423         deUint32 maxTypes () const
1424         {
1425                 std::vector<deUint32> maxSizes;
1426                 maxSizes.reserve(bindings.size());
1427
1428                 std::transform(begin(bindings), end(bindings), std::back_inserter(maxSizes),
1429                                [] (const BindingInterfacePtr& b) { return b->maxTypes(); });
1430
1431                 const auto maxElement = std::max_element(begin(maxSizes), end(maxSizes));
1432                 DE_ASSERT(maxElement != end(maxSizes));
1433                 return *maxElement;
1434         }
1435
1436         // Create another descriptor set that can be the source for copies when setting descriptor values.
1437         DescriptorSetPtr genSourceSet (SourceSetStrategy strategy, deUint32 iteration) const
1438         {
1439                 BindingPtrVector newBindings;
1440                 for (const auto& b : bindings)
1441                 {
1442                         if (strategy == SourceSetStrategy::MUTABLE)
1443                                 newBindings.push_back(b->toMutable(iteration));
1444                         else
1445                                 newBindings.push_back(b->toNonMutable(iteration));
1446                 }
1447
1448                 return DescriptorSetPtr(new DescriptorSet(newBindings));
1449         }
1450
1451         // Makes a descriptor pool that can be used when allocating descriptors for this set.
1452         Move<VkDescriptorPool> makeDescriptorPool (const DeviceInterface& vkd, VkDevice device, PoolMutableStrategy strategy, VkDescriptorPoolCreateFlags flags) const
1453         {
1454                 std::vector<VkDescriptorPoolSize>             poolSizes;
1455                 std::vector<std::vector<VkDescriptorType>>    mutableTypesVec;
1456                 std::vector<VkMutableDescriptorTypeListVALVE> mutableTypeLists;
1457
1458                 // Make vector element addresses stable.
1459                 const auto bindingCount = numBindings();
1460                 poolSizes.reserve(bindingCount);
1461                 mutableTypesVec.reserve(bindingCount);
1462                 mutableTypeLists.reserve(bindingCount);
1463
1464                 for (const auto& b : bindings)
1465                 {
1466                         const auto                 mainType = b->mainType();
1467                         const VkDescriptorPoolSize poolSize = {
1468                                 mainType,
1469                                 static_cast<deUint32>(b->size()),
1470                         };
1471                         poolSizes.push_back(poolSize);
1472
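			// When the pool uses mutable type lists, every pool size entry needs a matching
			// VkMutableDescriptorTypeListVALVE entry (an empty list for non-mutable bindings).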
1473                         if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1474                         {
1475                                 if (mainType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
1476                                 {
1477                                         if (strategy == PoolMutableStrategy::KEEP_TYPES)
1478                                         {
1479                                                 mutableTypesVec.emplace_back(b->mutableTypes());
1480                                         }
1481                                         else
1482                                         {
1483                                                 // Expand the type list with the mandatory types.
1484                                                 static const auto mandatoryTypesFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
1485                                                 const auto        bindingTypes        = toDescriptorTypeVector(mandatoryTypesFlags | toDescriptorTypeFlags(b->mutableTypes()));
1486
1487                                                 mutableTypesVec.emplace_back(bindingTypes);
1488                                         }
1489
1490                                         const auto& lastVec = mutableTypesVec.back();
1491                                         const VkMutableDescriptorTypeListVALVE typeList = { static_cast<deUint32>(lastVec.size()), de::dataOrNull(lastVec) };
1492                                         mutableTypeLists.push_back(typeList);
1493                                 }
1494                                 else
1495                                 {
1496                                         const VkMutableDescriptorTypeListVALVE typeList = { 0u, nullptr };
1497                                         mutableTypeLists.push_back(typeList);
1498                                 }
1499                         }
1500                         else if (strategy == PoolMutableStrategy::NO_TYPES)
1501                                 ; // Do nothing, we will not use any type list.
1502                         else
1503                                 DE_ASSERT(false);
1504                 }
1505
1506                 VkDescriptorPoolCreateInfo poolCreateInfo = initVulkanStructure();
1507
1508                 poolCreateInfo.maxSets       = 1u;
1509                 poolCreateInfo.flags         = flags;
1510                 poolCreateInfo.poolSizeCount = static_cast<deUint32>(poolSizes.size());
1511                 poolCreateInfo.pPoolSizes    = de::dataOrNull(poolSizes);
1512
1513                 VkMutableDescriptorTypeCreateInfoVALVE mutableInfo = initVulkanStructure();
1514
1515                 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1516                 {
1517                         mutableInfo.mutableDescriptorTypeListCount = static_cast<deUint32>(mutableTypeLists.size());
1518                         mutableInfo.pMutableDescriptorTypeLists    = de::dataOrNull(mutableTypeLists);
1519                         poolCreateInfo.pNext                       = &mutableInfo;
1520                 }
1521
1522                 return createDescriptorPool(vkd, device, &poolCreateInfo);
1523         }
1524
1525 private:
1526         // Building the descriptor set layout create info structure is cumbersome, so we'll reuse the same procedure to check support
1527         // and create the layout. This structure contains the result. "supported" is created as an enum to avoid the Move<> to bool
1528         // conversion cast in the constructors.
1529         struct DescriptorSetLayoutResult
1530         {
1531                 enum class LayoutSupported { NO = 0, YES };
1532
1533                 LayoutSupported             supported;
1534                 Move<VkDescriptorSetLayout> layout;
1535
1536                 explicit DescriptorSetLayoutResult (Move<VkDescriptorSetLayout>&& layout_)
1537                         : supported (LayoutSupported::YES)
1538                         , layout    (layout_)
1539                 {}
1540
1541                 explicit DescriptorSetLayoutResult (LayoutSupported supported_)
1542                         : supported (supported_)
1543                         , layout    ()
1544                 {}
1545         };
1546
1547         DescriptorSetLayoutResult makeOrCheckDescriptorSetLayout (bool checkOnly, const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1548         {
1549                 const auto                                    numIterations = maxTypes();
1550                 std::vector<VkDescriptorSetLayoutBinding>     bindingsVec;
1551                 std::vector<std::vector<VkDescriptorType>>    mutableTypesVec;
1552                 std::vector<VkMutableDescriptorTypeListVALVE> mutableTypeLists;
1553
1554                 // Make vector element addresses stable.
1555                 const auto bindingCount = numBindings();
1556                 bindingsVec.reserve(bindingCount);
1557                 mutableTypesVec.reserve(bindingCount);
1558                 mutableTypeLists.reserve(bindingCount);
1559
1560                 for (size_t bindingIdx = 0u; bindingIdx < bindings.size(); ++bindingIdx)
1561                 {
1562                         const auto& binding = bindings[bindingIdx];
1563                         const auto mainType = binding->mainType();
1564
1565                         const VkDescriptorSetLayoutBinding layoutBinding = {
1566                                 static_cast<deUint32>(bindingIdx),        //    deUint32                        binding;
1567                                 mainType,                                 //    VkDescriptorType        descriptorType;
1568                                 static_cast<deUint32>(binding->size()),   //    deUint32                        descriptorCount;
1569                                 stageFlags,                               //    VkShaderStageFlags      stageFlags;
1570                                 nullptr,                                  //    const VkSampler*        pImmutableSamplers;
1571                         };
1572                         bindingsVec.push_back(layoutBinding);
1573
1574                         // This list may be empty for non-mutable types, which is fine.
1575                         mutableTypesVec.push_back(binding->mutableTypes());
1576                         const auto& lastVec = mutableTypesVec.back();
1577
1578                         const VkMutableDescriptorTypeListVALVE typeList = {
1579                                 static_cast<deUint32>(lastVec.size()),  //  deUint32                            descriptorTypeCount;
1580                                 de::dataOrNull(lastVec),                //  const VkDescriptorType*     pDescriptorTypes;
1581                         };
1582                         mutableTypeLists.push_back(typeList);
1583                 }
1584
1585                 // Make sure to include the variable descriptor count and/or update after bind binding flags.
1586                 const bool        updateAfterBind = ((createFlags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) != 0u);
1587                 bool              lastIsUnbounded = false;
1588                 bool              aliasingNeeded  = false;
1589                 std::vector<bool> bindingNeedsAliasing(bindings.size(), false);
1590
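		// Only the last binding is allowed to be unbounded (variable descriptor count), and any binding aliasing
		// several descriptor types across iterations will later be marked as partially bound.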
1591                 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1592                 {
1593                         if (bindingIdx < bindings.size() - 1)
1594                                 DE_ASSERT(!bindings[bindingIdx]->isUnbounded());
1595                         else
1596                                 lastIsUnbounded = bindings[bindingIdx]->isUnbounded();
1597
1598                         if (bindings[bindingIdx]->needsAliasingUpTo(numIterations))
1599                         {
1600                                 bindingNeedsAliasing[bindingIdx] = true;
1601                                 aliasingNeeded = true;
1602                         }
1603                 }
1604
1605                 using FlagsCreateInfoPtr = de::MovePtr<VkDescriptorSetLayoutBindingFlagsCreateInfo>;
1606                 using BindingFlagsVecPtr = de::MovePtr<std::vector<VkDescriptorBindingFlags>>;
1607
1608                 FlagsCreateInfoPtr flagsCreateInfo;
1609                 BindingFlagsVecPtr bindingFlagsVec;
1610
1611                 if (updateAfterBind || lastIsUnbounded || aliasingNeeded)
1612                 {
1613                         flagsCreateInfo = FlagsCreateInfoPtr(new VkDescriptorSetLayoutBindingFlagsCreateInfo);
1614                         *flagsCreateInfo = initVulkanStructure();
1615
1616                         bindingFlagsVec = BindingFlagsVecPtr(new std::vector<VkDescriptorBindingFlags>(bindingsVec.size(), (updateAfterBind ? VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT : 0)));
1617                         if (lastIsUnbounded)
1618                                 bindingFlagsVec->back() |= VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT;
1619
1620                         for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1621                         {
1622                                 if (bindingNeedsAliasing[bindingIdx])
1623                                         bindingFlagsVec->at(bindingIdx) |= VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
1624                         }
1625
1626                         flagsCreateInfo->bindingCount  = static_cast<deUint32>(bindingFlagsVec->size());
1627                         flagsCreateInfo->pBindingFlags = de::dataOrNull(*bindingFlagsVec);
1628                 }
1629
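		// The optional binding flags are chained through the mutable descriptor type create info, which is in turn
		// chained into the layout create info.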
1630                 const VkMutableDescriptorTypeCreateInfoVALVE createInfoValve = {
1631                         VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE,
1632                         flagsCreateInfo.get(),
1633                         static_cast<deUint32>(mutableTypeLists.size()),
1634                         de::dataOrNull(mutableTypeLists),
1635                 };
1636
1637                 const VkDescriptorSetLayoutCreateInfo layoutCreateInfo = {
1638                         VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,    //  VkStructureType                                             sType;
1639                         &createInfoValve,                                       //  const void*                                                 pNext;
1640                         createFlags,                                            //  VkDescriptorSetLayoutCreateFlags    flags;
1641                         static_cast<deUint32>(bindingsVec.size()),              //  deUint32                                                    bindingCount;
1642                         de::dataOrNull(bindingsVec),                            //  const VkDescriptorSetLayoutBinding* pBindings;
1643                 };
1644
1645                 if (checkOnly)
1646                 {
1647                         VkDescriptorSetLayoutSupport support = initVulkanStructure();
1648                         vkd.getDescriptorSetLayoutSupport(device, &layoutCreateInfo, &support);
1649                         DescriptorSetLayoutResult result((support.supported == VK_TRUE) ? DescriptorSetLayoutResult::LayoutSupported::YES
1650                                                                                         : DescriptorSetLayoutResult::LayoutSupported::NO);
1651                         return result;
1652                 }
1653                 else
1654                 {
1655                         DescriptorSetLayoutResult result(createDescriptorSetLayout(vkd, device, &layoutCreateInfo));
1656                         return result;
1657                 }
1658         }
1659
1660 public:
1661         Move<VkDescriptorSetLayout> makeDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1662         {
1663                 return makeOrCheckDescriptorSetLayout(false /*checkOnly*/, vkd, device, stageFlags, createFlags).layout;
1664         }
1665
1666         bool checkDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1667         {
1668                 return (makeOrCheckDescriptorSetLayout(true /*checkOnly*/, vkd, device, stageFlags, createFlags).supported == DescriptorSetLayoutResult::LayoutSupported::YES);
1669         }
1670
1671         size_t numDescriptors () const
1672         {
1673                 size_t total = 0;
1674                 for (const auto& b : bindings)
1675                         total += b->size();
1676                 return total;
1677         }
1678
1679         std::vector<Resource> createResources (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 iteration, bool useAABBs) const
1680         {
1681                 // Create resources for each binding.
1682                 std::vector<Resource> result;
1683                 result.reserve(numDescriptors());
1684
1685                 const auto bindingsCount = static_cast<deUint32>(bindings.size());
1686
1687                 for (deUint32 bindingIdx = 0u; bindingIdx < bindingsCount; ++bindingIdx)
1688                 {
1689                         const auto& binding             = bindings[bindingIdx];
1690                         auto        bindingResources    = binding->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, getDescriptorNumericValue(iteration, bindingIdx));
1691
1692                         for (auto& resource : bindingResources)
1693                                 result.emplace_back(std::move(resource));
1694                 }
1695
1696                 return result;
1697         }
1698
1699         // Updates a descriptor set with the given resources. Note: the set must have been created with a layout that's compatible with this object.
1700         void updateDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet set, deUint32 iteration, const std::vector<Resource>& resources) const
1701         {
1702                 // Make sure the number of resources is correct.
1703                 const auto numResources = resources.size();
1704                 DE_ASSERT(numDescriptors() == numResources);
1705
1706                 std::vector<VkWriteDescriptorSet> descriptorWrites;
1707                 descriptorWrites.reserve(numResources);
1708
1709                 std::vector<VkDescriptorImageInfo>                          imageInfoVec;
1710                 std::vector<VkDescriptorBufferInfo>                         bufferInfoVec;
1711                 std::vector<VkBufferView>                                   bufferViewVec;
1712                 std::vector<VkWriteDescriptorSetAccelerationStructureKHR>   asWriteVec;
1713                 size_t                                                      resourceIdx = 0;
1714
1715                 // We'll be storing pointers to elements of these vectors as we're appending elements, so we need their addresses to be stable.
1716                 imageInfoVec.reserve(numResources);
1717                 bufferInfoVec.reserve(numResources);
1718                 bufferViewVec.reserve(numResources);
1719                 asWriteVec.reserve(numResources);
1720
1721                 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1722                 {
1723                         const auto& binding         = bindings[bindingIdx];
1724                         const auto  descriptorTypes = binding->typesAtIteration(iteration);
1725
1726                         for (size_t descriptorIdx = 0; descriptorIdx < binding->size(); ++descriptorIdx)
1727                         {
1728                                 // Make sure the resource type matches the expected value.
1729                                 const auto& resource       = resources[resourceIdx];
1730                                 const auto& descriptorType = descriptorTypes[descriptorIdx];
1731
1732                                 DE_ASSERT(resource.descriptorType == descriptorType);
1733
1734                                 // Obtain the descriptor write info for the resource.
1735                                 const auto writeInfo = resource.makeWriteInfo();
1736
1737                                 switch (writeInfo.writeType)
1738                                 {
1739                                 case WriteType::IMAGE_INFO:                  imageInfoVec.push_back(writeInfo.imageInfo);   break;
1740                                 case WriteType::BUFFER_INFO:                 bufferInfoVec.push_back(writeInfo.bufferInfo); break;
1741                                 case WriteType::BUFFER_VIEW:                 bufferViewVec.push_back(writeInfo.bufferView); break;
1742                                 case WriteType::ACCELERATION_STRUCTURE_INFO: asWriteVec.push_back(writeInfo.asInfo);        break;
1743                                 default: DE_ASSERT(false); break;
1744                                 }
1745
1746                                 // Add a new VkWriteDescriptorSet struct or extend the last one with more info. This helps us exercise different implementation code paths.
1747                                 bool extended = false;
1748
1749                                 if (!descriptorWrites.empty() && descriptorIdx > 0)
1750                                 {
1751                                         auto& last = descriptorWrites.back();
1752                                         if (last.dstSet == set /* this should always be true */ &&
1753                                             last.dstBinding == bindingIdx && (last.dstArrayElement + last.descriptorCount) == descriptorIdx &&
1754                                             last.descriptorType == descriptorType &&
1755                                             writeInfo.writeType != WriteType::ACCELERATION_STRUCTURE_INFO)
1756                                         {
1757                                                 // The new write should be in the same vector (imageInfoVec, bufferInfoVec or bufferViewVec) so increasing the count works.
1758                                                 ++last.descriptorCount;
1759                                                 extended = true;
1760                                         }
1761                                 }
1762
1763                                 if (!extended)
1764                                 {
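					// Acceleration structure data goes in the pNext chain; the image/buffer info pointers are only
					// filled in for the matching write type.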
1765                                         const VkWriteDescriptorSet write = {
1766                                                 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1767                                                 ((writeInfo.writeType == WriteType::ACCELERATION_STRUCTURE_INFO) ? &asWriteVec.back() : nullptr),
1768                                                 set,
1769                                                 static_cast<deUint32>(bindingIdx),
1770                                                 static_cast<deUint32>(descriptorIdx),
1771                                                 1u,
1772                                                 descriptorType,
1773                                                 (writeInfo.writeType == WriteType::IMAGE_INFO  ? &imageInfoVec.back()  : nullptr),
1774                                                 (writeInfo.writeType == WriteType::BUFFER_INFO ? &bufferInfoVec.back() : nullptr),
1775                                                 (writeInfo.writeType == WriteType::BUFFER_VIEW ? &bufferViewVec.back() : nullptr),
1776                                         };
1777                                         descriptorWrites.push_back(write);
1778                                 }
1779
1780                                 ++resourceIdx;
1781                         }
1782                 }
1783
1784                 // Finally, update the descriptor set with all the writes.
1785                 vkd.updateDescriptorSets(device, static_cast<deUint32>(descriptorWrites.size()), de::dataOrNull(descriptorWrites), 0u, nullptr);
1786         }
1787
1788         // Copies descriptors between two descriptor sets. Both sets must have been allocated with layouts compatible with this object.
1789         void copyDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet srcSet, VkDescriptorSet dstSet) const
1790         {
1791                 std::vector<VkCopyDescriptorSet> copies;
1792
1793                 for (size_t bindingIdx = 0; bindingIdx < numBindings(); ++bindingIdx)
1794                 {
1795                         const auto& binding         = getBinding(bindingIdx);
1796                         const auto  bindingNumber   = static_cast<deUint32>(bindingIdx);
1797                         const auto  descriptorCount = static_cast<deUint32>(binding->size());
1798
1799                         const VkCopyDescriptorSet copy =
1800                         {
1801                                 VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
1802                                 nullptr,
1803                                 // set, binding, array element.
1804                                 srcSet, bindingNumber, 0u,
1805                                 dstSet, bindingNumber, 0u,
1806                                 descriptorCount,
1807                         };
1808
1809                         copies.push_back(copy);
1810                 }
1811
1812                 vkd.updateDescriptorSets(device, 0u, nullptr, static_cast<deUint32>(copies.size()), de::dataOrNull(copies));
1813         }
1814
1815         // Does any binding in the set need aliasing in a given iteration?
1816         bool needsAliasing (deUint32 iteration) const
1817         {
1818                 std::vector<bool> aliasingNeededFlags;
1819                 aliasingNeededFlags.reserve(bindings.size());
1820
1821                 std::transform(begin(bindings), end(bindings), std::back_inserter(aliasingNeededFlags),
1822                                [iteration] (const BindingInterfacePtr& b) { return b->needsAliasing(iteration); });
1823                 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1824         }
1825
1826         // Does any binding in the set need aliasing in any iteration?
1827         bool needsAnyAliasing () const
1828         {
1829                 const auto        numIterations       = maxTypes();
1830                 std::vector<bool> aliasingNeededFlags (numIterations, false);
1831
1832                 for (deUint32 iteration = 0; iteration < numIterations; ++iteration)
1833                         aliasingNeededFlags[iteration] = needsAliasing(iteration);
1834
1835                 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1836         }
1837
1838         // Is the last binding an unbounded array?
1839         bool lastBindingIsUnbounded () const
1840         {
1841                 if (bindings.empty())
1842                         return false;
1843                 return bindings.back()->isUnbounded();
1844         }
1845
1846         // Get the variable descriptor count for the last binding if any.
1847         tcu::Maybe<deUint32> getVariableDescriptorCount () const
1848         {
1849                 if (lastBindingIsUnbounded())
1850                         return tcu::just(static_cast<deUint32>(bindings.back()->size()));
1851                 return tcu::Nothing;
1852         }
1853
1854         // Check if the set uses the given descriptor type at the given iteration.
1855         bool containsTypeAtIteration (VkDescriptorType descriptorType, deUint32 iteration) const
1856         {
1857                 return std::any_of(begin(bindings), end(bindings),
1858                                    [descriptorType, iteration] (const BindingInterfacePtr& b) {
1859                                            const auto types = b->typesAtIteration(iteration);
1860                                            return de::contains(begin(types), end(types), descriptorType);
1861                                    });
1862         }
1863
1864         // Is any binding an array?
1865         bool hasArrays () const
1866         {
1867                 return std::any_of(begin(bindings), end(bindings), [] (const BindingInterfacePtr& b) { return b->isArray(); });
1868         }
1869 };
1870
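// How descriptors are filled: written directly or copied from a source descriptor set.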
1871 enum class UpdateType
1872 {
1873         WRITE = 0,
1874         COPY,
1875 };
1876
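// Kind of source descriptor set used when copying descriptors, if any.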
1877 enum class SourceSetType
1878 {
1879         NORMAL = 0,
1880         HOST_ONLY,
1881         NO_SOURCE,
1882 };
1883
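// Whether descriptors are updated normally or after the set has been bound (update-after-bind).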
1884 enum class UpdateMoment
1885 {
1886         NORMAL = 0,
1887         UPDATE_AFTER_BIND,
1888 };
1889
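// Shader stage that will access the descriptors under test.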
1890 enum class TestingStage
1891 {
1892         COMPUTE = 0,
1893         VERTEX,
1894         TESS_EVAL,
1895         TESS_CONTROL,
1896         GEOMETRY,
1897         FRAGMENT,
1898         RAY_GEN,
1899         INTERSECTION,
1900         ANY_HIT,
1901         CLOSEST_HIT,
1902         MISS,
1903         CALLABLE,
1904 };
1905
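// How descriptor arrays are indexed in the shaders: with constant indices, with an index derived from a push
// constant, or not at all (no arrays).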
1906 enum class ArrayAccessType
1907 {
1908         CONSTANT = 0,
1909         PUSH_CONSTANT,
1910         NO_ARRAY,
1911 };
1912
1913 // Are we testing a ray tracing pipeline stage?
1914 bool isRayTracingStage (TestingStage stage)
1915 {
1916         switch (stage)
1917         {
1918         case TestingStage::RAY_GEN:
1919         case TestingStage::INTERSECTION:
1920         case TestingStage::ANY_HIT:
1921         case TestingStage::CLOSEST_HIT:
1922         case TestingStage::MISS:
1923         case TestingStage::CALLABLE:
1924                 return true;
1925         default:
1926                 break;
1927         }
1928
1929         return false;
1930 }
1931
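// Aggregated test parameters: the descriptor set under test plus how it will be created, updated and accessed.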
1932 struct TestParams
1933 {
1934         DescriptorSetPtr    descriptorSet;
1935         UpdateType          updateType;
1936         SourceSetStrategy   sourceSetStrategy;
1937         SourceSetType       sourceSetType;
1938         PoolMutableStrategy poolMutableStrategy;
1939         UpdateMoment        updateMoment;
1940         ArrayAccessType     arrayAccessType;
1941         TestingStage        testingStage;
1942
1943         VkShaderStageFlags getStageFlags () const
1944         {
1945                 VkShaderStageFlags flags = 0u;
1946
1947                 switch (testingStage)
1948                 {
1949                 case TestingStage::COMPUTE:                     flags |= VK_SHADER_STAGE_COMPUTE_BIT;                                   break;
1950                 case TestingStage::VERTEX:                      flags |= VK_SHADER_STAGE_VERTEX_BIT;                                    break;
1951                 case TestingStage::TESS_EVAL:           flags |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;   break;
1952                 case TestingStage::TESS_CONTROL:        flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;              break;
1953                 case TestingStage::GEOMETRY:            flags |= VK_SHADER_STAGE_GEOMETRY_BIT;                                  break;
1954                 case TestingStage::FRAGMENT:            flags |= VK_SHADER_STAGE_FRAGMENT_BIT;                                  break;
1955                 case TestingStage::RAY_GEN:                     flags |= VK_SHADER_STAGE_RAYGEN_BIT_KHR;                                break;
1956                 case TestingStage::INTERSECTION:        flags |= VK_SHADER_STAGE_INTERSECTION_BIT_KHR;                  break;
1957                 case TestingStage::ANY_HIT:                     flags |= VK_SHADER_STAGE_ANY_HIT_BIT_KHR;                               break;
1958                 case TestingStage::CLOSEST_HIT:         flags |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR;                   break;
1959                 case TestingStage::MISS:                        flags |= VK_SHADER_STAGE_MISS_BIT_KHR;                                  break;
1960                 case TestingStage::CALLABLE:            flags |= VK_SHADER_STAGE_CALLABLE_BIT_KHR;                              break;
1961                 default:
1962                         DE_ASSERT(false);
1963                         break;
1964                 }
1965
1966                 return flags;
1967         }
1968
1969         VkPipelineStageFlags getPipelineWriteStage () const
1970         {
1971                 VkPipelineStageFlags flags = 0u;
1972
1973                 switch (testingStage)
1974                 {
1975                 case TestingStage::COMPUTE:                     flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;                                  break;
1976                 case TestingStage::VERTEX:                      flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;                                   break;
1977                 case TestingStage::TESS_EVAL:           flags |= VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT;  break;
1978                 case TestingStage::TESS_CONTROL:        flags |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT;             break;
1979                 case TestingStage::GEOMETRY:            flags |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT;                                 break;
1980                 case TestingStage::FRAGMENT:            flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;                                 break;
1981                 case TestingStage::RAY_GEN:                     // fallthrough
1982                 case TestingStage::INTERSECTION:        // fallthrough
1983                 case TestingStage::ANY_HIT:                     // fallthrough
1984                 case TestingStage::CLOSEST_HIT:         // fallthrough
1985                 case TestingStage::MISS:                        // fallthrough
1986                 case TestingStage::CALLABLE:            flags |= VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR;                  break;
1987                 default:
1988                         DE_ASSERT(false);
1989                         break;
1990                 }
1991
1992                 return flags;
1993         }
1994
1995 private:
1996         VkDescriptorSetLayoutCreateFlags getLayoutCreateFlags (bool isSourceSet) const
1997         {
1998                 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
1999                 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2000
2001                 VkDescriptorSetLayoutCreateFlags createFlags = 0u;
2002
2003                 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2004                         createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT;
2005
2006                 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2007                         createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE;
2008
2009                 return createFlags;
2010         }
2011
2012 public:
2013         VkDescriptorSetLayoutCreateFlags getSrcLayoutCreateFlags () const
2014         {
2015                 return getLayoutCreateFlags(true);
2016         }
2017
2018         VkDescriptorSetLayoutCreateFlags getDstLayoutCreateFlags () const
2019         {
2020                 return getLayoutCreateFlags(false);
2021         }
2022
2023 private:
2024         VkDescriptorPoolCreateFlags getPoolCreateFlags (bool isSourceSet) const
2025         {
2026                 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
2027                 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2028
2029                 VkDescriptorPoolCreateFlags poolCreateFlags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
2030
2031                 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2032                         poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT;
2033
2034                 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2035                         poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE;
2036
2037                 return poolCreateFlags;
2038         }
2039
2040 public:
2041         VkDescriptorPoolCreateFlags getSrcPoolCreateFlags () const
2042         {
2043                 return getPoolCreateFlags(true);
2044         }
2045
2046         VkDescriptorPoolCreateFlags getDstPoolCreateFlags () const
2047         {
2048                 return getPoolCreateFlags(false);
2049         }
2050
2051         VkPipelineBindPoint getBindPoint () const
2052         {
2053                 if (testingStage == TestingStage::COMPUTE)
2054                         return VK_PIPELINE_BIND_POINT_COMPUTE;
2055                 if (isRayTracingStage(testingStage))
2056                         return VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR;
2057                 return VK_PIPELINE_BIND_POINT_GRAPHICS;
2058         }
2059 };
2060
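// Test case: generates shaders, checks support and creates the test instance for the given parameters.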
2061 class MutableTypesTest : public TestCase
2062 {
2063 public:
2064         MutableTypesTest (tcu::TestContext& testCtx, const std::string& name, const std::string& description, const TestParams& params)
2065                 : TestCase(testCtx, name, description)
2066                 , m_params(params)
2067         {}
2068
2069         ~MutableTypesTest () override = default;
2070
2071         void            initPrograms        (vk::SourceCollections& programCollection) const override;
2072         TestInstance*   createInstance      (Context& context) const override;
2073         void            checkSupport        (Context& context) const override;
2074
2075 private:
2076         TestParams      m_params;
2077 };
2078
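// Test instance: runs the test iterations on the device.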
2079 class MutableTypesInstance : public TestInstance
2080 {
2081 public:
2082         MutableTypesInstance (Context& context, const TestParams& params)
2083                 : TestInstance  (context)
2084                 , m_params      (params)
2085         {}
2086
2087         ~MutableTypesInstance () override = default;
2088
2089         tcu::TestStatus iterate () override;
2090
2091 private:
2092         TestParams      m_params;
2093 };
2094
2095 // Check if a descriptor set contains a given descriptor type in any iteration up to maxTypes().
2096 bool containsAnyDescriptorType (const DescriptorSet& descriptorSet, VkDescriptorType descriptorType)
2097 {
2098         const auto numIterations = descriptorSet.maxTypes();
2099
2100         for (deUint32 iter = 0u; iter < numIterations; ++iter)
2101         {
2102                 if (descriptorSet.containsTypeAtIteration(descriptorType, iter))
2103                         return true;
2104         }
2105
2106         return false;
2107 }
2108
2109 // Check if testing this descriptor set needs an external image (for sampler descriptors).
2110 bool needsExternalImage (const DescriptorSet& descriptorSet)
2111 {
2112         return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLER);
2113 }
2114
2115 // Check if testing this descriptor set needs an external sampler (for sampled images).
2116 bool needsExternalSampler (const DescriptorSet& descriptorSet)
2117 {
2118         return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
2119 }
2120
2121 // Check if this descriptor set contains any input attachments.
2122 bool usesInputAttachments (const DescriptorSet& descriptorSet)
2123 {
2124         return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2125 }
2126
2127 // Check if this descriptor set contains acceleration structures.
2128 bool usesAccelerationStructures (const DescriptorSet& descriptorSet)
2129 {
2130         return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
2131 }
2132
2133 std::string shaderName (deUint32 iteration)
2134 {
2135         return ("iteration-" + de::toString(iteration));
2136 }
2137
2138 void MutableTypesTest::initPrograms (vk::SourceCollections& programCollection) const
2139 {
2140         const bool                                              usePushConstants      = (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT);
2141         const bool                                              useExternalImage      = needsExternalImage(*m_params.descriptorSet);
2142         const bool                                              useExternalSampler    = needsExternalSampler(*m_params.descriptorSet);
2143         const bool                                              rayQueries            = usesAccelerationStructures(*m_params.descriptorSet);
2144         const bool                                              rayTracing            = isRayTracingStage(m_params.testingStage);
2145         const auto                                              numIterations         = m_params.descriptorSet->maxTypes();
2146         const auto                                              numBindings           = m_params.descriptorSet->numBindings();
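        // Ray tracing pipeline shaders must be built targeting SPIR-V 1.4; the same build options are reused below for shaders containing ray queries.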
2147         const vk::ShaderBuildOptions    rtBuildOptions        (programCollection.usedVulkanVersion, vk::SPIRV_VERSION_1_4, 0u, true);
2148
2149         // Extra set and bindings for external resources.
2150         std::ostringstream extraSet;
2151         deUint32           extraBindings = 0u;
2152
2153         extraSet << "layout (set=1, binding=" << extraBindings++ << ") buffer OutputBufferBlock { uint value[" << numIterations << "]; } outputBuffer;\n";
2154         if (useExternalImage)
2155                 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform utexture2D externalSampledImage;\n";
2156         if (useExternalSampler)
2157                 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform sampler externalSampler;\n";
2158         // The extra binding below will be declared in the "passthrough" ray generation shader.
2159 #if 0
2160         if (rayTracing)
2161                 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform accelerationStructureEXT externalAS;\n";
2162 #endif
2163
2164         // Common vertex preamble.
2165         std::ostringstream vertexPreamble;
2166         vertexPreamble
2167                         << "vec2 vertexPositions[3] = vec2[](\n"
2168                         << "  vec2(0.0, -0.5),\n"
2169                         << "  vec2(0.5, 0.5),\n"
2170                         << "  vec2(-0.5, 0.5)\n"
2171                         << ");\n"
2172                         ;
2173
2174         // Vertex shader body common statements.
2175         std::ostringstream vertexBodyCommon;
2176         vertexBodyCommon << "  gl_Position = vec4(vertexPositions[gl_VertexIndex], 0.0, 1.0);\n";
2177
2178         // Common tessellation control preamble.
2179         std::ostringstream tescPreamble;
2180         tescPreamble
2181                 << "layout (vertices=3) out;\n"
2182                 << "in gl_PerVertex\n"
2183                 << "{\n"
2184                 << "  vec4 gl_Position;\n"
2185                 << "} gl_in[gl_MaxPatchVertices];\n"
2186                 << "out gl_PerVertex\n"
2187                 << "{\n"
2188                 << "  vec4 gl_Position;\n"
2189                 << "} gl_out[];\n"
2190                 ;
2191
2192         // Common tessellation control body.
2193         std::ostringstream tescBodyCommon;
2194         tescBodyCommon
2195                 << "  gl_TessLevelInner[0] = 1.0;\n"
2196                 << "  gl_TessLevelInner[1] = 1.0;\n"
2197                 << "  gl_TessLevelOuter[0] = 1.0;\n"
2198                 << "  gl_TessLevelOuter[1] = 1.0;\n"
2199                 << "  gl_TessLevelOuter[2] = 1.0;\n"
2200                 << "  gl_TessLevelOuter[3] = 1.0;\n"
2201                 << "  gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2202                 ;
2203
2204         // Common tessellation evaluation preamble.
2205         std::ostringstream tesePreamble;
2206         tesePreamble
2207                 << "layout (triangles, fractional_odd_spacing, cw) in;\n"
2208                 << "in gl_PerVertex\n"
2209                 << "{\n"
2210                 << "  vec4 gl_Position;\n"
2211                 << "} gl_in[gl_MaxPatchVertices];\n"
2212                 << "out gl_PerVertex\n"
2213                 << "{\n"
2214                 << "  vec4 gl_Position;\n"
2215                 << "};\n"
2216                 ;
2217
2218         // Common tessellation evaluation body.
2219         std::ostringstream teseBodyCommon;
2220         teseBodyCommon
2221                 << "  gl_Position = (gl_TessCoord.x * gl_in[0].gl_Position) +\n"
2222                 << "                (gl_TessCoord.y * gl_in[1].gl_Position) +\n"
2223                 << "                (gl_TessCoord.z * gl_in[2].gl_Position);\n"
2224                 ;
2225
2226         // Shader preamble.
2227         std::ostringstream preamble;
2228
2229         preamble
2230                 << "#version 460\n"
2231                 << "#extension GL_EXT_nonuniform_qualifier : enable\n"
2232                 << "#extension GL_EXT_debug_printf : enable\n"
2233                 << (rayTracing ? "#extension GL_EXT_ray_tracing : enable\n" : "")
2234                 << (rayQueries ? "#extension GL_EXT_ray_query : enable\n" : "")
2235                 << "\n"
2236                 ;
2237
2238         if (m_params.testingStage == TestingStage::VERTEX)
2239         {
2240                 preamble << vertexPreamble.str();
2241         }
2242         else if (m_params.testingStage == TestingStage::COMPUTE)
2243         {
2244                 preamble
2245                         << "layout (local_size_x=1, local_size_y=1, local_size_z=1) in;\n"
2246                         << "\n"
2247                         ;
2248         }
2249         else if (m_params.testingStage == TestingStage::GEOMETRY)
2250         {
2251                 preamble
2252                         << "layout (triangles) in;\n"
2253                         << "layout (triangle_strip, max_vertices=3) out;\n"
2254                         << "in gl_PerVertex\n"
2255                         << "{\n"
2256                         << "  vec4 gl_Position;\n"
2257                         << "} gl_in[3];\n"
2258                         << "out gl_PerVertex\n"
2259                         << "{\n"
2260                         << "  vec4 gl_Position;\n"
2261                         << "};\n"
2262                         ;
2263         }
2264         else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2265         {
2266                 preamble << tescPreamble.str();
2267         }
2268         else if (m_params.testingStage == TestingStage::TESS_EVAL)
2269         {
2270                 preamble << tesePreamble.str();
2271         }
2272         else if (m_params.testingStage == TestingStage::CALLABLE)
2273         {
2274                 preamble << "layout (location=0) callableDataInEXT float unusedCallableData;\n";
2275         }
2276         else if (m_params.testingStage == TestingStage::CLOSEST_HIT ||
2277                          m_params.testingStage == TestingStage::ANY_HIT ||
2278                          m_params.testingStage == TestingStage::MISS)
2279         {
2280                 preamble << "layout (location=0) rayPayloadInEXT float unusedRayPayload;\n";
2281         }
2282         else if (m_params.testingStage == TestingStage::INTERSECTION)
2283         {
2284                 preamble << "hitAttributeEXT vec3 hitAttribute;\n";
2285         }
2286
2287         preamble << extraSet.str();
2288         if (usePushConstants)
2289                 preamble << "layout (push_constant, std430) uniform PushConstantBlock { uint zero; } pc;\n";
2290         preamble << "\n";
2291
2292         // We need to create a shader per iteration.
2293         for (deUint32 iter = 0u; iter < numIterations; ++iter)
2294         {
2295                 // Shader preamble.
2296                 std::ostringstream shader;
2297                 shader << preamble.str();
2298
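                // Number of input attachment bindings declared so far in this iteration; glslDeclarations() receives it so each
                // input attachment binding can be given its own attachment index.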
2299                 deUint32 inputAttachmentCount = 0u;
2300
2301                 // Descriptor declarations for this iteration.
2302                 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2303                 {
2304                         DE_ASSERT(bindingIdx <= std::numeric_limits<deUint32>::max());
2305
2306                         const auto binding            = m_params.descriptorSet->getBinding(bindingIdx);
2307                         const auto bindingTypes       = binding->typesAtIteration(iter);
2308                         const auto hasInputAttachment = de::contains(begin(bindingTypes), end(bindingTypes), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2309                         const auto isArray            = binding->isArray();
2310                         const auto isUnbounded        = binding->isUnbounded();
2311                         const auto bindingSize        = binding->size();
2312
2313                         // If the binding is an input attachment, make sure it's not an array.
2314                         DE_ASSERT(!hasInputAttachment || !isArray);
2315
2316                         // Make sure the descriptor count fits a deInt32 if needed.
2317                         DE_ASSERT(!isArray || isUnbounded || bindingSize <= static_cast<size_t>(std::numeric_limits<deInt32>::max()));
2318
2319                         const auto arraySize = (isArray ? (isUnbounded ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(bindingSize)))
2320                                                         : tcu::Nothing);
2321
2322                         shader << binding->glslDeclarations(iter, 0u, static_cast<deUint32>(bindingIdx), inputAttachmentCount, arraySize);
2323
2324                         if (hasInputAttachment)
2325                                 ++inputAttachmentCount;
2326                 }
2327
2328                 // Main body.
2329                 shader
2330                         << "\n"
2331                         << "void main() {\n"
2332                         // This checks if we are the first invocation to arrive here, so the checks are executed only once.
2333                         << "  const uint flag = atomicCompSwap(outputBuffer.value[" << iter << "], 0u, 1u);\n"
2334                         << "  if (flag == 0u) {\n"
2335                         << "    uint anyError = 0u;\n"
2336                         ;
2337
2338                 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2339                 {
2340                         const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2341                         const auto idx32 = static_cast<deUint32>(bindingIdx);
2342                         shader << binding->glslCheckStatements(iter, 0u, idx32, getDescriptorNumericValue(iter, idx32), tcu::Nothing, usePushConstants);
2343                 }
2344
2345                 shader
2346                         << "    if (anyError == 0u) {\n"
2347                         << "      atomicAdd(outputBuffer.value[" << iter << "], 1u);\n"
2348                         << "    }\n"
2349                         << "  }\n" // Closes if (flag == 0u).
2350                         ;
2351
2352                 if (m_params.testingStage == TestingStage::VERTEX)
2353                 {
2354                         shader << vertexBodyCommon.str();
2355                 }
2356                 else if (m_params.testingStage == TestingStage::GEOMETRY)
2357                 {
2358                         shader
2359                                 << "  gl_Position = gl_in[0].gl_Position; EmitVertex();\n"
2360                                 << "  gl_Position = gl_in[1].gl_Position; EmitVertex();\n"
2361                                 << "  gl_Position = gl_in[2].gl_Position; EmitVertex();\n"
2362                                 ;
2363                 }
2364                 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2365                 {
2366                         shader << tescBodyCommon.str();
2367                 }
2368                 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2369                 {
2370                         shader << teseBodyCommon.str();
2371                 }
2372
2373                 shader
2374                         << "}\n" // End of main().
2375                         ;
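                // Roughly, the generated main() for iteration N looks like this (per-binding checks elided):
                //
                //   void main() {
                //     const uint flag = atomicCompSwap(outputBuffer.value[N], 0u, 1u);
                //     if (flag == 0u) {
                //       uint anyError = 0u;
                //       // ... glslCheckStatements() for every binding, possibly setting anyError ...
                //       if (anyError == 0u) {
                //         atomicAdd(outputBuffer.value[N], 1u);
                //       }
                //     }
                //     // ... stage-specific body (position output, tessellation levels, etc.) ...
                //   }
                //
                // Hence outputBuffer.value[N] ends up as 2 (getExpectedOutputBufferValue()) only if every check passed.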
2376
2377                 {
2378                         const auto      shaderNameStr   = shaderName(iter);
2379                         const auto      shaderStr               = shader.str();
2380                         auto&           glslSource              = programCollection.glslSources.add(shaderNameStr);
2381
2382                         if (m_params.testingStage == TestingStage::COMPUTE)
2383                                 glslSource << glu::ComputeSource(shaderStr);
2384                         else if (m_params.testingStage == TestingStage::VERTEX)
2385                                 glslSource << glu::VertexSource(shaderStr);
2386                         else if (m_params.testingStage == TestingStage::FRAGMENT)
2387                                 glslSource << glu::FragmentSource(shaderStr);
2388                         else if (m_params.testingStage == TestingStage::GEOMETRY)
2389                                 glslSource << glu::GeometrySource(shaderStr);
2390                         else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2391                                 glslSource << glu::TessellationControlSource(shaderStr);
2392                         else if (m_params.testingStage == TestingStage::TESS_EVAL)
2393                                 glslSource << glu::TessellationEvaluationSource(shaderStr);
2394                         else if (m_params.testingStage == TestingStage::RAY_GEN)
2395                                 glslSource << glu::RaygenSource(updateRayTracingGLSL(shaderStr));
2396                         else if (m_params.testingStage == TestingStage::INTERSECTION)
2397                                 glslSource << glu::IntersectionSource(updateRayTracingGLSL(shaderStr));
2398                         else if (m_params.testingStage == TestingStage::ANY_HIT)
2399                                 glslSource << glu::AnyHitSource(updateRayTracingGLSL(shaderStr));
2400                         else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
2401                                 glslSource << glu::ClosestHitSource(updateRayTracingGLSL(shaderStr));
2402                         else if (m_params.testingStage == TestingStage::MISS)
2403                                 glslSource << glu::MissSource(updateRayTracingGLSL(shaderStr));
2404                         else if (m_params.testingStage == TestingStage::CALLABLE)
2405                                 glslSource << glu::CallableSource(updateRayTracingGLSL(shaderStr));
2406                         else
2407                                 DE_ASSERT(false);
2408
2409                         if (rayTracing || rayQueries)
2410                                 glslSource << rtBuildOptions;
2411                 }
2412         }
2413
2414         if (m_params.testingStage == TestingStage::FRAGMENT
2415                 || m_params.testingStage == TestingStage::GEOMETRY
2416                 || m_params.testingStage == TestingStage::TESS_CONTROL
2417                 || m_params.testingStage == TestingStage::TESS_EVAL)
2418         {
2419                 // Add a passthrough vertex shader for the cases where another graphics stage is the one under test.
2420                 std::ostringstream vertPassthrough;
2421                 vertPassthrough
2422                         << "#version 460\n"
2423                         << "out gl_PerVertex\n"
2424                         << "{\n"
2425                         << "  vec4 gl_Position;\n"
2426                         << "};\n"
2427                         << vertexPreamble.str()
2428                         << "void main() {\n"
2429                         << vertexBodyCommon.str()
2430                         << "}\n"
2431                         ;
2432                 programCollection.glslSources.add("vert") << glu::VertexSource(vertPassthrough.str());
2433         }
2434
2435         if (m_params.testingStage == TestingStage::TESS_CONTROL)
2436         {
2437                 // Add passthrough tessellation evaluation shader.
2438                 std::ostringstream tesePassthrough;
2439                 tesePassthrough
2440                         << "#version 460\n"
2441                         << tesePreamble.str()
2442                         << "void main (void)\n"
2443                         << "{\n"
2444                         << teseBodyCommon.str()
2445                         << "}\n"
2446                         ;
2447
2448                 programCollection.glslSources.add("tese") << glu::TessellationEvaluationSource(tesePassthrough.str());
2449         }
2450
2451         if (m_params.testingStage == TestingStage::TESS_EVAL)
2452         {
2453                 // Add passthrough tessellation control shader.
2454                 std::ostringstream tescPassthrough;
2455                 tescPassthrough
2456                         << "#version 460\n"
2457                         << tescPreamble.str()
2458                         << "void main (void)\n"
2459                         << "{\n"
2460                         << tescBodyCommon.str()
2461                         << "}\n"
2462                         ;
2463
2464                 programCollection.glslSources.add("tesc") << glu::TessellationControlSource(tescPassthrough.str());
2465         }
2466
2467         if (rayTracing && m_params.testingStage != TestingStage::RAY_GEN)
2468         {
2469                 // Add a "passthrough" ray generation shader.
2470                 std::ostringstream rgen;
2471                 rgen
2472                         << "#version 460 core\n"
2473                         << "#extension GL_EXT_ray_tracing : require\n"
2474                         << "layout (set=1, binding=" << extraBindings << ") uniform accelerationStructureEXT externalAS;\n"
2475                         << ((m_params.testingStage == TestingStage::CALLABLE)
2476                                 ? "layout (location=0) callableDataEXT float unusedCallableData;\n"
2477                                 : "layout (location=0) rayPayloadEXT float unusedRayPayload;\n")
2478                         << "\n"
2479                         << "void main()\n"
2480                         << "{\n"
2481                         ;
2482
2483                 if (m_params.testingStage == TestingStage::INTERSECTION
2484                         || m_params.testingStage == TestingStage::ANY_HIT
2485                         || m_params.testingStage == TestingStage::CLOSEST_HIT
2486                         || m_params.testingStage == TestingStage::MISS)
2487                 {
2488                         // We need to trace rays in this case to get hits or misses.
2489                         const auto zDir = ((m_params.testingStage == TestingStage::MISS) ? "-1.0" : "1.0");
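                        // For the MISS case the ray is shot in the opposite direction so it misses the geometry in the external acceleration structure.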
2490
2491                         rgen
2492                                 << "  const uint cullMask = 0xFF;\n"
2493                                 << "  const float tMin = 1.0;\n"
2494                                 << "  const float tMax = 10.0;\n"
2495                                 << "  const vec3 origin = vec3(0.0, 0.0, 0.0);\n"
2496                                 << "  const vec3 direction = vec3(0.0, 0.0, " << zDir << ");\n"
2497                                 << "  traceRayEXT(externalAS, gl_RayFlagsNoneEXT, cullMask, 0, 0, 0, origin, tMin, direction, tMax, 0);\n"
2498                                 ;
2499
2500                 }
2501                 else if (m_params.testingStage == TestingStage::CALLABLE)
2502                 {
2503                         rgen << "  executeCallableEXT(0, 0);\n";
2504                 }
2505
2506                 // End of main().
2507                 rgen << "}\n";
2508
2509                 programCollection.glslSources.add("rgen") << glu::RaygenSource(updateRayTracingGLSL(rgen.str())) << rtBuildOptions;
2510
2511                 // Intersection shaders will ignore the intersection, so we need a passthrough miss shader.
2512                 if (m_params.testingStage == TestingStage::INTERSECTION)
2513                 {
2514                         std::ostringstream miss;
2515                         miss
2516                                 << "#version 460 core\n"
2517                                 << "#extension GL_EXT_ray_tracing : require\n"
2518                                 << "layout (location=0) rayPayloadEXT float unusedRayPayload;\n"
2519                                 << "\n"
2520                                 << "void main()\n"
2521                                 << "{\n"
2522                                 << "}\n"
2523                                 ;
2524
2525                         programCollection.glslSources.add("miss") << glu::MissSource(updateRayTracingGLSL(miss.str())) << rtBuildOptions;
2526                 }
2527         }
2528 }
2529
2530 TestInstance* MutableTypesTest::createInstance (Context& context) const
2531 {
2532         return new MutableTypesInstance(context, m_params);
2533 }
2534
2535 void requirePartiallyBound (Context& context)
2536 {
2537         context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2538         const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2539         if (!indexingFeatures.descriptorBindingPartiallyBound)
2540                 TCU_THROW(NotSupportedError, "Partially bound bindings not supported");
2541 }
2542
2543 void requireVariableDescriptorCount (Context& context)
2544 {
2545         context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2546         const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2547         if (!indexingFeatures.descriptorBindingVariableDescriptorCount)
2548                 TCU_THROW(NotSupportedError, "Variable descriptor count not supported");
2549 }
2550
2551 // Calculates the set of used descriptor types for a given set and iteration count, for bindings matching a predicate.
2552 std::set<VkDescriptorType> getUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations, bool (*predicate)(const BindingInterface* binding))
2553 {
2554         std::set<VkDescriptorType> usedDescriptorTypes;
2555
2556         for (size_t bindingIdx = 0; bindingIdx < descriptorSet.numBindings(); ++bindingIdx)
2557         {
2558                 const auto bindingPtr = descriptorSet.getBinding(bindingIdx);
2559                 if (predicate(bindingPtr))
2560                 {
2561                         for (deUint32 iter = 0u; iter < numIterations; ++iter)
2562                         {
2563                                 const auto descTypes = bindingPtr->typesAtIteration(iter);
2564                                 usedDescriptorTypes.insert(begin(descTypes), end(descTypes));
2565                         }
2566                 }
2567         }
2568
2569         return usedDescriptorTypes;
2570 }
2571
2572 std::set<VkDescriptorType> getAllUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2573 {
2574         return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface*) { return true; });
2575 }
2576
2577 std::set<VkDescriptorType> getUsedArrayDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2578 {
2579         return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface* b) { return b->isArray(); });
2580 }
2581
2582 // Are we testing a vertex pipeline stage?
2583 bool isVertexStage (TestingStage stage)
2584 {
2585         switch (stage)
2586         {
2587         case TestingStage::VERTEX:
2588         case TestingStage::TESS_CONTROL:
2589         case TestingStage::TESS_EVAL:
2590         case TestingStage::GEOMETRY:
2591                 return true;
2592         default:
2593                 break;
2594         }
2595
2596         return false;
2597 }
2598
2599 void MutableTypesTest::checkSupport (Context& context) const
2600 {
2601         context.requireDeviceFunctionality("VK_VALVE_mutable_descriptor_type");
2602
2603         // Check ray tracing if needed.
2604         const bool rayTracing = isRayTracingStage(m_params.testingStage);
2605
2606         if (rayTracing)
2607         {
2608                 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2609                 context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
2610         }
2611
2612         // Check if ray queries are needed. Ray queries are used to verify acceleration structure descriptors.
2613         const bool rayQueriesNeeded = usesAccelerationStructures(*m_params.descriptorSet);
2614         if (rayQueriesNeeded)
2615         {
2616                 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2617                 context.requireDeviceFunctionality("VK_KHR_ray_query");
2618         }
2619
2620         // We'll use iterations to check each mutable type, as needed.
2621         const auto numIterations = m_params.descriptorSet->maxTypes();
2622
2623         if (m_params.descriptorSet->lastBindingIsUnbounded())
2624                 requireVariableDescriptorCount(context);
2625
2626         for (deUint32 iter = 0u; iter < numIterations; ++iter)
2627         {
2628                 if (m_params.descriptorSet->needsAliasing(iter))
2629                 {
2630                         requirePartiallyBound(context);
2631                         break;
2632                 }
2633         }
2634
2635         if (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2636         {
2637                 // Check update after bind for each used descriptor type.
2638                 const auto& usedDescriptorTypes = getAllUsedDescriptorTypes(*m_params.descriptorSet, numIterations);
2639                 const auto& indexingFeatures    = context.getDescriptorIndexingFeatures();
2640
2641                 for (const auto& descType : usedDescriptorTypes)
2642                 {
2643                         switch (descType)
2644                         {
2645                         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2646                         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2647                                 if (!indexingFeatures.descriptorBindingUniformBufferUpdateAfterBind)
2648                                         TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform buffers");
2649                                 break;
2650
2651                         case VK_DESCRIPTOR_TYPE_SAMPLER:
2652                         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2653                         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2654                                 if (!indexingFeatures.descriptorBindingSampledImageUpdateAfterBind)
2655                                         TCU_THROW(NotSupportedError, "Update-after-bind not supported for samplers and sampled images");
2656                                 break;
2657
2658                         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2659                                 if (!indexingFeatures.descriptorBindingStorageImageUpdateAfterBind)
2660                                         TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage images");
2661                                 break;
2662
2663                         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2664                         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2665                                 if (!indexingFeatures.descriptorBindingStorageBufferUpdateAfterBind)
2666                                         TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage buffers");
2667                                 break;
2668
2669                         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2670                                 if (!indexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind)
2671                                         TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform texel buffers");
2672                                 break;
2673
2674                         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2675                                 if (!indexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind)
2676                                         TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage texel buffers");
2677                                 break;
2678
2679                         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2680                                 TCU_THROW(InternalError, "Tests do not support update-after-bind with input attachments");
2681
2682                         case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
2683                                 {
2684                                         // Just in case we ever mix some of these in.
2685                                         context.requireDeviceFunctionality("VK_EXT_inline_uniform_block");
2686                                         const auto& iubFeatures = context.getInlineUniformBlockFeatures();
2687                                         if (!iubFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind)
2688                                                 TCU_THROW(NotSupportedError, "Update-after-bind not supported for inline uniform blocks");
2689                                 }
2690                                 break;
2691
2692                         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2693                                 {
2694                                         // Just in case we ever mix some of these in.
2695                                         context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2696                                         const auto& asFeatures = context.getAccelerationStructureFeatures();
2697                                         if (!asFeatures.descriptorBindingAccelerationStructureUpdateAfterBind)
2698                                                 TCU_THROW(NotSupportedError, "Update-after-bind not supported for acceleration structures");
2699                                 }
2700                                 break;
2701
2702                         case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
2703                                 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_VALVE in list of used descriptor types");
2704
2705                         default:
2706                                 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2707                         }
2708                 }
2709         }
2710
2711         if (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT)
2712         {
2713                 // These require dynamically uniform indices.
2714                 const auto& usedDescriptorTypes         = getUsedArrayDescriptorTypes(*m_params.descriptorSet, numIterations);
2715                 const auto& features                    = context.getDeviceFeatures();
2716                 const auto descriptorIndexingSupported  = context.isDeviceFunctionalitySupported("VK_EXT_descriptor_indexing");
2717                 const auto& indexingFeatures            = context.getDescriptorIndexingFeatures();
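                // Note: dynamic indexing of uniform/storage buffers and sampled/storage images is covered by core
                // VkPhysicalDeviceFeatures, while texel buffers and input attachments need the descriptor indexing features.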
2718
2719                 for (const auto& descType : usedDescriptorTypes)
2720                 {
2721                         switch (descType)
2722                         {
2723                         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2724                         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2725                                 if (!features.shaderUniformBufferArrayDynamicIndexing)
2726                                         TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform buffers");
2727                                 break;
2728
2729                         case VK_DESCRIPTOR_TYPE_SAMPLER:
2730                         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2731                         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2732                                 if (!features.shaderSampledImageArrayDynamicIndexing)
2733                                         TCU_THROW(NotSupportedError, "Dynamic indexing not supported for samplers and sampled images");
2734                                 break;
2735
2736                         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2737                                 if (!features.shaderStorageImageArrayDynamicIndexing)
2738                                         TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage images");
2739                                 break;
2740
2741                         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2742                         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2743                                 if (!features.shaderStorageBufferArrayDynamicIndexing)
2744                                         TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage buffers");
2745                                 break;
2746
2747                         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2748                                 if (!descriptorIndexingSupported || !indexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing)
2749                                         TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform texel buffers");
2750                                 break;
2751
2752                         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2753                                 if (!descriptorIndexingSupported || !indexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing)
2754                                         TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage texel buffers");
2755                                 break;
2756
2757                         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2758                                 if (!descriptorIndexingSupported || !indexingFeatures.shaderInputAttachmentArrayDynamicIndexing)
2759                                         TCU_THROW(NotSupportedError, "Dynamic indexing not supported for input attachments");
2760                                 break;
2761
2762                         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2763                                 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2764                                 break;
2765
2766                         case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
2767                                 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_VALVE in list of used array descriptor types");
2768
2769                         default:
2770                                 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2771                         }
2772                 }
2773         }
2774
2775         // Check layout support.
2776         {
2777                 const auto& vkd               = context.getDeviceInterface();
2778                 const auto  device            = context.getDevice();
2779                 const auto  stageFlags        = m_params.getStageFlags();
2780
2781                 {
2782                         const auto  layoutCreateFlags = m_params.getDstLayoutCreateFlags();
2783                         const auto  supported         = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2784
2785                         if (!supported)
2786                                 TCU_THROW(NotSupportedError, "Required descriptor set layout not supported");
2787                 }
2788
2789                 if (m_params.updateType == UpdateType::COPY)
2790                 {
2791                         const auto  layoutCreateFlags = m_params.getSrcLayoutCreateFlags();
2792                         const auto  supported         = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2793
2794                         if (!supported)
2795                                 TCU_THROW(NotSupportedError, "Required descriptor set layout for source set not supported");
2796
2797                         // Check that the specific layouts used for the different source sets are supported.
2798                         for (deUint32 iter = 0u; iter < numIterations; ++iter)
2799                         {
2800                                 const auto srcSet             = m_params.descriptorSet->genSourceSet(m_params.sourceSetStrategy, iter);
2801                                 const auto srcLayoutSupported = srcSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2802
2803                                 if (!srcLayoutSupported)
2804                                         TCU_THROW(NotSupportedError, "Descriptor set layout for source set at iteration " + de::toString(iter) + " not supported");
2805                         }
2806                 }
2807         }
2808
2809         // Check support for shader stores/atomics and for the tested shader stages.
2810         const bool vertexStage   = isVertexStage(m_params.testingStage);
2811         const bool fragmentStage = (m_params.testingStage == TestingStage::FRAGMENT);
2812         const bool geometryStage = (m_params.testingStage == TestingStage::GEOMETRY);
2813         const bool tessellation  = (m_params.testingStage == TestingStage::TESS_CONTROL || m_params.testingStage == TestingStage::TESS_EVAL);
2814
2815         const auto& features = context.getDeviceFeatures();
2816
2817         if (vertexStage && !features.vertexPipelineStoresAndAtomics)
2818                 TCU_THROW(NotSupportedError, "Vertex pipeline stores and atomics not supported");
2819
2820         if (fragmentStage && !features.fragmentStoresAndAtomics)
2821                 TCU_THROW(NotSupportedError, "Fragment shader stores and atomics not supported");
2822
2823         if (geometryStage && !features.geometryShader)
2824                 TCU_THROW(NotSupportedError, "Geometry shader not supported");
2825
2826         if (tessellation && !features.tessellationShader)
2827                 TCU_THROW(NotSupportedError, "Tessellation shaders not supported");
2828 }
2829
2830 // What to do at each iteration step. Used to implement UPDATE_AFTER_BIND behavior when required.
2831 enum class Step
2832 {
2833         UPDATE = 0,
2834         BIND,
2835 };
2836
2837 // Create render pass.
2838 Move<VkRenderPass> buildRenderPass (const DeviceInterface& vkd, VkDevice device, const std::vector<Resource>& resources)
2839 {
2840         const auto imageFormat = getDescriptorImageFormat();
2841
2842         std::vector<VkAttachmentDescription>    attachmentDescriptions;
2843         std::vector<VkAttachmentReference>      attachmentReferences;
2844         std::vector<deUint32>                   attachmentIndices;
2845
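        // Add one input attachment (kept in the GENERAL layout, with contents preserved via LOAD_OP_LOAD) per
        // input attachment descriptor found in the resource list.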
2846         for (const auto& resource : resources)
2847         {
2848                 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
2849                 {
2850                         const auto nextIndex = static_cast<deUint32>(attachmentDescriptions.size());
2851
2852                         const VkAttachmentDescription description = {
2853                                 0u,                                 //  VkAttachmentDescriptionFlags    flags;
2854                                 imageFormat,                        //  VkFormat                                                format;
2855                                 VK_SAMPLE_COUNT_1_BIT,              //  VkSampleCountFlagBits                   samples;
2856                                 VK_ATTACHMENT_LOAD_OP_LOAD,         //  VkAttachmentLoadOp                              loadOp;
2857                                 VK_ATTACHMENT_STORE_OP_DONT_CARE,   //  VkAttachmentStoreOp                             storeOp;
2858                                 VK_ATTACHMENT_LOAD_OP_DONT_CARE,    //  VkAttachmentLoadOp                              stencilLoadOp;
2859                                 VK_ATTACHMENT_STORE_OP_DONT_CARE,   //  VkAttachmentStoreOp                             stencilStoreOp;
2860                                 VK_IMAGE_LAYOUT_GENERAL,            //  VkImageLayout                                   initialLayout;
2861                                 VK_IMAGE_LAYOUT_GENERAL,            //  VkImageLayout                                   finalLayout;
2862                         };
2863
2864                         const VkAttachmentReference reference = { nextIndex, VK_IMAGE_LAYOUT_GENERAL };
2865
2866                         attachmentIndices.push_back(nextIndex);
2867                         attachmentDescriptions.push_back(description);
2868                         attachmentReferences.push_back(reference);
2869                 }
2870         }
2871
2872         const auto attachmentCount = static_cast<deUint32>(attachmentDescriptions.size());
2873         DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentIndices.size()));
2874         DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentReferences.size()));
2875
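        // Single subpass consuming every attachment as an input attachment; no color or depth/stencil attachments are used.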
2876         const VkSubpassDescription subpassDescription =
2877         {
2878                 0u,                                     //  VkSubpassDescriptionFlags           flags;
2879                 VK_PIPELINE_BIND_POINT_GRAPHICS,        //  VkPipelineBindPoint                         pipelineBindPoint;
2880                 attachmentCount,                        //  deUint32                                            inputAttachmentCount;
2881                 de::dataOrNull(attachmentReferences),   //  const VkAttachmentReference*        pInputAttachments;
2882                 0u,                                     //  deUint32                                            colorAttachmentCount;
2883                 nullptr,                                //  const VkAttachmentReference*        pColorAttachments;
2884                 nullptr,                                //  const VkAttachmentReference*        pResolveAttachments;
2885                 nullptr,                                //  const VkAttachmentReference*        pDepthStencilAttachment;
2886                 0u,                                     //  deUint32                                            preserveAttachmentCount;
2887                 nullptr,                                //  const deUint32*                                     pPreserveAttachments;
2888         };
2889
2890         const VkRenderPassCreateInfo renderPassCreateInfo =
2891         {
2892                 VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,              //  VkStructureType                                     sType;
2893                 nullptr,                                                //  const void*                                         pNext;
2894                 0u,                                                     //  VkRenderPassCreateFlags                     flags;
2895                 static_cast<deUint32>(attachmentDescriptions.size()),   //  deUint32                                            attachmentCount;
2896                 de::dataOrNull(attachmentDescriptions),                 //  const VkAttachmentDescription*      pAttachments;
2897                 1u,                                                     //  deUint32                                            subpassCount;
2898                 &subpassDescription,                                    //  const VkSubpassDescription*         pSubpasses;
2899                 0u,                                                     //  deUint32                                            dependencyCount;
2900                 nullptr,                                                //  const VkSubpassDependency*          pDependencies;
2901         };
2902
2903         return createRenderPass(vkd, device, &renderPassCreateInfo);
2904 }
2905
2906 // Create a graphics pipeline.
2907 Move<VkPipeline> buildGraphicsPipeline (const DeviceInterface& vkd, VkDevice device, VkPipelineLayout pipelineLayout,
2908                                                                                 VkShaderModule vertModule,
2909                                                                                 VkShaderModule tescModule,
2910                                                                                 VkShaderModule teseModule,
2911                                                                                 VkShaderModule geomModule,
2912                                                                                 VkShaderModule fragModule,
2913                                                                                 VkRenderPass renderPass)
2914 {
2915         const auto                    extent    = getDefaultExtent();
2916         const std::vector<VkViewport> viewports (1u, makeViewport(extent));
2917         const std::vector<VkRect2D>   scissors  (1u, makeRect2D(extent));
2918         const auto                    hasTess   = (tescModule != DE_NULL || teseModule != DE_NULL);
2919         const auto                    topology  = (hasTess ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
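        // Tessellation uses patch lists with 3 control points per patch; otherwise plain triangle lists are used.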
2920
2921
2922         const VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo = initVulkanStructure();
2923
2924         const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo = {
2925                 VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,    //  VkStructureType                                                     sType;
2926                 nullptr,                                                        //  const void*                                                         pNext;
2927                 0u,                                                             //  VkPipelineInputAssemblyStateCreateFlags     flags;
2928                 topology,                                                       //  VkPrimitiveTopology                                         topology;
2929                 VK_FALSE,                                                       //  VkBool32                                                            primitiveRestartEnable;
2930         };
2931
2932         const VkPipelineTessellationStateCreateInfo tessellationStateCreateInfo = {
2933                 VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,  //  VkStructureType                                                 sType;
2934                 nullptr,                                                    //  const void*                                                             pNext;
2935                 0u,                                                         //  VkPipelineTessellationStateCreateFlags  flags;
2936                 (hasTess ? 3u : 0u),                                        //  deUint32                                                                patchControlPoints;
2937         };
2938
2939         const VkPipelineViewportStateCreateInfo viewportStateCreateInfo = {
2940                 VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,  //  VkStructureType                                             sType;
2941                 nullptr,                                                //  const void*                                                 pNext;
2942                 0u,                                                     //  VkPipelineViewportStateCreateFlags  flags;
2943                 static_cast<deUint32>(viewports.size()),                //  deUint32                                                    viewportCount;
2944                 de::dataOrNull(viewports),                              //  const VkViewport*                                   pViewports;
2945                 static_cast<deUint32>(scissors.size()),                 //  deUint32                                                    scissorCount;
2946                 de::dataOrNull(scissors),                               //  const VkRect2D*                                             pScissors;
2947         };
2948
2949         const VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo = {
2950                 VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, //  VkStructureType                                                 sType;
2951                 nullptr,                                                    //  const void*                                                             pNext;
2952                 0u,                                                         //  VkPipelineRasterizationStateCreateFlags flags;
2953                 VK_FALSE,                                                   //  VkBool32                                                                depthClampEnable;
2954                 (fragModule == DE_NULL ? VK_TRUE : VK_FALSE),               //  VkBool32                                                                rasterizerDiscardEnable;
2955                 VK_POLYGON_MODE_FILL,                                       //  VkPolygonMode                                                   polygonMode;
2956                 VK_CULL_MODE_NONE,                                          //  VkCullModeFlags                                                 cullMode;
2957                 VK_FRONT_FACE_CLOCKWISE,                                    //  VkFrontFace                                                             frontFace;
2958                 VK_FALSE,                                                   //  VkBool32                                                                depthBiasEnable;
2959                 0.0f,                                                       //  float                                                                   depthBiasConstantFactor;
2960                 0.0f,                                                       //  float                                                                   depthBiasClamp;
2961                 0.0f,                                                       //  float                                                                   depthBiasSlopeFactor;
2962                 1.0f,                                                       //  float                                                                   lineWidth;
2963         };
2964
2965         const VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo = {
2966                 VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,   //  VkStructureType                                                 sType;
2967                 nullptr,                                                    //  const void*                                                             pNext;
2968                 0u,                                                         //  VkPipelineMultisampleStateCreateFlags   flags;
2969                 VK_SAMPLE_COUNT_1_BIT,                                      //  VkSampleCountFlagBits                                   rasterizationSamples;
2970                 VK_FALSE,                                                   //  VkBool32                                                                sampleShadingEnable;
2971                 1.0f,                                                       //  float                                                                   minSampleShading;
2972                 nullptr,                                                    //  const VkSampleMask*                                             pSampleMask;
2973                 VK_FALSE,                                                   //  VkBool32                                                                alphaToCoverageEnable;
2974                 VK_FALSE,                                                   //  VkBool32                                                                alphaToOneEnable;
2975         };
2976
2977         const VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo = initVulkanStructure();
2978
2979         const VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo = initVulkanStructure();
2980
2981         return makeGraphicsPipeline(vkd, device, pipelineLayout,
2982                                     vertModule, tescModule, teseModule, geomModule, fragModule,
2983                                     renderPass, 0u, &vertexInputStateCreateInfo, &inputAssemblyStateCreateInfo,
2984                                     (hasTess ? &tessellationStateCreateInfo : nullptr), &viewportStateCreateInfo,
2985                                                                 &rasterizationStateCreateInfo, &multisampleStateCreateInfo,
2986                                     &depthStencilStateCreateInfo, &colorBlendStateCreateInfo, nullptr);
2987 }
2988
2989 Move<VkFramebuffer> buildFramebuffer (const DeviceInterface& vkd, VkDevice device, VkRenderPass renderPass, const std::vector<Resource>& resources)
2990 {
2991         const auto extent = getDefaultExtent();
2992
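        // Only the input attachment views are attached to the framebuffer; there are no color attachments.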
2993         std::vector<VkImageView> inputAttachments;
2994         for (const auto& resource : resources)
2995         {
2996                 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
2997                         inputAttachments.push_back(resource.imageView.get());
2998         }
2999
3000         const VkFramebufferCreateInfo framebufferCreateInfo =
3001         {
3002                 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,              //      VkStructureType                         sType;
3003                 nullptr,                                                                                //      const void*                                     pNext;
3004                 0u,                                                                                             //      VkFramebufferCreateFlags        flags;
3005                 renderPass,                                                                             //      VkRenderPass                            renderPass;
3006                 static_cast<deUint32>(inputAttachments.size()), //      deUint32                                        attachmentCount;
3007                 de::dataOrNull(inputAttachments),                               //      const VkImageView*                      pAttachments;
3008                 extent.width,                                                                   //      deUint32                                        width;
3009                 extent.height,                                                                  //      deUint32                                        height;
3010                 extent.depth,                                                                   //      deUint32                                        layers;
3011         };
3012
3013         return createFramebuffer(vkd, device, &framebufferCreateInfo);
3014 }
3015
3016 tcu::TestStatus MutableTypesInstance::iterate ()
3017 {
3018         const auto device  = m_context.getDevice();
3019         const auto physDev = m_context.getPhysicalDevice();
3020         const auto qIndex  = m_context.getUniversalQueueFamilyIndex();
3021         const auto queue   = m_context.getUniversalQueue();
3022
3023         const auto& vki      = m_context.getInstanceInterface();
3024         const auto& vkd      = m_context.getDeviceInterface();
3025         auto      & alloc    = m_context.getDefaultAllocator();
3026         const auto& paramSet = m_params.descriptorSet;
3027
3028         const auto numIterations      = paramSet->maxTypes();
3029         const bool useExternalImage   = needsExternalImage(*m_params.descriptorSet);
3030         const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
3031         const auto stageFlags         = m_params.getStageFlags();
3032         const bool srcSetNeeded       = (m_params.updateType == UpdateType::COPY);
3033         const bool updateAfterBind    = (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND);
3034         const auto bindPoint          = m_params.getBindPoint();
3035         const bool rayTracing         = isRayTracingStage(m_params.testingStage);
3036         const bool useAABBs           = (m_params.testingStage == TestingStage::INTERSECTION);
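        // Intersection shaders only run for procedural (AABB) geometry, so acceleration structures use AABBs in that case.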
3037
3038         // Resources for each iteration.
3039         std::vector<std::vector<Resource>> allResources;
3040         allResources.reserve(numIterations);
3041
3042         // Command pool.
3043         const auto cmdPool = makeCommandPool(vkd, device, qIndex);
3044
3045         // Descriptor pool and set for the active (dst) descriptor set.
3046         const auto dstPoolFlags   = m_params.getDstPoolCreateFlags();
3047         const auto dstLayoutFlags = m_params.getDstLayoutCreateFlags();
3048
3049         const auto dstPool   = paramSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, dstPoolFlags);
3050         const auto dstLayout = paramSet->makeDescriptorSetLayout(vkd, device, stageFlags, dstLayoutFlags);
3051         const auto varCount  = paramSet->getVariableDescriptorCount();
3052
3053         using VariableCountInfoPtr = de::MovePtr<VkDescriptorSetVariableDescriptorCountAllocateInfo>;
3054
3055         VariableCountInfoPtr dstVariableCountInfo;
3056         if (varCount)
3057         {
3058                 dstVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
3059                 *dstVariableCountInfo = initVulkanStructure();
3060
3061                 dstVariableCountInfo->descriptorSetCount = 1u;
3062                 dstVariableCountInfo->pDescriptorCounts  = &(varCount.get());
3063         }
3064         const auto dstSet = makeDescriptorSet(vkd, device, dstPool.get(), dstLayout.get(), dstVariableCountInfo.get());
3065
3066         // Source pool and set (optional).
3067         const auto                  srcPoolFlags   = m_params.getSrcPoolCreateFlags();
3068         const auto                  srcLayoutFlags = m_params.getSrcLayoutCreateFlags();
3069         DescriptorSetPtr            iterationSrcSet;
3070         Move<VkDescriptorPool>      srcPool;
3071         Move<VkDescriptorSetLayout> srcLayout;
3072         Move<VkDescriptorSet>       srcSet;
3073
3074         // Extra set for external resources and output buffer.
3075         std::vector<Resource> extraResources;
3076         extraResources.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vkd, device, alloc, qIndex, queue, useAABBs, 0u, numIterations);
3077         if (useExternalImage)
3078                 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, vkd, device, alloc, qIndex, queue, useAABBs, getExternalSampledImageValue());
3079         if (useExternalSampler)
3080                 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLER, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
3081         if (rayTracing)
3082                 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
3083
3084         Move<VkDescriptorPool> extraPool;
3085         {
3086                 DescriptorPoolBuilder poolBuilder;
3087                 poolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
3088                 if (useExternalImage)
3089                         poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
3090                 if (useExternalSampler)
3091                         poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLER);
3092                 if (rayTracing)
3093                         poolBuilder.addType(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
3094                 extraPool = poolBuilder.build(vkd, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
3095         }
3096
3097         Move<VkDescriptorSetLayout> extraLayout;
3098         {
3099                 DescriptorSetLayoutBuilder layoutBuilder;
3100                 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u, stageFlags, nullptr);
3101                 if (useExternalImage)
3102                         layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1u, stageFlags, nullptr);
3103                 if (useExternalSampler)
3104                         layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLER, 1u, stageFlags, nullptr);
3105                 if (rayTracing)
3106                 {
3107                         // The extra acceleration structure is used from the ray generation shader only.
3108                         layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, 1u, VK_SHADER_STAGE_RAYGEN_BIT_KHR, nullptr);
3109                 }
3110                 extraLayout = layoutBuilder.build(vkd, device);
3111         }
3112
3113         const auto extraSet = makeDescriptorSet(vkd, device, extraPool.get(), extraLayout.get());
3114
3115         // Update extra set.
3116         using DescriptorBufferInfoPtr = de::MovePtr<VkDescriptorBufferInfo>;
3117         using DescriptorImageInfoPtr  = de::MovePtr<VkDescriptorImageInfo>;
3118         using DescriptorASInfoPtr     = de::MovePtr<VkWriteDescriptorSetAccelerationStructureKHR>;
3119
3120         deUint32                bindingCount = 0u;
3121         DescriptorBufferInfoPtr bufferInfoPtr;
3122         DescriptorImageInfoPtr  imageInfoPtr;
3123         DescriptorImageInfoPtr  samplerInfoPtr;
3124         DescriptorASInfoPtr     asWriteInfoPtr;
3125
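	// Binding 0 of the extra set is a storage buffer with one output slot per iteration; the remaining bindings are optional.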
3126         const auto outputBufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numIterations));
3127         bufferInfoPtr = DescriptorBufferInfoPtr(new VkDescriptorBufferInfo(makeDescriptorBufferInfo(extraResources[bindingCount++].bufferWithMemory->get(), 0ull, outputBufferSize)));
3128         if (useExternalImage)
3129                 imageInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(DE_NULL, extraResources[bindingCount++].imageView.get(), VK_IMAGE_LAYOUT_GENERAL)));
3130         if (useExternalSampler)
3131                 samplerInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(extraResources[bindingCount++].sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_GENERAL)));
3132         if (rayTracing)
3133         {
3134                 asWriteInfoPtr = DescriptorASInfoPtr(new VkWriteDescriptorSetAccelerationStructureKHR);
3135                 *asWriteInfoPtr = initVulkanStructure();
3136                 asWriteInfoPtr->accelerationStructureCount  = 1u;
3137                 asWriteInfoPtr->pAccelerationStructures     = extraResources[bindingCount++].asData.tlas.get()->getPtr();
3138         }
3139
3140         {
3141                 bindingCount = 0u;
3142                 DescriptorSetUpdateBuilder updateBuilder;
3143                 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, bufferInfoPtr.get());
3144                 if (useExternalImage)
3145                         updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, imageInfoPtr.get());
3146                 if (useExternalSampler)
3147                         updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLER, samplerInfoPtr.get());
3148                 if (rayTracing)
3149                         updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, asWriteInfoPtr.get());
3150                 updateBuilder.update(vkd, device);
3151         }
3152
3153         // Push constants.
3154         const deUint32            zero    = 0u;
3155         const VkPushConstantRange pcRange = {stageFlags, 0u /*offset*/, static_cast<deUint32>(sizeof(zero)) /*size*/ };
3156
3157         // Needed for some test variants.
3158         Move<VkShaderModule> vertPassthrough;
3159         Move<VkShaderModule> tesePassthrough;
3160         Move<VkShaderModule> tescPassthrough;
3161         Move<VkShaderModule> rgenPassthrough;
3162         Move<VkShaderModule> missPassthrough;
3163
3164         if (m_params.testingStage == TestingStage::FRAGMENT
3165                 || m_params.testingStage == TestingStage::GEOMETRY
3166                 || m_params.testingStage == TestingStage::TESS_CONTROL
3167                 || m_params.testingStage == TestingStage::TESS_EVAL)
3168         {
3169                 vertPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("vert"), 0u);
3170         }
3171
3172         if (m_params.testingStage == TestingStage::TESS_CONTROL)
3173         {
3174                 tesePassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tese"), 0u);
3175         }
3176
3177         if (m_params.testingStage == TestingStage::TESS_EVAL)
3178         {
3179                 tescPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tesc"), 0u);
3180         }
3181
3182         if (m_params.testingStage == TestingStage::CLOSEST_HIT
3183                 || m_params.testingStage == TestingStage::ANY_HIT
3184                 || m_params.testingStage == TestingStage::INTERSECTION
3185                 || m_params.testingStage == TestingStage::MISS
3186                 || m_params.testingStage == TestingStage::CALLABLE)
3187         {
3188                 rgenPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("rgen"), 0u);
3189         }
3190
3191         if (m_params.testingStage == TestingStage::INTERSECTION)
3192         {
3193                 missPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("miss"), 0u);
3194         }
3195
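	// One iteration per descriptor type combination: build the pipeline for that iteration's shader variant, update the descriptor sets and check the results.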
3196         for (deUint32 iteration = 0u; iteration < numIterations; ++iteration)
3197         {
3198                 // Generate source set for the current iteration.
3199                 if (srcSetNeeded)
3200                 {
3201                         // Free previous descriptor set before rebuilding the pool.
3202                         srcSet          = Move<VkDescriptorSet>();
3203                         iterationSrcSet = paramSet->genSourceSet(m_params.sourceSetStrategy, iteration);
3204                         srcPool         = iterationSrcSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, srcPoolFlags);
3205                         srcLayout       = iterationSrcSet->makeDescriptorSetLayout(vkd, device, stageFlags, srcLayoutFlags);
3206
3207                         const auto srcVarCount = iterationSrcSet->getVariableDescriptorCount();
3208                         VariableCountInfoPtr srcVariableCountInfo;
3209
3210                         if (srcVarCount)
3211                         {
3212                                 srcVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
3213                                 *srcVariableCountInfo = initVulkanStructure();
3214
3215                                 srcVariableCountInfo->descriptorSetCount = 1u;
3216                                 srcVariableCountInfo->pDescriptorCounts = &(srcVarCount.get());
3217                         }
3218
3219                         srcSet = makeDescriptorSet(vkd, device, srcPool.get(), srcLayout.get(), srcVariableCountInfo.get());
3220                 }
3221
3222                 // Set layouts and sets used in the pipeline.
3223                 const std::vector<VkDescriptorSetLayout> setLayouts = {dstLayout.get(), extraLayout.get()};
3224                 const std::vector<VkDescriptorSet>       usedSets   = {dstSet.get(), extraSet.get()};
3225
3226                 // Create resources.
3227                 allResources.emplace_back(paramSet->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs));
3228                 const auto& resources = allResources.back();
3229
3230                 // Make pipeline for the current iteration.
3231                 const auto pipelineLayout = makePipelineLayout(vkd, device, static_cast<deUint32>(setLayouts.size()), de::dataOrNull(setLayouts), 1u, &pcRange);
3232                 const auto moduleName     = shaderName(iteration);
3233                 const auto shaderModule   = createShaderModule(vkd, device, m_context.getBinaryCollection().get(moduleName), 0u);
3234
3235                 Move<VkPipeline>     pipeline;
3236                 Move<VkRenderPass>   renderPass;
3237                 Move<VkFramebuffer>  framebuffer;
3238
3239                 deUint32 shaderGroupHandleSize          = 0u;
3240                 deUint32 shaderGroupBaseAlignment       = 1u;
3241
3242                 de::MovePtr<BufferWithMemory>   raygenSBT;
3243                 de::MovePtr<BufferWithMemory>   missSBT;
3244                 de::MovePtr<BufferWithMemory>   hitSBT;
3245                 de::MovePtr<BufferWithMemory>   callableSBT;
3246
3247                 VkStridedDeviceAddressRegionKHR raygenSBTRegion         = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3248                 VkStridedDeviceAddressRegionKHR missSBTRegion           = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3249                 VkStridedDeviceAddressRegionKHR hitSBTRegion            = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3250                 VkStridedDeviceAddressRegionKHR callableSBTRegion       = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3251
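		// Build the pipeline matching the bind point: compute, graphics or ray tracing.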
3252                 if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
3253                         pipeline = makeComputePipeline(vkd, device, pipelineLayout.get(), 0u, shaderModule.get(), 0u, nullptr);
3254                 else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
3255                 {
3256                         VkShaderModule vertModule = DE_NULL;
3257                         VkShaderModule teseModule = DE_NULL;
3258                         VkShaderModule tescModule = DE_NULL;
3259                         VkShaderModule geomModule = DE_NULL;
3260                         VkShaderModule fragModule = DE_NULL;
3261
3262                         if (m_params.testingStage == TestingStage::VERTEX)
3263                                 vertModule = shaderModule.get();
3264                         else if (m_params.testingStage == TestingStage::FRAGMENT)
3265                         {
3266                                 vertModule = vertPassthrough.get();
3267                                 fragModule = shaderModule.get();
3268                         }
3269                         else if (m_params.testingStage == TestingStage::GEOMETRY)
3270                         {
3271                                 vertModule = vertPassthrough.get();
3272                                 geomModule = shaderModule.get();
3273                         }
3274                         else if (m_params.testingStage == TestingStage::TESS_CONTROL)
3275                         {
3276                                 vertModule = vertPassthrough.get();
3277                                 teseModule = tesePassthrough.get();
3278                                 tescModule = shaderModule.get();
3279                         }
3280                         else if (m_params.testingStage == TestingStage::TESS_EVAL)
3281                         {
3282                                 vertModule = vertPassthrough.get();
3283                                 tescModule = tescPassthrough.get();
3284                                 teseModule = shaderModule.get();
3285                         }
3286                         else
3287                                 DE_ASSERT(false);
3288
3289                         renderPass  = buildRenderPass(vkd, device, resources);
3290                         pipeline    = buildGraphicsPipeline(vkd, device, pipelineLayout.get(), vertModule, tescModule, teseModule, geomModule, fragModule, renderPass.get());
3291                         framebuffer = buildFramebuffer(vkd, device, renderPass.get(), resources);
3292                 }
3293                 else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
3294                 {
3295                         const auto rayTracingPipeline       = de::newMovePtr<RayTracingPipeline>();
3296                         const auto rayTracingPropertiesKHR      = makeRayTracingProperties(vki, physDev);
3297                         shaderGroupHandleSize                           = rayTracingPropertiesKHR->getShaderGroupHandleSize();
3298                         shaderGroupBaseAlignment                        = rayTracingPropertiesKHR->getShaderGroupBaseAlignment();
3299
3300                         VkShaderModule rgenModule = DE_NULL;
3301                         VkShaderModule isecModule = DE_NULL;
3302                         VkShaderModule ahitModule = DE_NULL;
3303                         VkShaderModule chitModule = DE_NULL;
3304                         VkShaderModule missModule = DE_NULL;
3305                         VkShaderModule callModule = DE_NULL;
3306
3307                         const deUint32  rgenGroup   = 0u;
3308                         deUint32        hitGroup    = 0u;
3309                         deUint32        missGroup   = 0u;
3310                         deUint32        callGroup   = 0u;
3311
3312                         if (m_params.testingStage == TestingStage::RAY_GEN)
3313                         {
3314                                 rgenModule = shaderModule.get();
3315                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3316                         }
3317                         else if (m_params.testingStage == TestingStage::INTERSECTION)
3318                         {
3319                                 hitGroup   = 1u;
3320                                 missGroup  = 2u;
3321                                 rgenModule = rgenPassthrough.get();
3322                                 missModule = missPassthrough.get();
3323                                 isecModule = shaderModule.get();
3324                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3325                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_INTERSECTION_BIT_KHR, isecModule, hitGroup);
3326                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
3327                         }
3328                         else if (m_params.testingStage == TestingStage::ANY_HIT)
3329                         {
3330                                 hitGroup   = 1u;
3331                                 rgenModule = rgenPassthrough.get();
3332                                 ahitModule = shaderModule.get();
3333                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3334                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_ANY_HIT_BIT_KHR, ahitModule, hitGroup);
3335                         }
3336                         else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
3337                         {
3338                                 hitGroup   = 1u;
3339                                 rgenModule = rgenPassthrough.get();
3340                                 chitModule = shaderModule.get();
3341                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3342                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, chitModule, hitGroup);
3343                         }
3344                         else if (m_params.testingStage == TestingStage::MISS)
3345                         {
3346                                 missGroup  = 1u;
3347                                 rgenModule = rgenPassthrough.get();
3348                                 missModule = shaderModule.get();
3349                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3350                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
3351                         }
3352                         else if (m_params.testingStage == TestingStage::CALLABLE)
3353                         {
3354                                 callGroup  = 1u;
3355                                 rgenModule = rgenPassthrough.get();
3356                                 callModule = shaderModule.get();
3357                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3358                                 rayTracingPipeline->addShader(VK_SHADER_STAGE_CALLABLE_BIT_KHR, callModule, callGroup);
3359                         }
3360                         else
3361                                 DE_ASSERT(false);
3362
3363                         pipeline = rayTracingPipeline->createPipeline(vkd, device, pipelineLayout.get());
3364
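			// Create shader binding tables only for the groups used by this stage variant; the ray generation group is always present.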
3365                         raygenSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, rgenGroup, 1u);
3366                         raygenSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, raygenSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3367
3368                         if (missGroup > 0u)
3369                         {
3370                                 missSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, missGroup, 1u);
3371                                 missSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, missSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3372                         }
3373
3374                         if (hitGroup > 0u)
3375                         {
3376                                 hitSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, hitGroup, 1u);
3377                                 hitSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, hitSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3378                         }
3379
3380                         if (callGroup > 0u)
3381                         {
3382                                 callableSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, callGroup, 1u);
3383                                 callableSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, callableSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3384                         }
3385                 }
3386                 else
3387                         DE_ASSERT(false);
3388
3389                 // Command buffer for the current iteration.
3390                 const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
3391                 const auto cmdBuffer    = cmdBufferPtr.get();
3392
3393                 beginCommandBuffer(vkd, cmdBuffer);
3394
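		// With update-after-bind the descriptor sets are bound first and updated afterwards; otherwise they are updated before being bound.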
3395                 const Step steps[] = {
3396                         (updateAfterBind ? Step::BIND : Step::UPDATE),
3397                         (updateAfterBind ? Step::UPDATE : Step::BIND)
3398                 };
3399
3400                 for (const auto& step : steps)
3401                 {
3402                         if (step == Step::BIND)
3403                         {
3404                                 vkd.cmdBindPipeline(cmdBuffer, bindPoint, pipeline.get());
3405                                 vkd.cmdBindDescriptorSets(cmdBuffer, bindPoint, pipelineLayout.get(), 0u, static_cast<deUint32>(usedSets.size()), de::dataOrNull(usedSets), 0u, nullptr);
3406                         }
3407                         else // Step::UPDATE
3408                         {
3409                                 if (srcSetNeeded)
3410                                 {
3411                                         // Note: these operations need to be called on paramSet and not iterationSrcSet. The latter is a compatible set
3412                                         // that's correct and contains compatible bindings but, when a binding has been changed from non-mutable to
3413                                         // mutable or to an extended mutable type, the list of descriptor types for the mutable bindings in
3414                                         // iterationSrcSet is not in iteration order like it is in the original set and must not be taken into
3415                                         // account to update or copy sets.
3416                                         paramSet->updateDescriptorSet(vkd, device, srcSet.get(), iteration, resources);
3417                                         paramSet->copyDescriptorSet(vkd, device, srcSet.get(), dstSet.get());
3418                                 }
3419                                 else
3420                                 {
3421                                         paramSet->updateDescriptorSet(vkd, device, dstSet.get(), iteration, resources);
3422                                 }
3423                         }
3424                 }
3425
3426                 // Run shader.
3427                 vkd.cmdPushConstants(cmdBuffer, pipelineLayout.get(), stageFlags, 0u, static_cast<deUint32>(sizeof(zero)), &zero);
3428
3429                 if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
3430                         vkd.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
3431                 else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
3432                 {
3433                         const auto extent     = getDefaultExtent();
3434                         const auto renderArea = makeRect2D(extent);
3435
3436                         beginRenderPass(vkd, cmdBuffer, renderPass.get(), framebuffer.get(), renderArea);
3437                         vkd.cmdDraw(cmdBuffer, 3u, 1u, 0u, 0u);
3438                         endRenderPass(vkd, cmdBuffer);
3439                 }
3440                 else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
3441                 {
3442                         vkd.cmdTraceRaysKHR(cmdBuffer, &raygenSBTRegion, &missSBTRegion, &hitSBTRegion, &callableSBTRegion, 1u, 1u, 1u);
3443                 }
3444                 else
3445                         DE_ASSERT(false);
3446
3447                 endCommandBuffer(vkd, cmdBuffer);
3448                 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
3449
3450                 // Verify output buffer.
3451                 {
3452                         const auto outputBufferVal = extraResources[0].getStoredValue(vkd, device, alloc, qIndex, queue, iteration);
3453                         DE_ASSERT(static_cast<bool>(outputBufferVal));
3454
3455                         const auto expectedValue = getExpectedOutputBufferValue();
3456                         if (outputBufferVal.get() != expectedValue)
3457                         {
3458                                 std::ostringstream msg;
3459                                 msg << "Iteration " << iteration << ": unexpected value found in output buffer (expected " << expectedValue << " and found " << outputBufferVal.get() << ")";
3460                                 TCU_FAIL(msg.str());
3461                         }
3462                 }
3463
3464                 // Verify descriptor writes.
3465                 {
3466                         size_t     resourcesOffset = 0;
3467                         const auto writeMask       = getStoredValueMask();
3468                         const auto numBindings     = paramSet->numBindings();
3469
3470                         for (deUint32 bindingIdx = 0u; bindingIdx < numBindings; ++bindingIdx)
3471                         {
3472                                 const auto binding = paramSet->getBinding(bindingIdx);
3473                                 const auto bindingTypes = binding->typesAtIteration(iteration);
3474
3475                                 for (size_t descriptorIdx = 0; descriptorIdx < bindingTypes.size(); ++descriptorIdx)
3476                                 {
3477                                         const auto& descriptorType = bindingTypes[descriptorIdx];
3478                                         if (!isShaderWritable(descriptorType))
3479                                                 continue;
3480
3481                                         const auto& resource        = resources[resourcesOffset + descriptorIdx];
3482                                         const auto  initialValue    = resource.initialValue;
3483                                         const auto  storedValuePtr  = resource.getStoredValue(vkd, device, alloc, qIndex, queue);
3484
3485                                         DE_ASSERT(static_cast<bool>(storedValuePtr));
3486                                         const auto storedValue   = storedValuePtr.get();
3487                                         const auto expectedValue = (initialValue | writeMask);
3488                                         if (expectedValue != storedValue)
3489                                         {
3490                                                 std::ostringstream msg;
3491                                                 msg << "Iteration " << iteration << ": descriptor at binding " << bindingIdx << " index " << descriptorIdx
3492                                                     << " with type " << de::toString(descriptorType) << " contains unexpected value " << std::hex
3493                                                         << storedValue << " (expected " << expectedValue << ")";
3494                                                 TCU_FAIL(msg.str());
3495                                         }
3496                                 }
3497
3498                                 resourcesOffset += bindingTypes.size();
3499                         }
3500                 }
3501         }
3502
3503         return tcu::TestStatus::pass("Pass");
3504 }
3505
3506 using GroupPtr = de::MovePtr<tcu::TestCaseGroup>;
3507
3508 void createMutableTestVariants (tcu::TestContext& testCtx, tcu::TestCaseGroup* parentGroup, const DescriptorSetPtr& descriptorSet, const std::vector<TestingStage>& stagesToTest)
3509 {
3510         const struct
3511         {
3512                 UpdateType  updateType;
3513                 const char* name;
3514         } updateTypes[] = {
3515                 {UpdateType::WRITE, "update_write"},
3516                 {UpdateType::COPY,  "update_copy"},
3517         };
3518
3519         const struct
3520         {
3521                 SourceSetStrategy   sourceSetStrategy;
3522                 const char*         name;
3523         } sourceStrategies[] = {
3524                 {SourceSetStrategy::MUTABLE,    "mutable_source"},
3525                 {SourceSetStrategy::NONMUTABLE, "nonmutable_source"},
3526                 {SourceSetStrategy::NO_SOURCE,  "no_source"},
3527         };
3528
3529         const struct
3530         {
3531                 SourceSetType   sourceSetType;
3532                 const char*     name;
3533         } sourceTypes[] = {
3534                 {SourceSetType::NORMAL,    "normal_source"},
3535                 {SourceSetType::HOST_ONLY, "host_only_source"},
3536                 {SourceSetType::NO_SOURCE, "no_source"},
3537         };
3538
3539         const struct
3540         {
3541                 PoolMutableStrategy poolMutableStrategy;
3542                 const char*         name;
3543         } poolStrategies[] = {
3544                 {PoolMutableStrategy::KEEP_TYPES,   "pool_same_types"},
3545                 {PoolMutableStrategy::NO_TYPES,     "pool_no_types"},
3546                 {PoolMutableStrategy::EXPAND_TYPES, "pool_expand_types"},
3547         };
3548
3549         const struct
3550         {
3551                 UpdateMoment    updateMoment;
3552                 const char*     name;
3553         } updateMoments[] = {
3554                 {UpdateMoment::NORMAL,            "pre_update"},
3555                 {UpdateMoment::UPDATE_AFTER_BIND, "update_after_bind"},
3556         };
3557
3558         const struct
3559         {
3560                 ArrayAccessType arrayAccessType;
3561                 const char*     name;
3562         } arrayAccessTypes[] = {
3563                 {ArrayAccessType::CONSTANT,      "index_constant"},
3564                 {ArrayAccessType::PUSH_CONSTANT, "index_push_constant"},
3565                 {ArrayAccessType::NO_ARRAY,      "no_array"},
3566         };
3567
3568         const struct StageAndName
3569         {
3570                 TestingStage    testingStage;
3571                 const char*     name;
3572         } testStageList[] = {
3573                 {TestingStage::COMPUTE,      "comp"},
3574                 {TestingStage::VERTEX,       "vert"},
3575                 {TestingStage::TESS_CONTROL, "tesc"},
3576                 {TestingStage::TESS_EVAL,    "tese"},
3577                 {TestingStage::GEOMETRY,     "geom"},
3578                 {TestingStage::FRAGMENT,     "frag"},
3579                 {TestingStage::RAY_GEN,      "rgen"},
3580                 {TestingStage::INTERSECTION, "isec"},
3581                 {TestingStage::ANY_HIT,      "ahit"},
3582                 {TestingStage::CLOSEST_HIT,  "chit"},
3583                 {TestingStage::MISS,         "miss"},
3584                 {TestingStage::CALLABLE,     "call"},
3585         };
3586
3587         const bool hasArrays           = descriptorSet->hasArrays();
3588         const bool hasInputAttachments = usesInputAttachments(*descriptorSet);
3589
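	// Build the full combination tree: update type, source set strategy, source set type, pool strategy, update moment, array access type and shader stage.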
3590         for (const auto& ut : updateTypes)
3591         {
3592                 GroupPtr updateGroup(new tcu::TestCaseGroup(testCtx, ut.name, ""));
3593
3594                 for (const auto& srcStrategy : sourceStrategies)
3595                 {
3596                         // Skip combinations that make no sense.
3597                         if (ut.updateType == UpdateType::WRITE && srcStrategy.sourceSetStrategy != SourceSetStrategy::NO_SOURCE)
3598                                 continue;
3599
3600                         if (ut.updateType == UpdateType::COPY && srcStrategy.sourceSetStrategy == SourceSetStrategy::NO_SOURCE)
3601                                 continue;
3602
3603                         if (srcStrategy.sourceSetStrategy == SourceSetStrategy::NONMUTABLE && descriptorSet->needsAnyAliasing())
3604                                 continue;
3605
3606                         GroupPtr srcStrategyGroup(new tcu::TestCaseGroup(testCtx, srcStrategy.name, ""));
3607
3608                         for (const auto& srcType : sourceTypes)
3609                         {
3610                                 // Skip combinations that make no sense.
3611                                 if (ut.updateType == UpdateType::WRITE && srcType.sourceSetType != SourceSetType::NO_SOURCE)
3612                                         continue;
3613
3614                                 if (ut.updateType == UpdateType::COPY && srcType.sourceSetType == SourceSetType::NO_SOURCE)
3615                                         continue;
3616
3617                                 GroupPtr srcTypeGroup(new tcu::TestCaseGroup(testCtx, srcType.name, ""));
3618
3619                                 for (const auto& poolStrategy: poolStrategies)
3620                                 {
3621                                         GroupPtr poolStrategyGroup(new tcu::TestCaseGroup(testCtx, poolStrategy.name, ""));
3622
3623                                         for (const auto& moment : updateMoments)
3624                                         {
3625                                                 //if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && srcType.sourceSetType == SourceSetType::HOST_ONLY)
3626                                                 //      continue;
3627
3628                                                 if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && hasInputAttachments)
3629                                                         continue;
3630
3631                                                 GroupPtr momentGroup(new tcu::TestCaseGroup(testCtx, moment.name, ""));
3632
3633                                                 for (const auto& accessType : arrayAccessTypes)
3634                                                 {
3635                                                         // Skip combinations that make no sense.
3636                                                         if (hasArrays && accessType.arrayAccessType == ArrayAccessType::NO_ARRAY)
3637                                                                 continue;
3638
3639                                                         if (!hasArrays && accessType.arrayAccessType != ArrayAccessType::NO_ARRAY)
3640                                                                 continue;
3641
3642                                                         GroupPtr accessTypeGroup(new tcu::TestCaseGroup(testCtx, accessType.name, ""));
3643
3644                                                         for (const auto& testStage : stagesToTest)
3645                                                         {
3646                                                                 const auto beginItr = std::begin(testStageList);
3647                                                                 const auto endItr   = std::end(testStageList);
3648                                                                 const auto iter     = std::find_if(beginItr, endItr, [testStage] (const StageAndName& ts) { return ts.testingStage == testStage; });
3649
3650                                                                 DE_ASSERT(iter != endItr);
3651                                                                 const auto& stage = *iter;
3652
3653                                                                 if (hasInputAttachments && stage.testingStage != TestingStage::FRAGMENT)
3654                                                                         continue;
3655
3656                                                                 TestParams params = {
3657                                                                         descriptorSet,
3658                                                                         ut.updateType,
3659                                                                         srcStrategy.sourceSetStrategy,
3660                                                                         srcType.sourceSetType,
3661                                                                         poolStrategy.poolMutableStrategy,
3662                                                                         moment.updateMoment,
3663                                                                         accessType.arrayAccessType,
3664                                                                         stage.testingStage,
3665                                                                 };
3666
3667                                                                 accessTypeGroup->addChild(new MutableTypesTest(testCtx, stage.name, "", params));
3668                                                         }
3669
3670                                                         momentGroup->addChild(accessTypeGroup.release());
3671                                                 }
3672
3673                                                 poolStrategyGroup->addChild(momentGroup.release());
3674                                         }
3675
3676                                         srcTypeGroup->addChild(poolStrategyGroup.release());
3677                                 }
3678
3679                                 srcStrategyGroup->addChild(srcTypeGroup.release());
3680                         }
3681
3682                         updateGroup->addChild(srcStrategyGroup.release());
3683                 }
3684
3685                 parentGroup->addChild(updateGroup.release());
3686         }
3687 }
3688
3689 } // anonymous namespace
3690
3691 std::string descriptorTypeStr (VkDescriptorType descriptorType)
3692 {
3693         static const auto prefixLen = std::string("VK_DESCRIPTOR_TYPE_").size();
3694         return de::toLower(de::toString(descriptorType).substr(prefixLen));
3695 }
3696
3697 tcu::TestCaseGroup* createDescriptorValveMutableTests (tcu::TestContext& testCtx)
3698 {
3699         GroupPtr mainGroup(new tcu::TestCaseGroup(testCtx, "mutable_descriptor", "Tests for VK_VALVE_mutable_descriptor_type"));
3700
3701         const VkDescriptorType basicDescriptorTypes[] = {
3702                 VK_DESCRIPTOR_TYPE_SAMPLER,
3703                 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
3704                 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
3705                 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
3706                 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
3707                 VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
3708                 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
3709                 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
3710                 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
3711                 VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
3712         };
3713
3714         static const auto mandatoryTypes = getMandatoryMutableTypes();
3715
3716         using StageVec = std::vector<TestingStage>;
3717
3718         const StageVec allStages =
3719         {
3720                 TestingStage::COMPUTE,
3721                 TestingStage::VERTEX,
3722                 TestingStage::TESS_CONTROL,
3723                 TestingStage::TESS_EVAL,
3724                 TestingStage::GEOMETRY,
3725                 TestingStage::FRAGMENT,
3726                 TestingStage::RAY_GEN,
3727                 TestingStage::INTERSECTION,
3728                 TestingStage::ANY_HIT,
3729                 TestingStage::CLOSEST_HIT,
3730                 TestingStage::MISS,
3731                 TestingStage::CALLABLE,
3732         };
3733
3734         const StageVec reducedStages =
3735         {
3736                 TestingStage::COMPUTE,
3737                 TestingStage::VERTEX,
3738                 TestingStage::FRAGMENT,
3739                 TestingStage::RAY_GEN,
3740         };
3741
3742         const StageVec computeOnly =
3743         {
3744                 TestingStage::COMPUTE,
3745         };
3746
3747         // Basic tests with a single mutable descriptor.
3748         {
3749                 GroupPtr singleCases(new tcu::TestCaseGroup(testCtx, "single", "Basic mutable descriptor tests with a single mutable descriptor"));
3750
3751                 for (const auto& descriptorType : basicDescriptorTypes)
3752                 {
3753                         const auto                          groupName = descriptorTypeStr(descriptorType);
3754                         const std::vector<VkDescriptorType> actualTypes(1u, descriptorType);
3755
3756                         DescriptorSetPtr setPtr;
3757                         {
3758                                 DescriptorSet::BindingPtrVector setBindings;
3759                                 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, actualTypes));
3760                                 setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3761                         }
3762
3763                         GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3764                         createMutableTestVariants(testCtx, subGroup.get(), setPtr, allStages);
3765
3766                         singleCases->addChild(subGroup.release());
3767                 }
3768
3769                 // Case with a single descriptor that iterates several types.
3770                 {
3771                         DescriptorSetPtr setPtr;
3772                         {
3773                                 DescriptorSet::BindingPtrVector setBindings;
3774                                 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mandatoryTypes));
3775                                 setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3776                         }
3777
3778                         GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "all_mandatory", ""));
3779                         createMutableTestVariants(testCtx, subGroup.get(), setPtr, reducedStages);
3780
3781                         singleCases->addChild(subGroup.release());
3782                 }
3783
3784                 // Cases that try to verify switching from any descriptor type to any other is possible.
3785                 {
3786                         GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "switches", "Test switching from one to another descriptor type works as expected"));
3787
3788                         for (const auto& initialDescriptorType : basicDescriptorTypes)
3789                         {
3790                                 for (const auto& finalDescriptorType : basicDescriptorTypes)
3791                                 {
3792                                         if (initialDescriptorType == finalDescriptorType)
3793                                                 continue;
3794
3795                                         const std::vector<VkDescriptorType> mutableTypes { initialDescriptorType, finalDescriptorType };
3796                                         DescriptorSet::BindingPtrVector setBindings;
3797                                         setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mutableTypes));
3798
3799                                         DescriptorSetPtr setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3800
3801                                         const auto groupName = descriptorTypeStr(initialDescriptorType) + "_" + descriptorTypeStr(finalDescriptorType);
3802                                         GroupPtr combinationGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3803                                         createMutableTestVariants(testCtx, combinationGroup.get(), setPtr, reducedStages);
3804                                         subGroup->addChild(combinationGroup.release());
3805                                 }
3806                         }
3807
3808                         singleCases->addChild(subGroup.release());
3809                 }
3810
3811                 mainGroup->addChild(singleCases.release());
3812         }
3813
3814         // Cases with a single non-mutable descriptor. This provides some basic checks to verify copying to non-mutable bindings works.
3815         {
3816                 GroupPtr singleNonMutableGroup (new tcu::TestCaseGroup(testCtx, "single_nonmutable", "Tests using a single non-mutable descriptor"));
3817
3818                 for (const auto& descriptorType : basicDescriptorTypes)
3819                 {
3820                         DescriptorSet::BindingPtrVector bindings;
3821                         bindings.emplace_back(new SingleBinding(descriptorType, std::vector<VkDescriptorType>()));
3822                         DescriptorSetPtr descriptorSet (new DescriptorSet(bindings));
3823
3824                         const auto groupName = descriptorTypeStr(descriptorType);
3825                         GroupPtr descGroup (new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3826
3827                         createMutableTestVariants(testCtx, descGroup.get(), descriptorSet, reducedStages);
3828                         singleNonMutableGroup->addChild(descGroup.release());
3829                 }
3830
3831                 mainGroup->addChild(singleNonMutableGroup.release());
3832         }
3833
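	// Parameter dimensions shared by the array-based test groups below.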
3834         const struct {
3835                 bool unbounded;
3836                 const char* name;
3837         } unboundedCases[] = {
3838                 {false, "constant_size"},
3839                 {true,  "unbounded"},
3840         };
3841
3842         const struct {
3843                 bool aliasing;
3844                 const char* name;
3845         } aliasingCases[] = {
3846                 {false, "noaliasing"},
3847                 {true,  "aliasing"},
3848         };
3849
3850         const struct {
3851                 bool oneArrayOnly;
3852                 bool mixNonMutable;
3853                 const char* groupName;
3854                 const char* groupDesc;
3855         } arrayCountGroups[] = {
3856                 {true,  false, "one_array",             "Tests using an array of mutable descriptors"},
3857                 {false, false, "multiple_arrays",       "Tests using multiple arrays of mutable descriptors"},
3858                 {false, true,  "multiple_arrays_mixed", "Tests using multiple arrays of mutable descriptors mixed with arrays of nonmutable ones"},
3859         };
3860
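	// Array cases: one or several arrays of mutable descriptors, optionally mixed with non-mutable arrays, using constant or unbounded (variable count) sizes, with and without aliasing.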
3861         for (const auto& variant : arrayCountGroups)
3862         {
3863                 GroupPtr arrayGroup(new tcu::TestCaseGroup(testCtx, variant.groupName, variant.groupDesc));
3864
3865                 for (const auto& unboundedCase : unboundedCases)
3866                 {
3867                         GroupPtr unboundedGroup(new tcu::TestCaseGroup(testCtx, unboundedCase.name, ""));
3868
3869                         for (const auto& aliasingCase : aliasingCases)
3870                         {
3871                                 GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));
3872
3873                                 DescriptorSet::BindingPtrVector setBindings;
3874
3875                                 // Prepare descriptors for this test variant.
3876                                 for (size_t mandatoryTypesRotation = 0; mandatoryTypesRotation < mandatoryTypes.size(); ++mandatoryTypesRotation)
3877                                 {
3878                                         const bool isLastBinding = (variant.oneArrayOnly || mandatoryTypesRotation == mandatoryTypes.size() - 1u);
3879                                         const bool isUnbounded   = (unboundedCase.unbounded && isLastBinding);
3880
3881                                         // Create a rotation of the mandatory types for each mutable array binding.
3882                                         auto mandatoryTypesVector = mandatoryTypes;
3883                                         {
3884                                                 const auto beginPtr = &mandatoryTypesVector[0];
3885                                                 const auto endPtr   = beginPtr + mandatoryTypesVector.size();
3886                                                 std::rotate(beginPtr, &mandatoryTypesVector[mandatoryTypesRotation], endPtr);
3887                                         }
3888
3889                                         std::vector<SingleBinding> arrayBindings;
3890
3891                                         if (aliasingCase.aliasing)
3892                                         {
3893                                                 // With aliasing, the descriptor types rotate in each descriptor.
3894                                                 for (size_t typeIdx = 0; typeIdx < mandatoryTypesVector.size(); ++typeIdx)
3895                                                 {
3896                                                         auto       rotatedTypes = mandatoryTypesVector;
3897                                                         const auto beginPtr     = &rotatedTypes[0];
3898                                                         const auto endPtr       = beginPtr + rotatedTypes.size();
3899
3900                                                         std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
3901
3902                                                         arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes);
3903                                                 }
3904                                         }
3905                                         else
3906                                         {
3907                                                 // Without aliasing, all descriptors use the same type at the same time.
3908                                                 const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mandatoryTypesVector);
3909                                                 arrayBindings.resize(mandatoryTypesVector.size(), noAliasingBinding);
3910                                         }
3911
3912                                         setBindings.emplace_back(new ArrayBinding(isUnbounded, arrayBindings));
3913
3914                                         if (variant.mixNonMutable && !isUnbounded)
3915                                         {
3916                                                 // Create a non-mutable array binding interleaved with the other ones.
3917                                                 const SingleBinding nonMutableBinding(mandatoryTypes[mandatoryTypesRotation], std::vector<VkDescriptorType>());
3918                                                 std::vector<SingleBinding> nonMutableBindings(mandatoryTypes.size(), nonMutableBinding);
3919                                                 setBindings.emplace_back(new ArrayBinding(false, nonMutableBindings));
3920                                         }
3921
3922                                         if (variant.oneArrayOnly)
3923                                                 break;
3924                                 }
3925
3926                                 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
3927                                 createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);
3928
3929                                 unboundedGroup->addChild(aliasingGroup.release());
3930                         }
3931
3932                         arrayGroup->addChild(unboundedGroup.release());
3933                 }
3934
3935                 mainGroup->addChild(arrayGroup.release());
3936         }
3937
3938         // Cases with a single mutable binding followed by an array of mutable bindings.
3939         // The array will use a single type beyond the mandatory ones.
3940         {
3941                 GroupPtr singleAndArrayGroup(new tcu::TestCaseGroup(testCtx, "single_and_array", "Tests using a single mutable binding followed by a mutable array binding"));
3942
3943                 for (const auto& descriptorType : basicDescriptorTypes)
3944                 {
3945                         // Input attachments will not use arrays.
3946                         if (descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
3947                                 continue;
3948
3949                         if (de::contains(begin(mandatoryTypes), end(mandatoryTypes), descriptorType))
3950                                 continue;
3951
3952                         const auto groupName = descriptorTypeStr(descriptorType);
3953                         GroupPtr descTypeGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3954
3955                         for (const auto& aliasingCase : aliasingCases)
3956                         {
3957                                 GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));
3958
3959                                 DescriptorSet::BindingPtrVector setBindings;
3960                                 std::vector<SingleBinding> arrayBindings;
3961
3962                                 // Add single type beyond the mandatory ones.
3963                                 auto arrayBindingDescTypes = mandatoryTypes;
3964                                 arrayBindingDescTypes.push_back(descriptorType);
3965
3966                                 // Single mutable descriptor as the first binding.
3967                                 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, arrayBindingDescTypes));
3968
3969                                 // Descriptor array as the second binding.
3970                                 if (aliasingCase.aliasing)
3971                                 {
3972                                         // With aliasing, the descriptor types rotate in each descriptor.
					for (size_t typeIdx = 0; typeIdx < arrayBindingDescTypes.size(); ++typeIdx)
					{
						auto       rotatedTypes = arrayBindingDescTypes;
						const auto beginPtr     = &rotatedTypes[0];
						const auto endPtr       = beginPtr + rotatedTypes.size();

						std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);

						arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes);
					}
				}
				else
				{
					// Without aliasing, all descriptors use the same type at the same time.
					const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, arrayBindingDescTypes);
					arrayBindings.resize(arrayBindingDescTypes.size(), noAliasingBinding);
				}

				// Second binding: array binding.
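				// The first ArrayBinding argument is the "unbounded" flag; false here presumably selects a fixed-size array with one element per entry in arrayBindings.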
				setBindings.emplace_back(new ArrayBinding(false/*unbounded*/, arrayBindings));

				// Create set and test variants.
				DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
				createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);

				descTypeGroup->addChild(aliasingGroup.release());
			}

			singleAndArrayGroup->addChild(descTypeGroup.release());
		}

		mainGroup->addChild(singleAndArrayGroup.release());
	}

	// Cases with several mutable non-array bindings.
	{
		GroupPtr multipleGroup    (new tcu::TestCaseGroup(testCtx, "multiple", "Tests using multiple mutable bindings"));
		GroupPtr mutableOnlyGroup (new tcu::TestCaseGroup(testCtx, "mutable_only", "Tests using only mutable descriptors"));
		GroupPtr mixedGroup       (new tcu::TestCaseGroup(testCtx, "mixed", "Tests mixing mutable descriptors and non-mutable descriptors"));

		// Each descriptor will have a different type in every iteration, like in the one_array aliasing case.
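		// Two passes: groupIdx 0 builds a set with mutable bindings only, while groupIdx 1 also interleaves a non-mutable binding after each mutable one.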
		for (int groupIdx = 0; groupIdx < 2; ++groupIdx)
		{
			const bool mixed = (groupIdx == 1);
			DescriptorSet::BindingPtrVector setBindings;

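			// As in the array cases above, binding i receives the mandatory type list rotated left by i positions, so no two mutable bindings list the types in the same order.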
			for (size_t typeIdx = 0; typeIdx < mandatoryTypes.size(); ++typeIdx)
			{
				auto       rotatedTypes = mandatoryTypes;
				const auto beginPtr     = &rotatedTypes[0];
				const auto endPtr       = beginPtr + rotatedTypes.size();

				std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
				setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes));

				// Additional non-mutable binding interleaved with the mutable ones.
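				// (Here the first SingleBinding argument is the concrete, non-mutable descriptor type and the empty vector means no list of mutable candidate types.)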
				if (mixed)
					setBindings.emplace_back(new SingleBinding(rotatedTypes[0], std::vector<VkDescriptorType>()));
			}
			DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));

			const auto dstGroup = (mixed ? mixedGroup.get() : mutableOnlyGroup.get());
			createMutableTestVariants(testCtx, dstGroup, descriptorSet, computeOnly);
		}

		multipleGroup->addChild(mutableOnlyGroup.release());
		multipleGroup->addChild(mixedGroup.release());
		mainGroup->addChild(multipleGroup.release());
	}

	return mainGroup.release();
}

} // BindingModel
} // vkt