1 /*-------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2021 The Khronos Group Inc.
6 * Copyright (c) 2021 Valve Corporation.
8 * Licensed under the Apache License, Version 2.0 (the "License");
9 * you may not use this file except in compliance with the License.
10 * You may obtain a copy of the License at
12 * http://www.apache.org/licenses/LICENSE-2.0
14 * Unless required by applicable law or agreed to in writing, software
15 * distributed under the License is distributed on an "AS IS" BASIS,
16 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 * See the License for the specific language governing permissions and
18 * limitations under the License.
22 * \brief Tests for VK_VALVE_mutable_descriptor_type.
23 *//*--------------------------------------------------------------------*/
24 #include "vktBindingValveMutableTests.hpp"
25 #include "vktTestCase.hpp"
28 #include "vkRefUtil.hpp"
29 #include "vkQueryUtil.hpp"
30 #include "vkImageWithMemory.hpp"
31 #include "vkBufferWithMemory.hpp"
32 #include "vkTypeUtil.hpp"
33 #include "vkObjUtil.hpp"
34 #include "vkBarrierUtil.hpp"
35 #include "vkCmdUtil.hpp"
36 #include "vkBuilderUtil.hpp"
37 #include "vkRayTracingUtil.hpp"
39 #include "deUniquePtr.hpp"
40 #include "deSTLUtil.hpp"
41 #include "deStringUtil.hpp"
52 namespace BindingModel
60 deUint32 getDescriptorNumericValue (deUint32 iteration, deUint32 bindingIdx, deUint32 descriptorIdx = 0u)
62 // When assigning numeric values for the descriptor contents, each descriptor will get 0x5aIIBBDD. II is an octed containing the
63 // iteration index. BB is an octet containing the binding index and DD is the descriptor index inside that binding.
64 constexpr deUint32 kNumericValueBase = 0x5a000000u;
66 return (kNumericValueBase | ((iteration & 0xFFu) << 16) | ((bindingIdx & 0xFFu) << 8) | (descriptorIdx & 0xFFu));
69 deUint16 getAccelerationStructureOffsetX (deUint32 descriptorNumericValue)
71 // Keep the lowest 16 bits (binding and descriptor idx) as the offset.
72 return static_cast<deUint16>(descriptorNumericValue);
// Value that will be stored in the output buffer to signal success reading values.
// NOTE(review): the function body is not visible in this chunk of the file; the concrete sentinel value must be
// confirmed against the full source.
deUint32 getExpectedOutputBufferValue ()
// This value will be stored in an image to be sampled when checking descriptors containing samplers alone.
// NOTE(review): the function body is not visible in this chunk of the file; the concrete value must be confirmed
// against the full source.
deUint32 getExternalSampledImageValue ()
// Value that will be ORed with the descriptor value before writing.
// NOTE(review): the function body is not visible in this chunk of the file; the concrete mask must be confirmed
// against the full source.
deUint32 getStoredValueMask ()
93 VkFormat getDescriptorImageFormat ()
95 return VK_FORMAT_R32_UINT;
98 VkExtent3D getDefaultExtent ()
100 return makeExtent3D(1u, 1u, 1u);
103 // Convert value to hexadecimal.
104 std::string toHex (deUint32 val)
106 std::ostringstream s;
107 s << "0x" << std::hex << val << "u";
111 // Returns the list of descriptor types that cannot be part of a mutable descriptor.
112 std::vector<VkDescriptorType> getForbiddenMutableTypes ()
114 return std::vector<VkDescriptorType>
116 VK_DESCRIPTOR_TYPE_MUTABLE_VALVE,
117 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
118 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
119 VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
123 // Returns the list of descriptor types that are mandatory for the extension.
124 std::vector<VkDescriptorType> getMandatoryMutableTypes ()
126 return std::vector<VkDescriptorType>
128 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
129 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
130 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
131 VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
132 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
133 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
137 // This helps quickly transform a vector of descriptor types into a bitmask, which makes it easier to check some conditions.
138 enum DescriptorTypeFlagBits
140 DTFB_SAMPLER = (1 << 0),
141 DTFB_COMBINED_IMAGE_SAMPLER = (1 << 1),
142 DTFB_SAMPLED_IMAGE = (1 << 2),
143 DTFB_STORAGE_IMAGE = (1 << 3),
144 DTFB_UNIFORM_TEXEL_BUFFER = (1 << 4),
145 DTFB_STORAGE_TEXEL_BUFFER = (1 << 5),
146 DTFB_UNIFORM_BUFFER = (1 << 6),
147 DTFB_STORAGE_BUFFER = (1 << 7),
148 DTFB_UNIFORM_BUFFER_DYNAMIC = (1 << 8),
149 DTFB_STORAGE_BUFFER_DYNAMIC = (1 << 9),
150 DTFB_INPUT_ATTACHMENT = (1 << 10),
151 DTFB_INLINE_UNIFORM_BLOCK_EXT = (1 << 11),
152 DTFB_ACCELERATION_STRUCTURE_KHR = (1 << 12),
153 DTFB_ACCELERATION_STRUCTURE_NV = (1 << 13),
154 DTFB_MUTABLE_VALVE = (1 << 14),
157 using DescriptorTypeFlags = deUint32;
159 // Convert type to its corresponding flag bit.
160 DescriptorTypeFlagBits toDescriptorTypeFlagBit (VkDescriptorType descriptorType)
162 switch (descriptorType)
164 case VK_DESCRIPTOR_TYPE_SAMPLER: return DTFB_SAMPLER;
165 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: return DTFB_COMBINED_IMAGE_SAMPLER;
166 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: return DTFB_SAMPLED_IMAGE;
167 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: return DTFB_STORAGE_IMAGE;
168 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: return DTFB_UNIFORM_TEXEL_BUFFER;
169 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: return DTFB_STORAGE_TEXEL_BUFFER;
170 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: return DTFB_UNIFORM_BUFFER;
171 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: return DTFB_STORAGE_BUFFER;
172 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: return DTFB_UNIFORM_BUFFER_DYNAMIC;
173 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: return DTFB_STORAGE_BUFFER_DYNAMIC;
174 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: return DTFB_INPUT_ATTACHMENT;
175 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: return DTFB_INLINE_UNIFORM_BLOCK_EXT;
176 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: return DTFB_ACCELERATION_STRUCTURE_KHR;
177 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV: return DTFB_ACCELERATION_STRUCTURE_NV;
178 case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE: return DTFB_MUTABLE_VALVE;
187 // Convert vector of descriptor types to a bitfield.
188 DescriptorTypeFlags toDescriptorTypeFlags (const std::vector<VkDescriptorType>& types)
190 DescriptorTypeFlags result = 0u;
191 for (const auto& t : types)
192 result |= toDescriptorTypeFlagBit(t);
196 // Convert bitfield to vector of descriptor types.
197 std::vector<VkDescriptorType> toDescriptorTypeVector (DescriptorTypeFlags bitfield)
199 std::vector<VkDescriptorType> result;
201 if (bitfield & DTFB_SAMPLER) result.push_back(VK_DESCRIPTOR_TYPE_SAMPLER);
202 if (bitfield & DTFB_COMBINED_IMAGE_SAMPLER) result.push_back(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
203 if (bitfield & DTFB_SAMPLED_IMAGE) result.push_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
204 if (bitfield & DTFB_STORAGE_IMAGE) result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
205 if (bitfield & DTFB_UNIFORM_TEXEL_BUFFER) result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
206 if (bitfield & DTFB_STORAGE_TEXEL_BUFFER) result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
207 if (bitfield & DTFB_UNIFORM_BUFFER) result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
208 if (bitfield & DTFB_STORAGE_BUFFER) result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
209 if (bitfield & DTFB_UNIFORM_BUFFER_DYNAMIC) result.push_back(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
210 if (bitfield & DTFB_STORAGE_BUFFER_DYNAMIC) result.push_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
211 if (bitfield & DTFB_INPUT_ATTACHMENT) result.push_back(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
212 if (bitfield & DTFB_INLINE_UNIFORM_BLOCK_EXT) result.push_back(VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);
213 if (bitfield & DTFB_ACCELERATION_STRUCTURE_KHR) result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
214 if (bitfield & DTFB_ACCELERATION_STRUCTURE_NV) result.push_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV);
215 if (bitfield & DTFB_MUTABLE_VALVE) result.push_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE);
// How to create the source set when copying descriptors from another set.
// * MUTABLE means to transform bindings into mutable bindings.
// * NONMUTABLE means to transform bindings into non-mutable bindings.
// NOTE(review): the enumerator lists of the two enums below are not visible in this chunk of the file.
enum class SourceSetStrategy
enum class PoolMutableStrategy
// Type of information that's present in VkWriteDescriptorSet.
// NOTE(review): this enumerator belongs to a WriteType enum whose other members are not visible here.
ACCELERATION_STRUCTURE_INFO,
// Payload storage for the different VkWriteDescriptorSet variants. NOTE(review): the enclosing struct/union
// declaration is not visible in this chunk; presumably these members overlap (union) -- confirm in the full source.
VkDescriptorImageInfo imageInfo;
VkDescriptorBufferInfo bufferInfo;
VkBufferView bufferView;
VkWriteDescriptorSetAccelerationStructureKHR asInfo;
// Each constructor tags the write with the payload kind it carries; the constructor bodies (which would store
// the argument into the matching member) are not visible in this chunk.
explicit WriteInfo (const VkDescriptorImageInfo& info_)
: writeType(WriteType::IMAGE_INFO)
explicit WriteInfo (const VkDescriptorBufferInfo& info_)
: writeType(WriteType::BUFFER_INFO)
explicit WriteInfo (VkBufferView view_)
: writeType(WriteType::BUFFER_VIEW)
explicit WriteInfo (const VkWriteDescriptorSetAccelerationStructureKHR& asInfo_)
: writeType(WriteType::ACCELERATION_STRUCTURE_INFO)
// Resource backing up a single binding.
// NOTE(review): additional enumerators (SAMPLER, IMAGE, BUFFER, BUFFER_VIEW -- all referenced by toResourceType
// below) are not visible in this chunk of the file.
enum class ResourceType
COMBINED_IMAGE_SAMPLER,
ACCELERATION_STRUCTURE,
289 // Type of resource backing up a particular descriptor type.
290 ResourceType toResourceType (VkDescriptorType descriptorType)
292 ResourceType resourceType = ResourceType::SAMPLER;
293 switch (descriptorType)
295 case VK_DESCRIPTOR_TYPE_SAMPLER:
296 resourceType = ResourceType::SAMPLER;
299 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
300 resourceType = ResourceType::COMBINED_IMAGE_SAMPLER;
303 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
304 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
305 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
306 resourceType = ResourceType::IMAGE;
309 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
310 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
311 resourceType = ResourceType::BUFFER_VIEW;
314 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
315 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
316 resourceType = ResourceType::BUFFER;
319 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
320 resourceType = ResourceType::ACCELERATION_STRUCTURE;
331 bool isShaderWritable (VkDescriptorType descriptorType)
333 return (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER || descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
334 descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
337 Move<VkSampler> makeDefaultSampler (const DeviceInterface& vkd, VkDevice device)
339 const VkSamplerCreateInfo samplerCreateInfo = {
340 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, // VkStructureType sType;
341 nullptr, // const void* pNext;
342 0u, // VkSamplerCreateFlags flags;
343 VK_FILTER_NEAREST, // VkFilter magFilter;
344 VK_FILTER_NEAREST, // VkFilter minFilter;
345 VK_SAMPLER_MIPMAP_MODE_NEAREST, // VkSamplerMipmapMode mipmapMode;
346 VK_SAMPLER_ADDRESS_MODE_REPEAT, // VkSamplerAddressMode addressModeU;
347 VK_SAMPLER_ADDRESS_MODE_REPEAT, // VkSamplerAddressMode addressModeV;
348 VK_SAMPLER_ADDRESS_MODE_REPEAT, // VkSamplerAddressMode addressModeW;
349 0.f, // float mipLodBias;
350 VK_FALSE, // VkBool32 anisotropyEnable;
351 1.f, // float maxAnisotropy;
352 VK_FALSE, // VkBool32 compareEnable;
353 VK_COMPARE_OP_ALWAYS, // VkCompareOp compareOp;
354 0.f, // float minLod;
355 0.f, // float maxLod;
356 VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, // VkBorderColor borderColor;
357 VK_FALSE, // VkBool32 unnormalizedCoordinates;
360 return createSampler(vkd, device, &samplerCreateInfo);
363 de::MovePtr<ImageWithMemory> makeDefaultImage (const DeviceInterface& vkd, VkDevice device, Allocator& alloc)
365 const auto extent = makeExtent3D(1u, 1u, 1u);
366 const VkImageUsageFlags usageFlags = (
367 VK_IMAGE_USAGE_SAMPLED_BIT
368 | VK_IMAGE_USAGE_STORAGE_BIT
369 | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
370 | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
371 | VK_IMAGE_USAGE_TRANSFER_SRC_BIT
372 | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
374 const VkImageCreateInfo imageCreateInfo = {
375 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
376 nullptr, // const void* pNext;
377 0u, // VkImageCreateFlags flags;
378 VK_IMAGE_TYPE_2D, // VkImageType imageType;
379 getDescriptorImageFormat(), // VkFormat format;
380 extent, // VkExtent3D extent;
381 1u, // deUint32 mipLevels;
382 1u, // deUint32 arrayLayers;
383 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
384 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
385 usageFlags, // VkImageUsageFlags usage;
386 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
387 0u, // deUint32 queueFamilyIndexCount;
388 nullptr, // const deUint32* pQueueFamilyIndices;
389 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
391 return de::MovePtr<ImageWithMemory>(new ImageWithMemory(vkd, device, alloc, imageCreateInfo, MemoryRequirement::Any));
394 Move<VkImageView> makeDefaultImageView (const DeviceInterface& vkd, VkDevice device, VkImage image)
396 const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
397 return makeImageView(vkd, device, image, VK_IMAGE_VIEW_TYPE_2D, getDescriptorImageFormat(), subresourceRange);
400 de::MovePtr<BufferWithMemory> makeDefaultBuffer (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 numElements = 1u)
402 const VkBufferUsageFlags bufferUsage = (
403 VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
404 | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT
405 | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT
406 | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
407 | VK_BUFFER_USAGE_TRANSFER_SRC_BIT
408 | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
410 const auto bufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numElements));
412 const auto bufferCreateInfo = makeBufferCreateInfo(bufferSize, bufferUsage);
414 return de::MovePtr<BufferWithMemory>(new BufferWithMemory(vkd, device, alloc, bufferCreateInfo, MemoryRequirement::HostVisible));
417 Move<VkBufferView> makeDefaultBufferView (const DeviceInterface& vkd, VkDevice device, VkBuffer buffer)
419 const auto bufferOffset = static_cast<VkDeviceSize>(0);
420 const auto bufferSize = static_cast<VkDeviceSize>(sizeof(deUint32));
422 return makeBufferView(vkd, device, buffer, getDescriptorImageFormat(), bufferOffset, bufferSize);
// Owns the top-level and bottom-level acceleration structures backing an acceleration structure descriptor.
// NOTE(review): the tlas/blas member declarations and the move-constructor/move-assignment bodies are not
// visible in this chunk of the file.
struct AccelerationStructureData
using TLASPtr = de::MovePtr<TopLevelAccelerationStructure>;
using BLASPtr = de::MovePtr<BottomLevelAccelerationStructure>;
// Exchanges TLAS/BLAS ownership with another instance by releasing both sides and re-wrapping the raw pointers.
void swap (AccelerationStructureData& other)
auto myTlasPtr = tlas.release();
auto myBlasPtr = blas.release();
auto otherTlasPtr = other.tlas.release();
auto otherBlasPtr = other.blas.release();
tlas = TLASPtr(otherTlasPtr);
blas = BLASPtr(otherBlasPtr);
other.tlas = TLASPtr(myTlasPtr);
other.blas = BLASPtr(myBlasPtr);
// Default state: no acceleration structures owned.
AccelerationStructureData () : tlas() , blas() {}
// Move construction delegates to the default constructor first; the body (presumably a swap with 'other') is
// not visible in this chunk.
AccelerationStructureData (AccelerationStructureData&& other)
: AccelerationStructureData()
// Move assignment; body not visible in this chunk.
AccelerationStructureData& operator= (AccelerationStructureData&& other)
// Builds a BLAS with a single triangle (or, when 'triangles' is false, presumably an AABB) around (offsetX, 0)
// at depth 5.0, then wraps it in a single-instance TLAS. Recording happens in the given command buffer.
// NOTE(review): the if/else wrapping the two vertex setups, and the final 'return data;', are not visible in
// this chunk of the file.
AccelerationStructureData makeDefaultAccelerationStructure (const DeviceInterface& vkd, VkDevice device, VkCommandBuffer cmdBuffer, Allocator& alloc, bool triangles, deUint16 offsetX)
AccelerationStructureData data;
// Triangle around (offsetX, 0) with depth 5.0.
const float middleX = static_cast<float>(offsetX);
const float leftX = middleX - 0.5f;
const float rightX = middleX + 0.5f;
const float topY = 0.5f;
const float bottomY = -0.5f;
const float depth = 5.0f;
std::vector<tcu::Vec3> vertices;
// Three vertices describing the triangle geometry.
vertices.reserve(3u);
vertices.emplace_back(middleX, topY, depth);
vertices.emplace_back(rightX, bottomY, depth);
vertices.emplace_back(leftX, bottomY, depth);
// Two corner points (AABB-style geometry); the branch selecting between this and the triangle path is not visible here.
vertices.reserve(2u);
vertices.emplace_back(leftX, bottomY, depth);
vertices.emplace_back(rightX, topY, depth);
data.tlas = makeTopLevelAccelerationStructure();
data.blas = makeBottomLevelAccelerationStructure();
VkGeometryInstanceFlagsKHR instanceFlags = 0u;
// Presumably guarded by 'if (triangles)' in the full source -- confirm.
instanceFlags |= VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR;
data.blas->addGeometry(vertices, triangles, VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR);
data.blas->createAndBuild(vkd, device, cmdBuffer, alloc);
// The TLAS shares ownership of the just-built BLAS via a shared pointer.
de::SharedPtr<BottomLevelAccelerationStructure> blasSharedPtr (data.blas.release());
data.tlas->setInstanceCount(1u);
data.tlas->addInstance(blasSharedPtr, identityMatrix3x4, 0u, 0xFFu, 0u, instanceFlags);
data.tlas->createAndBuild(vkd, device, cmdBuffer, alloc);
// Access mask used when synchronizing shader reads/writes with transfers and host access.
const auto kShaderAccess = (VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT);
// Members of the resource wrapper below. NOTE(review): the enclosing 'struct Resource' header is not visible in
// this chunk of the file. Only the members matching the resource type are created; the rest stay null/empty.
VkDescriptorType descriptorType;
ResourceType resourceType;
Move<VkSampler> sampler;
de::MovePtr<ImageWithMemory> imageWithMemory;
Move<VkImageView> imageView;
de::MovePtr<BufferWithMemory> bufferWithMemory;
Move<VkBufferView> bufferView;
AccelerationStructureData asData;
deUint32 initialValue;
// Creates the backing object(s) for one descriptor of the given type and fills them with initialValue_.
// NOTE(review): part of the constructor initializer list and the surrounding braces are not visible in this chunk.
Resource (VkDescriptorType descriptorType_, const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, bool useAABBs, deUint32 initialValue_, deUint32 numElements = 1u)
: descriptorType (descriptorType_)
, resourceType (toResourceType(descriptorType))
, bufferWithMemory ()
, initialValue (initialValue_)
// Multi-element resources are only supported for plain buffers.
if (numElements != 1u)
DE_ASSERT(resourceType == ResourceType::BUFFER);
// Create the Vulkan objects needed for this resource type.
switch (resourceType)
case ResourceType::SAMPLER:
sampler = makeDefaultSampler(vkd, device);
case ResourceType::IMAGE:
imageWithMemory = makeDefaultImage(vkd, device, alloc);
imageView = makeDefaultImageView(vkd, device, imageWithMemory->get());
case ResourceType::COMBINED_IMAGE_SAMPLER:
sampler = makeDefaultSampler(vkd, device);
imageWithMemory = makeDefaultImage(vkd, device, alloc);
imageView = makeDefaultImageView(vkd, device, imageWithMemory->get());
case ResourceType::BUFFER:
bufferWithMemory = makeDefaultBuffer(vkd, device, alloc, numElements);
case ResourceType::BUFFER_VIEW:
bufferWithMemory = makeDefaultBuffer(vkd, device, alloc);
bufferView = makeDefaultBufferView(vkd, device, bufferWithMemory->get());
case ResourceType::ACCELERATION_STRUCTURE:
// Acceleration structures need a command buffer to record and submit their build commands.
const auto cmdPool = makeCommandPool(vkd, device, qIndex);
const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
const auto cmdBuffer = cmdBufferPtr.get();
const bool triangles = !useAABBs;
beginCommandBuffer(vkd, cmdBuffer);
asData = makeDefaultAccelerationStructure(vkd, device, cmdBuffer, alloc, triangles, getAccelerationStructureOffsetX(initialValue));
endCommandBuffer(vkd, cmdBuffer);
submitCommandsAndWait(vkd, device, queue, cmdBuffer);
// Initialize the created image or buffer with initialValue.
if (imageWithMemory || bufferWithMemory)
const auto cmdPool = makeCommandPool(vkd, device, qIndex);
const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
const auto cmdBuffer = cmdBufferPtr.get();
// Prepare staging buffer.
const auto bufferSize = static_cast<VkDeviceSize>(sizeof(initialValue));
const VkBufferUsageFlags bufferUsage = (VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
const auto stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);
BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
auto& bufferAlloc = stagingBuffer.getAllocation();
void* bufferData = bufferAlloc.getHostPtr();
deMemcpy(bufferData, &initialValue, sizeof(initialValue));
flushAlloc(vkd, device, bufferAlloc);
beginCommandBuffer(vkd, cmdBuffer);
// Transition and copy image.
const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));
// Switch image to TRANSFER_DST_OPTIMAL before copying data to it.
const auto subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
const auto preTransferBarrier = makeImageMemoryBarrier(
0u, VK_ACCESS_TRANSFER_WRITE_BIT,
VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
imageWithMemory->get(), subresourceRange);
vkd.cmdPipelineBarrier(
cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);
// Copy data to image.
// NOTE(review): '©Region' below is mojibake -- the original argument is '&copyRegion'; fix the encoding.
vkd.cmdCopyBufferToImage(cmdBuffer, stagingBuffer.get(), imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
// Switch image to the GENERAL layout before reading or writing to it from shaders.
const auto postTransferBarrier = makeImageMemoryBarrier(
VK_ACCESS_TRANSFER_WRITE_BIT, kShaderAccess,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
imageWithMemory->get(), subresourceRange);
vkd.cmdPipelineBarrier(
cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
0u, nullptr, 0u, nullptr, 1u, &postTransferBarrier);
endCommandBuffer(vkd, cmdBuffer);
submitCommandsAndWait(vkd, device, queue, cmdBuffer);
// Host-visible buffers are filled directly through the mapped pointer.
if (bufferWithMemory)
auto& bufferAlloc = bufferWithMemory->getAllocation();
void* bufferData = bufferAlloc.getHostPtr();
const std::vector<deUint32> bufferValues(numElements, initialValue);
deMemcpy(bufferData, bufferValues.data(), de::dataSize(bufferValues));
flushAlloc(vkd, device, bufferAlloc);
beginCommandBuffer(vkd, cmdBuffer);
// Make sure host writes happen before shader reads/writes. Note: this barrier is not needed in theory.
const auto hostToShaderBarrier = makeMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, kShaderAccess);
vkd.cmdPipelineBarrier(
cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0u,
1u, &hostToShaderBarrier, 0u, nullptr, 0u, nullptr);
endCommandBuffer(vkd, cmdBuffer);
submitCommandsAndWait(vkd, device, queue, cmdBuffer);
// Remove problematic copy constructor.
Resource (const Resource&) = delete;
// Move constructor: transfers ownership of all wrapped Vulkan objects.
// NOTE(review): the constructor body braces are not visible in this chunk of the file.
Resource (Resource&& other) noexcept
: descriptorType (other.descriptorType)
, resourceType (other.resourceType)
, sampler (other.sampler)
, imageWithMemory (other.imageWithMemory.release())
, imageView (other.imageView)
, bufferWithMemory (other.bufferWithMemory.release())
, bufferView (other.bufferView)
, asData (std::move(other.asData))
, initialValue (other.initialValue)
// Builds the VkWriteDescriptorSet payload (image/buffer/buffer-view/AS info) matching this resource's type.
// NOTE(review): the switch braces, break statements, default case and the final return are not visible in this
// chunk of the file.
WriteInfo makeWriteInfo () const
using WriteInfoPtr = de::MovePtr<WriteInfo>;
WriteInfoPtr writeInfo;
switch (resourceType)
case ResourceType::SAMPLER:
// Sampler-only write: no image view, layout irrelevant.
const VkDescriptorImageInfo imageInfo = { sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_UNDEFINED };
writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
case ResourceType::IMAGE:
const VkDescriptorImageInfo imageInfo = { DE_NULL, imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
case ResourceType::COMBINED_IMAGE_SAMPLER:
const VkDescriptorImageInfo imageInfo = { sampler.get(), imageView.get(), VK_IMAGE_LAYOUT_GENERAL };
writeInfo = WriteInfoPtr (new WriteInfo(imageInfo));
case ResourceType::BUFFER:
// Only the first deUint32 element is exposed through the descriptor.
const VkDescriptorBufferInfo bufferInfo = { bufferWithMemory->get(), 0ull, static_cast<VkDeviceSize>(sizeof(deUint32)) };
writeInfo = WriteInfoPtr (new WriteInfo(bufferInfo));
case ResourceType::BUFFER_VIEW:
writeInfo = WriteInfoPtr (new WriteInfo(bufferView.get()));
case ResourceType::ACCELERATION_STRUCTURE:
VkWriteDescriptorSetAccelerationStructureKHR asWrite = initVulkanStructure();
asWrite.accelerationStructureCount = 1u;
asWrite.pAccelerationStructures = asData.tlas.get()->getPtr();
writeInfo = WriteInfoPtr (new WriteInfo(asWrite));
// Reads back the value stored in this resource (image texel or buffer element 'position') after shader access.
// Returns nothing for resource types that hold no readable value.
// NOTE(review): several structural lines (braces, the 'result' declaration, early parts of the function) are not
// visible in this chunk of the file.
tcu::Maybe<deUint32> getStoredValue (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 position = 0u) const
// Reading a nonzero position only makes sense for multi-element buffers.
DE_ASSERT(static_cast<bool>(bufferWithMemory));
if (imageWithMemory || bufferWithMemory)
// Command pool and buffer.
const auto cmdPool = makeCommandPool(vkd, device, qIndex);
const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
const auto cmdBuffer = cmdBufferPtr.get();
// Prepare staging buffer.
const auto bufferSize = static_cast<VkDeviceSize>(sizeof(result));
const VkBufferUsageFlags bufferUsage = (VK_BUFFER_USAGE_TRANSFER_DST_BIT);
const auto stagingBufferInfo = makeBufferCreateInfo(bufferSize, bufferUsage);
BufferWithMemory stagingBuffer(vkd, device, alloc, stagingBufferInfo, MemoryRequirement::HostVisible);
auto& bufferAlloc = stagingBuffer.getAllocation();
void* bufferData = bufferAlloc.getHostPtr();
// Copy image value to staging buffer.
beginCommandBuffer(vkd, cmdBuffer);
// Make sure shader accesses happen before transfers and prepare image for transfer.
const auto colorResourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
const auto preTransferBarrier = makeImageMemoryBarrier(
kShaderAccess, VK_ACCESS_TRANSFER_READ_BIT,
VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
imageWithMemory->get(), colorResourceRange);
vkd.cmdPipelineBarrier(
cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u,
0u, nullptr, 0u, nullptr, 1u, &preTransferBarrier);
// Copy image contents to staging buffer.
const auto copyRegion = makeBufferImageCopy(makeExtent3D(1u, 1u, 1u),
makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u));
// NOTE(review): '©Region' below is mojibake -- the original argument is '&copyRegion'; fix the encoding.
vkd.cmdCopyImageToBuffer(cmdBuffer, imageWithMemory->get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, stagingBuffer.get(), 1u, ©Region);
// Make sure writes are visible from the host.
const auto postTransferBarrier = makeMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT);
vkd.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u, 1u, &postTransferBarrier, 0u, nullptr, 0u, nullptr);
endCommandBuffer(vkd, cmdBuffer);
submitCommandsAndWait(vkd, device, queue, cmdBuffer);
// Get value from staging buffer.
invalidateAlloc(vkd, device, bufferAlloc);
deMemcpy(&result, bufferData, sizeof(result));
return tcu::just(result);
// Host-visible buffers can be read directly after a shader-to-host barrier.
if (bufferWithMemory)
auto& bufferAlloc = bufferWithMemory->getAllocation();
auto bufferData = reinterpret_cast<const char*>(bufferAlloc.getHostPtr());
// Make sure shader writes are visible from the host.
beginCommandBuffer(vkd, cmdBuffer);
const auto shaderToHostBarrier = makeMemoryBarrier(kShaderAccess, VK_ACCESS_HOST_READ_BIT);
vkd.cmdPipelineBarrier(
cmdBuffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0u,
1u, &shaderToHostBarrier, 0u, nullptr, 0u, nullptr);
endCommandBuffer(vkd, cmdBuffer);
submitCommandsAndWait(vkd, device, queue, cmdBuffer);
invalidateAlloc(vkd, device, bufferAlloc);
deMemcpy(&result, bufferData + sizeof(deUint32) * static_cast<size_t>(position), sizeof(result));
return tcu::just(result);
821 struct BindingInterface
823 virtual ~BindingInterface () {}
825 // Minimum number of iterations to test all mutable types.
826 virtual deUint32 maxTypes () const = 0;
828 // Types that will be used by the binding at a given iteration.
829 virtual std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const = 0;
831 // Binding's main type.
832 virtual VkDescriptorType mainType () const = 0;
834 // Binding's list of mutable types, if present.
835 virtual std::vector<VkDescriptorType> mutableTypes () const = 0;
837 // Descriptor count in the binding.
838 virtual size_t size () const = 0;
840 // Is the binding an array binding?
841 virtual bool isArray () const = 0;
843 // Is the binding an unbounded array?
844 virtual bool isUnbounded () const = 0;
846 // Will the binding use different descriptor types in a given iteration?
847 virtual bool needsAliasing (deUint32 iteration) const
849 const auto typesVec = typesAtIteration(iteration);
850 std::set<VkDescriptorType> descTypes(begin(typesVec), end(typesVec));
851 return (descTypes.size() > 1u);
854 // Will the binding need aliasing on any iteration up to a given number?
855 virtual bool needsAliasingUpTo (deUint32 numIterations) const
857 std::vector<bool> needsAliasingFlags;
858 needsAliasingFlags.reserve(numIterations);
860 for (deUint32 iter = 0u; iter < numIterations; ++iter)
861 needsAliasingFlags.push_back(needsAliasing(iter));
863 return std::any_of(begin(needsAliasingFlags), end(needsAliasingFlags), [] (bool f) { return f; });
867 virtual bool hasDescriptorType (deUint32 iteration, VkDescriptorType descriptorType) const
869 const auto typesVec = typesAtIteration(iteration);
870 return (std::find(begin(typesVec), end(typesVec), descriptorType) != end(typesVec));
874 // Convert one particular binding to a mutable or non-mutable equivalent binding, returning the equivalent binding.
875 virtual de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const = 0;
876 virtual de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const = 0;
878 // Create resources needed to back up this binding.
879 virtual std::vector<Resource> createResources (
880 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
881 deUint32 iteration, bool useAABBs, deUint32 baseValue) const = 0;
883 // Get GLSL binding declarations. Note: no array size means no array, if size is < 0 it means unbounded array.
884 virtual std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const = 0;
886 // Get GLSL statements to check this binding.
887 virtual std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const = 0;
// Represents a single binding that will be used in a test.
// NOTE(review): access specifiers, the ': type (type_)' initializer-list entry and the constructor braces are
// not visible in this chunk of the file.
class SingleBinding : public BindingInterface
VkDescriptorType type; // The descriptor type.
std::vector<VkDescriptorType> mutableTypesVec; // The types that will be used for each iteration of a test if mutable.
// Validates the type/mutable-type-list combination on construction.
SingleBinding (VkDescriptorType type_, std::vector<VkDescriptorType> mutableTypes_)
, mutableTypesVec (std::move(mutableTypes_))
static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
const auto kBeginForbidden = begin(kForbiddenMutableTypes);
const auto kEndForbidden = end(kForbiddenMutableTypes);
// For release builds.
DE_UNREF(kBeginForbidden);
DE_UNREF(kEndForbidden);
// Non-mutable bindings carry no extra type list; mutable bindings need a non-empty list with no forbidden types.
if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
DE_ASSERT(mutableTypesVec.empty());
DE_ASSERT(!mutableTypesVec.empty());
DE_ASSERT(std::none_of(begin(mutableTypesVec), end(mutableTypesVec),
[&kBeginForbidden, &kEndForbidden] (VkDescriptorType t) -> bool {
return std::find(kBeginForbidden, kEndForbidden, t) != kEndForbidden;
924 deUint32 maxTypes () const override
926 if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
928 const auto vecSize = mutableTypesVec.size();
929 DE_ASSERT(vecSize <= std::numeric_limits<deUint32>::max());
930 return static_cast<deUint32>(vecSize);
933 VkDescriptorType typeAtIteration (deUint32 iteration) const
935 return typesAtIteration(iteration)[0];
938 std::vector<VkDescriptorType> usedTypes () const
940 if (type != VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
941 return std::vector<VkDescriptorType>(1u, type);
942 return mutableTypesVec;
945 std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
947 const auto typesVec = usedTypes();
948 return std::vector<VkDescriptorType>(1u, typesVec[static_cast<size_t>(iteration) % typesVec.size()]);
951 VkDescriptorType mainType () const override
956 std::vector<VkDescriptorType> mutableTypes () const override
958 return mutableTypesVec;
961 size_t size () const override
966 bool isArray () const override
971 bool isUnbounded () const override
// Convert this binding to an equivalent MUTABLE_VALVE binding: already-mutable
// bindings are cloned as-is; plain bindings get the mandatory mutable type set
// widened with their own type. Asserts the source type is not forbidden.
976 de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
980 static const auto kMandatoryMutableTypeFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
981 if (type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
983 const auto descFlags = toDescriptorTypeFlags(mutableTypesVec);
984 return de::MovePtr<BindingInterface>(new SingleBinding(type, toDescriptorTypeVector(descFlags)));
987 // Make sure it's not a forbidden mutable type.
988 static const auto kForbiddenMutableTypes = getForbiddenMutableTypes();
989 DE_ASSERT(std::find(begin(kForbiddenMutableTypes), end(kForbiddenMutableTypes), type) == end(kForbiddenMutableTypes));
991 // Convert the binding to mutable using a wider set of descriptor types if possible, including the binding type.
992 const auto descFlags = (kMandatoryMutableTypeFlags | toDescriptorTypeFlagBit(type));
994 return de::MovePtr<BindingInterface>(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, toDescriptorTypeVector(descFlags)));
// Freeze the binding to the concrete (non-mutable) type it takes on the given
// iteration; the mutable candidate list becomes empty.
997 de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
999 return de::MovePtr<BindingInterface>(new SingleBinding(typeAtIteration(iteration), std::vector<VkDescriptorType>()));
// Create the single backing resource (image/buffer/AS...) matching the type
// this binding takes on the given iteration. Return statement elided here.
1002 std::vector<Resource> createResources (
1003 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
1004 deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
1006 const auto descriptorType = typeAtIteration(iteration);
1008 std::vector<Resource> resources;
1009 resources.emplace_back(descriptorType, vkd, device, alloc, qIndex, queue, useAABBs, baseValue);
// Emit the GLSL declaration for this binding at the given iteration. The name
// is suffixed "_set_binding" (plus "[N]"/"[]" for arrays); storage images and
// texel buffers declare r32ui, input attachments add input_attachment_index.
// Break statements and the default case appear elided in this extraction.
1013 std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
1015 const auto descriptorType = typeAtIteration(iteration);
1016 const std::string arraySuffix = ((static_cast<bool>(arraySize)) ? ((arraySize.get() < 0) ? "[]" : ("[" + de::toString(arraySize.get()) + "]")) : "");
1017 const std::string layoutAttribs = "set=" + de::toString(setNum) + ", binding=" + de::toString(bindingNum);
1018 const std::string bindingSuffix = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
1019 const std::string nameSuffix = bindingSuffix + arraySuffix;
1020 std::ostringstream declarations;
1022 declarations << "layout (";
1024 switch (descriptorType)
1026 case VK_DESCRIPTOR_TYPE_SAMPLER:
1027 declarations << layoutAttribs << ") uniform sampler sampler" << nameSuffix;
1030 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1031 declarations << layoutAttribs << ") uniform usampler2D combinedSampler" << nameSuffix;
1034 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1035 declarations << layoutAttribs << ") uniform utexture2D sampledImage" << nameSuffix;
1038 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1039 declarations << layoutAttribs << ") uniform uboBlock" << bindingSuffix << " { uint val; } ubo" << nameSuffix;
1042 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1043 declarations << layoutAttribs << ") buffer sboBlock" << bindingSuffix << " { uint val; } ssbo" << nameSuffix;
1046 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1047 declarations << layoutAttribs << ") uniform utextureBuffer uniformTexel" << nameSuffix;
1050 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1051 declarations << layoutAttribs << ", r32ui) uniform uimageBuffer storageTexel" << nameSuffix;
1054 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1055 declarations << layoutAttribs << ", r32ui) uniform uimage2D storageImage" << nameSuffix;
1058 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1059 declarations << layoutAttribs << ", input_attachment_index=" << inputAttachmentIdx << ") uniform usubpassInput inputAttachment" << nameSuffix;
1062 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
1063 declarations << layoutAttribs << ") uniform accelerationStructureEXT accelerationStructure" << nameSuffix;
1071 declarations << ";\n";
1073 return declarations.str();
// Emit GLSL check code for this binding at the given iteration. Each case
// reads the descriptor, ORs a failure bit into an external "anyError" uint if
// the value differs from the expected base value, and (for writable storage
// descriptors) writes back value|mask. Acceleration structures instead trace a
// ray query and require a candidate hit. Array accesses optionally add
// "pc.zero" from a push constant to defeat constant folding. Braces, break
// statements and the default case appear elided in this extraction.
1076 std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
1078 const auto descriptorType = typeAtIteration(iteration);
1079 const std::string bindingSuffix = "_" + de::toString(setNum) + "_" + de::toString(bindingNum);
1081 std::string indexSuffix;
1084 indexSuffix = de::toString(arrayIndex.get());
1085 if (usePushConstants)
1086 indexSuffix += " + pc.zero";
1087 indexSuffix = "[" + indexSuffix + "]";
1090 const std::string nameSuffix = bindingSuffix + indexSuffix;
1091 const std::string baseValue = toHex(baseValue_);
1092 const std::string externalImageValue = toHex(getExternalSampledImageValue());
1093 const std::string mask = toHex(getStoredValueMask());
1095 std::ostringstream checks;
1097 // Note: all of these depend on an external anyError uint variable.
1098 switch (descriptorType)
1100 case VK_DESCRIPTOR_TYPE_SAMPLER:
1101 // Note this depends on an "externalSampledImage" binding.
1103 checks << " uint readValue = texture(usampler2D(externalSampledImage, sampler" << nameSuffix << "), vec2(0, 0)).r;\n";
1104 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1105 checks << " anyError |= ((readValue == " << externalImageValue << ") ? 0u : 1u);\n";
1106 //checks << " anyError = readValue;\n";
1110 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1112 checks << " uint readValue = texture(combinedSampler" << nameSuffix << ", vec2(0, 0)).r;\n";
1113 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1114 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1115 //checks << " anyError = readValue;\n";
1119 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1120 // Note this depends on an "externalSampler" binding.
1122 checks << " uint readValue = texture(usampler2D(sampledImage" << nameSuffix << ", externalSampler), vec2(0, 0)).r;\n";
1123 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1124 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1125 //checks << " anyError = readValue;\n";
1129 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1131 checks << " uint readValue = ubo" << nameSuffix << ".val;\n";
1132 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1133 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1134 //checks << " anyError = readValue;\n";
1138 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1140 checks << " uint readValue = ssbo" << nameSuffix << ".val;\n";
1141 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1142 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1143 //checks << " anyError = readValue;\n";
1145 checks << " ssbo" << nameSuffix << ".val = (readValue | " << mask << ");\n";
1149 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1151 checks << " uint readValue = texelFetch(uniformTexel" << nameSuffix << ", 0).x;\n";
1152 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1153 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1154 //checks << " anyError = readValue;\n";
1158 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1160 checks << " uint readValue = imageLoad(storageTexel" << nameSuffix << ", 0).x;\n";
1161 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1162 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1163 //checks << " anyError = readValue;\n";
1164 checks << " readValue |= " << mask << ";\n";
1166 checks << " imageStore(storageTexel" << nameSuffix << ", 0, uvec4(readValue, 0, 0, 0));\n";
1170 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1172 checks << " uint readValue = imageLoad(storageImage" << nameSuffix << ", ivec2(0, 0)).x;\n";
1173 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1174 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1175 //checks << " anyError = readValue;\n";
1176 checks << " readValue |= " << mask << ";\n";
1178 checks << " imageStore(storageImage" << nameSuffix << ", ivec2(0, 0), uvec4(readValue, 0, 0, 0));\n";
1182 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1184 checks << " uint readValue = subpassLoad(inputAttachment" << nameSuffix << ").x;\n";
1185 checks << " debugPrintfEXT(\"iteration-" << iteration << nameSuffix << ": 0x%xu\\n\", readValue);\n";
1186 checks << " anyError |= ((readValue == " << baseValue << ") ? 0u : 1u);\n";
1187 //checks << " anyError = readValue;\n";
1191 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
1193 checks << " const uint cullMask = 0xFF;\n";
1194 checks << " const vec3 origin = vec3(" << getAccelerationStructureOffsetX(baseValue_) << ".0, 0.0, 0.0);\n";
1195 checks << " const vec3 direction = vec3(0.0, 0.0, 1.0);\n";
1196 checks << " const float tmin = 1.0;\n";
1197 checks << " const float tmax = 10.0;\n";
1198 checks << " uint candidateFound = 0u;\n";
1199 checks << " rayQueryEXT rq;\n";
1200 checks << " rayQueryInitializeEXT(rq, accelerationStructure" << nameSuffix << ", gl_RayFlagsNoneEXT, cullMask, origin, tmin, direction, tmax);\n";
1201 checks << " while (rayQueryProceedEXT(rq)) {\n";
1202 checks << " const uint candidateType = rayQueryGetIntersectionTypeEXT(rq, false);\n";
1203 checks << " if (candidateType == gl_RayQueryCandidateIntersectionTriangleEXT || candidateType == gl_RayQueryCandidateIntersectionAABBEXT) {\n";
1204 checks << " candidateFound = 1u;\n";
1207 checks << " anyError |= ((candidateFound == 1u) ? 0u : 1u);\n";
1216 return checks.str();
1220 // Represents an array of bindings. Individual bindings are stored as SingleBindings because each one of them may take a different
1221 // type in each iteration (i.e. they can all have different descriptor type vectors).
// NOTE(review): interior lines (braces, some returns, "unbounded" member decl)
// appear elided in this extraction; code below left byte-identical.
1222 class ArrayBinding : public BindingInterface
1226 std::vector<SingleBinding> bindings;
1229 ArrayBinding (bool unbounded_, std::vector<SingleBinding> bindings_)
1230 : unbounded (unbounded_)
1231 , bindings (std::move(bindings_))
1233 // We need to check all single bindings have the same effective type, even if mutable descriptors have different orders.
1234 DE_ASSERT(!bindings.empty());
1236 std::set<VkDescriptorType> basicTypes;
1237 std::set<DescriptorTypeFlags> bindingTypes;
1239 for (const auto& b : bindings)
1241 basicTypes.insert(b.mainType());
1242 bindingTypes.insert(toDescriptorTypeFlags(b.usedTypes()));
1245 DE_ASSERT(basicTypes.size() == 1u);
1246 DE_ASSERT(bindingTypes.size() == 1u);
1248 // For release builds.
1249 DE_UNREF(basicTypes);
1250 DE_UNREF(bindingTypes);
// Max iteration count across array elements (elements may repeat types, so
// their used-type lists can differ in length).
1253 deUint32 maxTypes () const override
1255 // Each binding may have the same effective type but a different number of iterations due to repeated types.
1256 std::vector<size_t> bindingSizes;
1257 bindingSizes.reserve(bindings.size());
1259 std::transform(begin(bindings), end(bindings), std::back_inserter(bindingSizes),
1260 [] (const SingleBinding& b) { return b.usedTypes().size(); });
1262 const auto maxElement = std::max_element(begin(bindingSizes), end(bindingSizes));
1263 DE_ASSERT(maxElement != end(bindingSizes));
1264 DE_ASSERT(*maxElement <= std::numeric_limits<deUint32>::max());
1265 return static_cast<deUint32>(*maxElement);
// One type per array element for the given iteration (return elided here).
1268 std::vector<VkDescriptorType> typesAtIteration (deUint32 iteration) const override
1270 std::vector<VkDescriptorType> result;
1271 result.reserve(bindings.size());
1273 for (const auto& b : bindings)
1274 result.push_back(b.typeAtIteration(iteration));
// All elements share the same main type (asserted in the ctor), so delegate to element 0.
1279 VkDescriptorType mainType () const override
1281 return bindings[0].mainType();
1284 std::vector<VkDescriptorType> mutableTypes () const override
1286 return bindings[0].mutableTypes();
1289 size_t size () const override
1291 return bindings.size();
// Bodies elided for isArray/isUnbounded — presumably true / "unbounded"; confirm in full file.
1294 bool isArray () const override
1299 bool isUnbounded () const override
1304 de::MovePtr<BindingInterface> toMutable (deUint32 iteration) const override
1306 // Replicate the first binding once converted, as all are equivalent.
1307 const auto firstBindingPtr = bindings[0].toMutable(iteration);
1308 const auto firstBinding = *dynamic_cast<SingleBinding*>(firstBindingPtr.get());
1309 const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);
1311 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
1314 de::MovePtr<BindingInterface> toNonMutable (deUint32 iteration) const override
1316 // Make sure this binding can be converted to nonmutable for a given iteration.
1317 DE_ASSERT(!needsAliasing(iteration));
1319 // We could use each SingleBinding's toNonMutable(), but this is the same.
1320 const auto descType = bindings[0].typeAtIteration(iteration);
1321 const SingleBinding firstBinding (descType, std::vector<VkDescriptorType>());
1322 const std::vector<SingleBinding> newBindings (bindings.size(), firstBinding);
1324 return de::MovePtr<BindingInterface>(new ArrayBinding(unbounded, newBindings));
// One resource per element; base numeric value is offset by the element index
// so each element can be verified independently (return elided here).
1327 std::vector<Resource> createResources (
1328 const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue,
1329 deUint32 iteration, bool useAABBs, deUint32 baseValue) const override
1331 std::vector<Resource> resources;
1332 const auto numBindings = static_cast<deUint32>(bindings.size());
1334 for (deUint32 i = 0u; i < numBindings; ++i)
1336 auto resourceVec = bindings[i].createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, baseValue + i);
1337 resources.emplace_back(std::move(resourceVec[0]));
1343 // We will ignore the array size parameter.
1344 std::string glslDeclarations (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 inputAttachmentIdx, tcu::Maybe<deInt32> arraySize) const override
1346 const auto descriptorCount = bindings.size();
1347 const auto arraySizeVal = (isUnbounded() ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(descriptorCount)));
1349 DE_UNREF(arraySize);
1350 DE_ASSERT(descriptorCount < static_cast<size_t>(std::numeric_limits<deInt32>::max()));
1352 // Maybe a single declaration is enough.
1353 if (!needsAliasing(iteration))
1354 return bindings[0].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1356 // Aliasing needed. Avoid reusing types.
1357 const auto descriptorTypes = typesAtIteration(iteration);
1358 std::set<VkDescriptorType> usedTypes;
1359 std::ostringstream declarations;
1361 for (size_t descriptorIdx = 0u; descriptorIdx < descriptorCount; ++descriptorIdx)
1363 const auto& descriptorType = descriptorTypes[descriptorIdx];
1364 if (usedTypes.count(descriptorType) > 0)
1367 usedTypes.insert(descriptorType);
1368 declarations << bindings[descriptorIdx].glslDeclarations(iteration, setNum, bindingNum, inputAttachmentIdx, arraySizeVal);
1371 return declarations.str();
// Concatenate per-element check code; each element checks baseValue_ + its index.
1374 std::string glslCheckStatements (deUint32 iteration, deUint32 setNum, deUint32 bindingNum, deUint32 baseValue_, tcu::Maybe<deUint32> arrayIndex, bool usePushConstants) const override
1376 DE_ASSERT(!arrayIndex);
1377 DE_UNREF(arrayIndex); // For release builds.
1379 std::ostringstream checks;
1380 const auto numDescriptors = static_cast<deUint32>(bindings.size());
1382 for (deUint32 descriptorIdx = 0u; descriptorIdx < numDescriptors; ++descriptorIdx)
1384 const auto& binding = bindings[descriptorIdx];
1385 checks << binding.glslCheckStatements(iteration, setNum, bindingNum, baseValue_ + descriptorIdx, tcu::just(descriptorIdx), usePushConstants);
1388 return checks.str();
1392 class DescriptorSet;
1394 using DescriptorSetPtr = de::SharedPtr<DescriptorSet>;
1399 using BindingInterfacePtr = de::MovePtr<BindingInterface>;
1400 using BindingPtrVector = std::vector<BindingInterfacePtr>;
1403 BindingPtrVector bindings;
// Ctor: takes ownership of the binding vector (note: moves from the caller's
// lvalue reference, emptying it). A set must have at least one binding.
1406 explicit DescriptorSet (BindingPtrVector& bindings_)
1407 : bindings(std::move(bindings_))
1409 DE_ASSERT(!bindings.empty());
// Number of bindings in the set.
1412 size_t numBindings () const
1414 return bindings.size();
// Borrowed (non-owning) pointer to one binding; .at() bounds-checks the index.
1417 const BindingInterface* getBinding (size_t bindingIdx) const
1419 return bindings.at(bindingIdx).get();
1422 // Maximum number of descriptor types used by any binding in the set.
// (Return statement elided in this extraction — presumably *maxElement.)
1423 deUint32 maxTypes () const
1425 std::vector<deUint32> maxSizes;
1426 maxSizes.reserve(bindings.size());
1428 std::transform(begin(bindings), end(bindings), std::back_inserter(maxSizes),
1429 [] (const BindingInterfacePtr& b) { return b->maxTypes(); });
1431 const auto maxElement = std::max_element(begin(maxSizes), end(maxSizes));
1432 DE_ASSERT(maxElement != end(maxSizes));
1436 // Create another descriptor set that can be the source for copies when setting descriptor values.
// Each binding is converted to its mutable or non-mutable equivalent depending on the strategy.
1437 DescriptorSetPtr genSourceSet (SourceSetStrategy strategy, deUint32 iteration) const
1439 BindingPtrVector newBindings;
1440 for (const auto& b : bindings)
1442 if (strategy == SourceSetStrategy::MUTABLE)
1443 newBindings.push_back(b->toMutable(iteration));
1445 newBindings.push_back(b->toNonMutable(iteration));
1448 return DescriptorSetPtr(new DescriptorSet(newBindings));
1451 // Makes a descriptor pool that can be used when allocating descriptors for this set.
// One pool size per binding. For KEEP_TYPES/EXPAND_TYPES strategies a
// VkMutableDescriptorTypeCreateInfoVALVE is chained with one type list per
// binding (empty lists for non-mutable bindings); NO_TYPES omits the chain.
// Several braces/else lines appear elided in this extraction.
1452 Move<VkDescriptorPool> makeDescriptorPool (const DeviceInterface& vkd, VkDevice device, PoolMutableStrategy strategy, VkDescriptorPoolCreateFlags flags) const
1454 std::vector<VkDescriptorPoolSize> poolSizes;
1455 std::vector<std::vector<VkDescriptorType>> mutableTypesVec;
1456 std::vector<VkMutableDescriptorTypeListVALVE> mutableTypeLists;
1458 // Make vector element addresses stable (type lists point into mutableTypesVec).
1459 const auto bindingCount = numBindings();
1460 poolSizes.reserve(bindingCount);
1461 mutableTypesVec.reserve(bindingCount);
1462 mutableTypeLists.reserve(bindingCount);
1464 for (const auto& b : bindings)
1466 const auto mainType = b->mainType();
1467 const VkDescriptorPoolSize poolSize = {
1469 static_cast<deUint32>(b->size()),
1471 poolSizes.push_back(poolSize);
1473 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1475 if (mainType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE)
1477 if (strategy == PoolMutableStrategy::KEEP_TYPES)
1479 mutableTypesVec.emplace_back(b->mutableTypes());
1483 // Expand the type list with the mandatory types.
1484 static const auto mandatoryTypesFlags = toDescriptorTypeFlags(getMandatoryMutableTypes());
1485 const auto bindingTypes = toDescriptorTypeVector(mandatoryTypesFlags | toDescriptorTypeFlags(b->mutableTypes()));
1487 mutableTypesVec.emplace_back(bindingTypes);
1490 const auto& lastVec = mutableTypesVec.back();
1491 const VkMutableDescriptorTypeListVALVE typeList = { static_cast<deUint32>(lastVec.size()), de::dataOrNull(lastVec) };
1492 mutableTypeLists.push_back(typeList);
1496 const VkMutableDescriptorTypeListVALVE typeList = { 0u, nullptr };
1497 mutableTypeLists.push_back(typeList);
1500 else if (strategy == PoolMutableStrategy::NO_TYPES)
1501 ; // Do nothing, we will not use any type list.
1506 VkDescriptorPoolCreateInfo poolCreateInfo = initVulkanStructure();
1508 poolCreateInfo.maxSets = 1u;
1509 poolCreateInfo.flags = flags;
1510 poolCreateInfo.poolSizeCount = static_cast<deUint32>(poolSizes.size());
1511 poolCreateInfo.pPoolSizes = de::dataOrNull(poolSizes);
1513 VkMutableDescriptorTypeCreateInfoVALVE mutableInfo = initVulkanStructure();
1515 if (strategy == PoolMutableStrategy::KEEP_TYPES || strategy == PoolMutableStrategy::EXPAND_TYPES)
1517 mutableInfo.mutableDescriptorTypeListCount = static_cast<deUint32>(mutableTypeLists.size());
1518 mutableInfo.pMutableDescriptorTypeLists = de::dataOrNull(mutableTypeLists);
1519 poolCreateInfo.pNext = &mutableInfo;
1522 return createDescriptorPool(vkd, device, &poolCreateInfo);
1526 // Building the descriptor set layout create info structure is cumbersome, so we'll reuse the same procedure to check support
1527 // and create the layout. This structure contains the result. "supported" is created as an enum to avoid the Move<> to bool
1528 // conversion cast in the contructors.
1529 struct DescriptorSetLayoutResult
1531 enum class LayoutSupported { NO = 0, YES };
1533 LayoutSupported supported;
1534 Move<VkDescriptorSetLayout> layout;
// Ctor for the "layout was created" case (initializer-list lines elided here).
1536 explicit DescriptorSetLayoutResult (Move<VkDescriptorSetLayout>&& layout_)
1537 : supported (LayoutSupported::YES)
// Ctor for the "support check only" case: no layout object is created.
1541 explicit DescriptorSetLayoutResult (LayoutSupported supported_)
1542 : supported (supported_)
// Build the layout create info once and either query support (checkOnly) via
// vkGetDescriptorSetLayoutSupport or actually create the layout. Chains a
// VkMutableDescriptorTypeCreateInfoVALVE (one list per binding) and, when
// needed, binding flags for update-after-bind, a variable-count last binding,
// and PARTIALLY_BOUND for bindings that require aliasing. Some braces and
// return statements appear elided in this extraction.
1547 DescriptorSetLayoutResult makeOrCheckDescriptorSetLayout (bool checkOnly, const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1549 const auto numIterations = maxTypes();
1550 std::vector<VkDescriptorSetLayoutBinding> bindingsVec;
1551 std::vector<std::vector<VkDescriptorType>> mutableTypesVec;
1552 std::vector<VkMutableDescriptorTypeListVALVE> mutableTypeLists;
1554 // Make vector element addresses stable (type lists point into mutableTypesVec).
1555 const auto bindingCount = numBindings();
1556 bindingsVec.reserve(bindingCount);
1557 mutableTypesVec.reserve(bindingCount);
1558 mutableTypeLists.reserve(bindingCount);
1560 for (size_t bindingIdx = 0u; bindingIdx < bindings.size(); ++bindingIdx)
1562 const auto& binding = bindings[bindingIdx];
1563 const auto mainType = binding->mainType();
1565 const VkDescriptorSetLayoutBinding layoutBinding = {
1566 static_cast<deUint32>(bindingIdx), // deUint32 binding;
1567 mainType, // VkDescriptorType descriptorType;
1568 static_cast<deUint32>(binding->size()), // deUint32 descriptorCount;
1569 stageFlags, // VkShaderStageFlags stageFlags;
1570 nullptr, // const VkSampler* pImmutableSamplers;
1572 bindingsVec.push_back(layoutBinding);
1574 // This list may be empty for non-mutable types, which is fine.
1575 mutableTypesVec.push_back(binding->mutableTypes());
1576 const auto& lastVec = mutableTypesVec.back();
1578 const VkMutableDescriptorTypeListVALVE typeList = {
1579 static_cast<deUint32>(lastVec.size()), // deUint32 descriptorTypeCount;
1580 de::dataOrNull(lastVec), // const VkDescriptorType* pDescriptorTypes;
1582 mutableTypeLists.push_back(typeList);
1585 // Make sure to include the variable descriptor count and/or update after bind binding flags.
1586 const bool updateAfterBind = ((createFlags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT) != 0u);
1587 bool lastIsUnbounded = false;
1588 bool aliasingNeded = false;
1589 std::vector<bool> bindingNeedsAliasing(bindings.size(), false);
1591 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1593 if (bindingIdx < bindings.size() - 1)
1594 DE_ASSERT(!bindings[bindingIdx]->isUnbounded());
1596 lastIsUnbounded = bindings[bindingIdx]->isUnbounded();
1598 if (bindings[bindingIdx]->needsAliasingUpTo(numIterations))
1600 bindingNeedsAliasing[bindingIdx] = true;
1601 aliasingNeded = true;
1605 using FlagsCreateInfoPtr = de::MovePtr<VkDescriptorSetLayoutBindingFlagsCreateInfo>;
1606 using BindingFlagsVecPtr = de::MovePtr<std::vector<VkDescriptorBindingFlags>>;
1608 FlagsCreateInfoPtr flagsCreateInfo;
1609 BindingFlagsVecPtr bindingFlagsVec;
1611 if (updateAfterBind || lastIsUnbounded || aliasingNeded)
1613 flagsCreateInfo = FlagsCreateInfoPtr(new VkDescriptorSetLayoutBindingFlagsCreateInfo);
1614 *flagsCreateInfo = initVulkanStructure();
1616 bindingFlagsVec = BindingFlagsVecPtr(new std::vector<VkDescriptorBindingFlags>(bindingsVec.size(), (updateAfterBind ? VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT : 0)));
1617 if (lastIsUnbounded)
1618 bindingFlagsVec->back() |= VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT;
1620 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1622 if (bindingNeedsAliasing[bindingIdx])
1623 bindingFlagsVec->at(bindingIdx) |= VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
1626 flagsCreateInfo->bindingCount = static_cast<deUint32>(bindingFlagsVec->size());
1627 flagsCreateInfo->pBindingFlags = de::dataOrNull(*bindingFlagsVec);
1630 const VkMutableDescriptorTypeCreateInfoVALVE createInfoValve = {
1631 VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE,
1632 flagsCreateInfo.get(),
1633 static_cast<deUint32>(mutableTypeLists.size()),
1634 de::dataOrNull(mutableTypeLists),
1637 const VkDescriptorSetLayoutCreateInfo layoutCreateInfo = {
1638 VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, // VkStructureType sType;
1639 &createInfoValve, // const void* pNext;
1640 createFlags, // VkDescriptorSetLayoutCreateFlags flags;
1641 static_cast<deUint32>(bindingsVec.size()), // deUint32 bindingCount;
1642 de::dataOrNull(bindingsVec), // const VkDescriptorSetLayoutBinding* pBindings;
1647 VkDescriptorSetLayoutSupport support = initVulkanStructure();
1648 vkd.getDescriptorSetLayoutSupport(device, &layoutCreateInfo, &support);
1649 DescriptorSetLayoutResult result((support.supported == VK_TRUE) ? DescriptorSetLayoutResult::LayoutSupported::YES
1650 : DescriptorSetLayoutResult::LayoutSupported::NO);
1655 DescriptorSetLayoutResult result(createDescriptorSetLayout(vkd, device, &layoutCreateInfo));
// Convenience wrapper: create the layout (checkOnly=false) and return it.
1661 Move<VkDescriptorSetLayout> makeDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1663 return makeOrCheckDescriptorSetLayout(false /*checkOnly*/, vkd, device, stageFlags, createFlags).layout;
// Convenience wrapper: query support only (checkOnly=true), no layout created.
1666 bool checkDescriptorSetLayout (const DeviceInterface& vkd, VkDevice device, VkShaderStageFlags stageFlags, VkDescriptorSetLayoutCreateFlags createFlags) const
1668 return (makeOrCheckDescriptorSetLayout(true /*checkOnly*/, vkd, device, stageFlags, createFlags).supported == DescriptorSetLayoutResult::LayoutSupported::YES);
// Total descriptor count over all bindings (accumulator and return elided in
// this extraction — presumably sums b->size(); confirm in full file).
1671 size_t numDescriptors () const
1674 for (const auto& b : bindings)
// Create backing resources for every binding; each binding's base numeric
// value encodes iteration and binding index (getDescriptorNumericValue).
// Return statement elided in this extraction.
1679 std::vector<Resource> createResources (const DeviceInterface& vkd, VkDevice device, Allocator& alloc, deUint32 qIndex, VkQueue queue, deUint32 iteration, bool useAABBs) const
1681 // Create resources for each binding.
1682 std::vector<Resource> result;
1683 result.reserve(numDescriptors());
1685 const auto bindingsCount = static_cast<deUint32>(bindings.size());
1687 for (deUint32 bindingIdx = 0u; bindingIdx < bindingsCount; ++bindingIdx)
1689 const auto& binding = bindings[bindingIdx];
1690 auto bindingResources = binding->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs, getDescriptorNumericValue(iteration, bindingIdx));
1692 for (auto& resource : bindingResources)
1693 result.emplace_back(std::move(resource));
1699 // Updates a descriptor set with the given resources. Note: the set must have been created with a layout that's compatible with this object.
// Writes are coalesced: consecutive descriptors in the same binding with the
// same type extend the previous VkWriteDescriptorSet (descriptorCount++)
// instead of emitting a new one, to exercise both driver code paths. Several
// braces/else lines and struct fields appear elided in this extraction.
1700 void updateDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet set, deUint32 iteration, const std::vector<Resource>& resources) const
1702 // Make sure the number of resources is correct.
1703 const auto numResources = resources.size();
1704 DE_ASSERT(numDescriptors() == numResources);
1706 std::vector<VkWriteDescriptorSet> descriptorWrites;
1707 descriptorWrites.reserve(numResources);
1709 std::vector<VkDescriptorImageInfo> imageInfoVec;
1710 std::vector<VkDescriptorBufferInfo> bufferInfoVec;
1711 std::vector<VkBufferView> bufferViewVec;
1712 std::vector<VkWriteDescriptorSetAccelerationStructureKHR> asWriteVec;
1713 size_t resourceIdx = 0;
1715 // We'll be storing pointers to elements of these vectors as we're appending elements, so we need their addresses to be stable.
1716 imageInfoVec.reserve(numResources);
1717 bufferInfoVec.reserve(numResources);
1718 bufferViewVec.reserve(numResources);
1719 asWriteVec.reserve(numResources);
1721 for (size_t bindingIdx = 0; bindingIdx < bindings.size(); ++bindingIdx)
1723 const auto& binding = bindings[bindingIdx];
1724 const auto descriptorTypes = binding->typesAtIteration(iteration);
1726 for (size_t descriptorIdx = 0; descriptorIdx < binding->size(); ++descriptorIdx)
1728 // Make sure the resource type matches the expected value.
1729 const auto& resource = resources[resourceIdx];
1730 const auto& descriptorType = descriptorTypes[descriptorIdx];
1732 DE_ASSERT(resource.descriptorType == descriptorType);
1734 // Obtain the descriptor write info for the resource.
1735 const auto writeInfo = resource.makeWriteInfo();
1737 switch (writeInfo.writeType)
1739 case WriteType::IMAGE_INFO: imageInfoVec.push_back(writeInfo.imageInfo); break;
1740 case WriteType::BUFFER_INFO: bufferInfoVec.push_back(writeInfo.bufferInfo); break;
1741 case WriteType::BUFFER_VIEW: bufferViewVec.push_back(writeInfo.bufferView); break;
1742 case WriteType::ACCELERATION_STRUCTURE_INFO: asWriteVec.push_back(writeInfo.asInfo); break;
1743 default: DE_ASSERT(false); break;
1746 // Add a new VkWriteDescriptorSet struct or extend the last one with more info. This helps us exercise different implementation code paths.
1747 bool extended = false;
1749 if (!descriptorWrites.empty() && descriptorIdx > 0)
1751 auto& last = descriptorWrites.back();
1752 if (last.dstSet == set /* this should always be true */ &&
1753 last.dstBinding == bindingIdx && (last.dstArrayElement + last.descriptorCount) == descriptorIdx &&
1754 last.descriptorType == descriptorType &&
1755 writeInfo.writeType != WriteType::ACCELERATION_STRUCTURE_INFO)
1757 // The new write should be in the same vector (imageInfoVec, bufferInfoVec or bufferViewVec) so increasing the count works.
1758 ++last.descriptorCount;
1765 const VkWriteDescriptorSet write = {
1766 VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
1767 ((writeInfo.writeType == WriteType::ACCELERATION_STRUCTURE_INFO) ? &asWriteVec.back() : nullptr),
1769 static_cast<deUint32>(bindingIdx),
1770 static_cast<deUint32>(descriptorIdx),
1773 (writeInfo.writeType == WriteType::IMAGE_INFO ? &imageInfoVec.back() : nullptr),
1774 (writeInfo.writeType == WriteType::BUFFER_INFO ? &bufferInfoVec.back() : nullptr),
1775 (writeInfo.writeType == WriteType::BUFFER_VIEW ? &bufferViewVec.back() : nullptr),
1777 descriptorWrites.push_back(write);
1784 // Finally, update descriptor set with all the writes.
1785 vkd.updateDescriptorSets(device, static_cast<deUint32>(descriptorWrites.size()), de::dataOrNull(descriptorWrites), 0u, nullptr);
1788 // Copies between descriptor sets. They must be compatible and related to this set.
// One VkCopyDescriptorSet per binding, copying the binding's full descriptor
// range starting at array element 0 (descriptorCount field elided here).
1789 void copyDescriptorSet (const DeviceInterface& vkd, VkDevice device, VkDescriptorSet srcSet, VkDescriptorSet dstSet) const
1791 std::vector<VkCopyDescriptorSet> copies;
1793 for (size_t bindingIdx = 0; bindingIdx < numBindings(); ++bindingIdx)
1795 const auto& binding = getBinding(bindingIdx);
1796 const auto bindingNumber = static_cast<deUint32>(bindingIdx);
1797 const auto descriptorCount = static_cast<deUint32>(binding->size());
1799 const VkCopyDescriptorSet copy =
1801 VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
1803 // set, binding, array element.
1804 srcSet, bindingNumber, 0u,
1805 dstSet, bindingNumber, 0u,
1809 copies.push_back(copy);
1812 vkd.updateDescriptorSets(device, 0u, nullptr, static_cast<deUint32>(copies.size()), de::dataOrNull(copies));
1815 // Does any binding in the set need aliasing in a given iteration?
1816 bool needsAliasing (deUint32 iteration) const
1818 std::vector<bool> aliasingNeededFlags;
1819 aliasingNeededFlags.reserve(bindings.size());
1821 std::transform(begin(bindings), end(bindings), std::back_inserter(aliasingNeededFlags),
1822 [iteration] (const BindingInterfacePtr& b) { return b->needsAliasing(iteration); });
1823 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1826 // Does any binding in the set need aliasing in any iteration?
1827 bool needsAnyAliasing () const
1829 const auto numIterations = maxTypes();
1830 std::vector<bool> aliasingNeededFlags (numIterations, false);
1832 for (deUint32 iteration = 0; iteration < numIterations; ++iteration)
1833 aliasingNeededFlags[iteration] = needsAliasing(iteration);
1835 return std::any_of(begin(aliasingNeededFlags), end(aliasingNeededFlags), [] (bool f) { return f; });
1838 // Is the last binding an unbounded array?
// (Early-return body for the empty case is elided in this extraction —
// presumably returns false; confirm in full file.)
1839 bool lastBindingIsUnbounded () const
1841 if (bindings.empty())
1843 return bindings.back()->isUnbounded();
1846 // Get the variable descriptor count for the last binding if any.
1847 tcu::Maybe<deUint32> getVariableDescriptorCount () const
1849 if (lastBindingIsUnbounded())
1850 return tcu::just(static_cast<deUint32>(bindings.back()->size()));
1851 return tcu::Nothing;
1854 // Check if the set contains a descriptor type of the given type at the given iteration.
1855 bool containsTypeAtIteration (VkDescriptorType descriptorType, deUint32 iteration) const
1857 return std::any_of(begin(bindings), end(bindings),
1858 [descriptorType, iteration] (const BindingInterfacePtr& b) {
1859 const auto types = b->typesAtIteration(iteration);
1860 return de::contains(begin(types), end(types), descriptorType);
1864 // Is any binding an array?
1865 bool hasArrays () const
1867 return std::any_of(begin(bindings), end(bindings), [] (const BindingInterfacePtr& b) { return b->isArray(); });
// How the destination descriptor set gets its contents: direct writes or copies from a
// source set (enumerators not visible in this chunk; UpdateType::COPY is used in checkSupport).
1871 enum class UpdateType
// Kind of source descriptor set used with copy updates; HOST_ONLY sets take the
// VK_*_HOST_ONLY_*_VALVE pool/layout flags (other enumerators not visible here).
1877 enum class SourceSetType
// Moment at which descriptor updates happen; UPDATE_AFTER_BIND requires the matching
// descriptor-indexing features (checked in checkSupport).
1884 enum class UpdateMoment
// Shader stage under test: compute, the graphics stages, or the ray tracing stages.
1890 enum class TestingStage
// How array descriptors are indexed from shaders; PUSH_CONSTANT makes indices come
// from a push constant block (see usePushConstants in initPrograms).
1906 enum class ArrayAccessType
1913 // Are we testing a ray tracing pipeline stage?
1914 bool isRayTracingStage (TestingStage stage)
1918 case TestingStage::RAY_GEN:
1919 case TestingStage::INTERSECTION:
1920 case TestingStage::ANY_HIT:
1921 case TestingStage::CLOSEST_HIT:
1922 case TestingStage::MISS:
1923 case TestingStage::CALLABLE:
// Parameters defining one mutable-descriptor-type test variant.
// The descriptor set whose bindings (possibly mutable) are exercised by the test.
1934 DescriptorSetPtr descriptorSet;
// Direct writes vs. copies from a source set (UpdateType::COPY triggers extra layout checks).
1935 UpdateType updateType;
// Strategy used to generate source sets when copying; passed to genSourceSet in checkSupport.
1936 SourceSetStrategy sourceSetStrategy;
// Normal or host-only source sets; host-only maps to the *_HOST_ONLY_*_VALVE flags.
1937 SourceSetType sourceSetType;
// NOTE(review): strategy for declaring mutable types in the descriptor pool; enumerators
// are not visible in this chunk — confirm against the pool creation code.
1938 PoolMutableStrategy poolMutableStrategy;
// Normal updates vs. update-after-bind.
1939 UpdateMoment updateMoment;
// How array descriptors are indexed from shaders (e.g. push-constant-based indices).
1940 ArrayAccessType arrayAccessType;
// Pipeline stage that reads and verifies the descriptors.
1941 TestingStage testingStage;
1943 VkShaderStageFlags getStageFlags () const
1945 VkShaderStageFlags flags = 0u;
1947 switch (testingStage)
1949 case TestingStage::COMPUTE: flags |= VK_SHADER_STAGE_COMPUTE_BIT; break;
1950 case TestingStage::VERTEX: flags |= VK_SHADER_STAGE_VERTEX_BIT; break;
1951 case TestingStage::TESS_EVAL: flags |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT; break;
1952 case TestingStage::TESS_CONTROL: flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT; break;
1953 case TestingStage::GEOMETRY: flags |= VK_SHADER_STAGE_GEOMETRY_BIT; break;
1954 case TestingStage::FRAGMENT: flags |= VK_SHADER_STAGE_FRAGMENT_BIT; break;
1955 case TestingStage::RAY_GEN: flags |= VK_SHADER_STAGE_RAYGEN_BIT_KHR; break;
1956 case TestingStage::INTERSECTION: flags |= VK_SHADER_STAGE_INTERSECTION_BIT_KHR; break;
1957 case TestingStage::ANY_HIT: flags |= VK_SHADER_STAGE_ANY_HIT_BIT_KHR; break;
1958 case TestingStage::CLOSEST_HIT: flags |= VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR; break;
1959 case TestingStage::MISS: flags |= VK_SHADER_STAGE_MISS_BIT_KHR; break;
1960 case TestingStage::CALLABLE: flags |= VK_SHADER_STAGE_CALLABLE_BIT_KHR; break;
1969 VkPipelineStageFlags getPipelineWriteStage () const
1971 VkPipelineStageFlags flags = 0u;
1973 switch (testingStage)
1975 case TestingStage::COMPUTE: flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break;
1976 case TestingStage::VERTEX: flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT; break;
1977 case TestingStage::TESS_EVAL: flags |= VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT; break;
1978 case TestingStage::TESS_CONTROL: flags |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT; break;
1979 case TestingStage::GEOMETRY: flags |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT; break;
1980 case TestingStage::FRAGMENT: flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break;
1981 case TestingStage::RAY_GEN: // fallthrough
1982 case TestingStage::INTERSECTION: // fallthrough
1983 case TestingStage::ANY_HIT: // fallthrough
1984 case TestingStage::CLOSEST_HIT: // fallthrough
1985 case TestingStage::MISS: // fallthrough
1986 case TestingStage::CALLABLE: flags |= VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR; break;
1996 VkDescriptorSetLayoutCreateFlags getLayoutCreateFlags (bool isSourceSet) const
1998 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
1999 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2001 VkDescriptorSetLayoutCreateFlags createFlags = 0u;
2003 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2004 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT;
2006 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2007 createFlags |= VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE;
2013 VkDescriptorSetLayoutCreateFlags getSrcLayoutCreateFlags () const
2015 return getLayoutCreateFlags(true);
2018 VkDescriptorSetLayoutCreateFlags getDstLayoutCreateFlags () const
2020 return getLayoutCreateFlags(false);
2024 VkDescriptorPoolCreateFlags getPoolCreateFlags (bool isSourceSet) const
2026 // UPDATE_AFTER_BIND cannot be used with HOST_ONLY sets.
2027 //DE_ASSERT(!(updateMoment == UpdateMoment::UPDATE_AFTER_BIND && sourceSetType == SourceSetType::HOST_ONLY));
2029 VkDescriptorPoolCreateFlags poolCreateFlags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
2031 if ((!isSourceSet || sourceSetType != SourceSetType::HOST_ONLY) && updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2032 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT;
2034 if (isSourceSet && sourceSetType == SourceSetType::HOST_ONLY)
2035 poolCreateFlags |= VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE;
2037 return poolCreateFlags;
2041 VkDescriptorPoolCreateFlags getSrcPoolCreateFlags () const
2043 return getPoolCreateFlags(true);
2046 VkDescriptorPoolCreateFlags getDstPoolCreateFlags () const
2048 return getPoolCreateFlags(false);
2051 VkPipelineBindPoint getBindPoint () const
2053 if (testingStage == TestingStage::COMPUTE)
2054 return VK_PIPELINE_BIND_POINT_COMPUTE;
2055 if (isRayTracingStage(testingStage))
2056 return VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR;
2057 return VK_PIPELINE_BIND_POINT_GRAPHICS;
2061 class MutableTypesTest : public TestCase
2064 MutableTypesTest (tcu::TestContext& testCtx, const std::string& name, const std::string& description, const TestParams& params)
2065 : TestCase(testCtx, name, description)
2069 ~MutableTypesTest () override = default;
2071 void initPrograms (vk::SourceCollections& programCollection) const override;
2072 TestInstance* createInstance (Context& context) const override;
2073 void checkSupport (Context& context) const override;
2076 TestParams m_params;
2079 class MutableTypesInstance : public TestInstance
2082 MutableTypesInstance (Context& context, const TestParams& params)
2083 : TestInstance (context)
2087 ~MutableTypesInstance () override = default;
2089 tcu::TestStatus iterate () override;
2092 TestParams m_params;
2095 // Check if a descriptor set contains a given descriptor type in any iteration up to maxTypes().
2096 bool containsAnyDescriptorType (const DescriptorSet& descriptorSet, VkDescriptorType descriptorType)
2098 const auto numIterations = descriptorSet.maxTypes();
2100 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2102 if (descriptorSet.containsTypeAtIteration(descriptorType, iter))
2109 // Check if testing this descriptor set needs an external image (for sampler descriptors).
2110 bool needsExternalImage (const DescriptorSet& descriptorSet)
2112 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLER);
2115 // Check if testing this descriptor set needs an external sampler (for sampled images).
2116 bool needsExternalSampler (const DescriptorSet& descriptorSet)
2118 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
2121 // Check if this descriptor set contains a input attachments.
2122 bool usesInputAttachments (const DescriptorSet& descriptorSet)
2124 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2127 // Check if this descriptor set contains acceleration structures.
2128 bool usesAccelerationStructures (const DescriptorSet& descriptorSet)
2130 return containsAnyDescriptorType(descriptorSet, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
2133 std::string shaderName (deUint32 iteration)
2135 return ("iteration-" + de::toString(iteration));
// Generates one GLSL shader per iteration, declaring the descriptor bindings exactly as
// they exist in that iteration and emitting check statements that compare descriptor
// contents against the expected numeric values. Also adds any passthrough shaders the
// tested pipeline stage requires (vertex, tese/tesc, ray generation, miss).
2138 void MutableTypesTest::initPrograms (vk::SourceCollections& programCollection) const
2140 const bool usePushConstants = (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT);
2141 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
2142 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
2143 const bool rayQueries = usesAccelerationStructures(*m_params.descriptorSet);
2144 const bool rayTracing = isRayTracingStage(m_params.testingStage);
2145 const auto numIterations = m_params.descriptorSet->maxTypes();
2146 const auto numBindings = m_params.descriptorSet->numBindings();
2147 const vk::ShaderBuildOptions rtBuildOptions (programCollection.usedVulkanVersion, vk::SPIRV_VERSION_1_4, 0u, true);
2149 // Extra set and bindings for external resources.
2150 std::ostringstream extraSet;
2151 deUint32 extraBindings = 0u;
// Set 1 always holds the output buffer used to report per-iteration results.
2153 extraSet << "layout (set=1, binding=" << extraBindings++ << ") buffer OutputBufferBlock { uint value[" << numIterations << "]; } outputBuffer;\n";
2154 if (useExternalImage)
2155 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform utexture2D externalSampledImage;\n";
2156 if (useExternalSampler)
2157 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform sampler externalSampler;\n";
2158 // The extra binding below will be declared in the "passthrough" ray generation shader.
// NOTE(review): this append is guarded in the original source (the guarding line is not
// visible in this chunk) — presumably on rayTracing/rayQueries; confirm upstream.
2161 extraSet << "layout (set=1, binding=" << extraBindings++ << ") uniform accelerationStructureEXT externalAS;\n";
2164 // Common vertex preamble.
2165 std::ostringstream vertexPreamble;
2167 << "vec2 vertexPositions[3] = vec2[](\n"
2168 << " vec2(0.0, -0.5),\n"
2169 << " vec2(0.5, 0.5),\n"
2170 << " vec2(-0.5, 0.5)\n"
2174 // Vertex shader body common statements.
2175 std::ostringstream vertexBodyCommon;
2176 vertexBodyCommon << " gl_Position = vec4(vertexPositions[gl_VertexIndex], 0.0, 1.0);\n";
2178 // Common tessellation control preamble.
2179 std::ostringstream tescPreamble;
2181 << "layout (vertices=3) out;\n"
2182 << "in gl_PerVertex\n"
2184 << " vec4 gl_Position;\n"
2185 << "} gl_in[gl_MaxPatchVertices];\n"
2186 << "out gl_PerVertex\n"
2188 << " vec4 gl_Position;\n"
2192 // Common tessellation control body.
2193 std::ostringstream tescBodyCommon;
2195 << " gl_TessLevelInner[0] = 1.0;\n"
2196 << " gl_TessLevelInner[1] = 1.0;\n"
2197 << " gl_TessLevelOuter[0] = 1.0;\n"
2198 << " gl_TessLevelOuter[1] = 1.0;\n"
2199 << " gl_TessLevelOuter[2] = 1.0;\n"
2200 << " gl_TessLevelOuter[3] = 1.0;\n"
2201 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2204 // Common tessellation evaluation preamble.
2205 std::ostringstream tesePreamble;
2207 << "layout (triangles, fractional_odd_spacing, cw) in;\n"
2208 << "in gl_PerVertex\n"
2210 << " vec4 gl_Position;\n"
2211 << "} gl_in[gl_MaxPatchVertices];\n"
2212 << "out gl_PerVertex\n"
2214 << " vec4 gl_Position;\n"
2218 // Common tessellation evaluation body.
2219 std::ostringstream teseBodyCommon;
2221 << " gl_Position = (gl_TessCoord.x * gl_in[0].gl_Position) +\n"
2222 << " (gl_TessCoord.y * gl_in[1].gl_Position) +\n"
2223 << " (gl_TessCoord.z * gl_in[2].gl_Position);\n"
// Shared shader preamble: version/extensions plus stage-specific declarations.
2227 std::ostringstream preamble;
2231 << "#extension GL_EXT_nonuniform_qualifier : enable\n"
2232 << "#extension GL_EXT_debug_printf : enable\n"
2233 << (rayTracing ? "#extension GL_EXT_ray_tracing : enable\n" : "")
2234 << (rayQueries ? "#extension GL_EXT_ray_query : enable\n" : "")
2238 if (m_params.testingStage == TestingStage::VERTEX)
2240 preamble << vertexPreamble.str();
2242 else if (m_params.testingStage == TestingStage::COMPUTE)
2245 << "layout (local_size_x=1, local_size_y=1, local_size_z=1) in;\n"
2249 else if (m_params.testingStage == TestingStage::GEOMETRY)
2252 << "layout (triangles) in;\n"
2253 << "layout (triangle_strip, max_vertices=3) out;\n"
2254 << "in gl_PerVertex\n"
2256 << " vec4 gl_Position;\n"
2258 << "out gl_PerVertex\n"
2260 << " vec4 gl_Position;\n"
2264 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2266 preamble << tescPreamble.str();
2268 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2270 preamble << tesePreamble.str();
2272 else if (m_params.testingStage == TestingStage::CALLABLE)
2274 preamble << "layout (location=0) callableDataInEXT float unusedCallableData;\n";
2276 else if (m_params.testingStage == TestingStage::CLOSEST_HIT ||
2277 m_params.testingStage == TestingStage::ANY_HIT ||
2278 m_params.testingStage == TestingStage::MISS)
2280 preamble << "layout (location=0) rayPayloadInEXT float unusedRayPayload;\n";
2282 else if (m_params.testingStage == TestingStage::INTERSECTION)
2284 preamble << "hitAttributeEXT vec3 hitAttribute;\n";
2287 preamble << extraSet.str();
2288 if (usePushConstants)
2289 preamble << "layout (push_constant, std430) uniform PushConstantBlock { uint zero; } pc;\n";
2292 // We need to create a shader per iteration.
2293 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2296 std::ostringstream shader;
2297 shader << preamble.str();
2299 deUint32 inputAttachmentCount = 0u;
2301 // Descriptor declarations for this iteration.
2302 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2304 DE_ASSERT(bindingIdx <= std::numeric_limits<deUint32>::max());
2306 const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2307 const auto bindingTypes = binding->typesAtIteration(iter);
2308 const auto hasInputAttachment = de::contains(begin(bindingTypes), end(bindingTypes), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2309 const auto isArray = binding->isArray();
2310 const auto isUnbounded = binding->isUnbounded();
2311 const auto bindingSize = binding->size();
2313 // If the binding is an input attachment, make sure it's not an array.
2314 DE_ASSERT(!hasInputAttachment || !isArray);
2316 // Make sure the descriptor count fits a deInt32 if needed.
2317 DE_ASSERT(!isArray || isUnbounded || bindingSize <= static_cast<size_t>(std::numeric_limits<deInt32>::max()));
// -1 marks an unbounded (runtime-sized) array in the GLSL declaration helper.
2319 const auto arraySize = (isArray ? (isUnbounded ? tcu::just(deInt32{-1}) : tcu::just(static_cast<deInt32>(bindingSize)))
2322 shader << binding->glslDeclarations(iter, 0u, static_cast<deUint32>(bindingIdx), inputAttachmentCount, arraySize);
2324 if (hasInputAttachment)
2325 ++inputAttachmentCount;
2331 << "void main() {\n"
2332 // This checks if we are the first invocation to arrive here, so the checks are executed only once.
2333 << " const uint flag = atomicCompSwap(outputBuffer.value[" << iter << "], 0u, 1u);\n"
2334 << " if (flag == 0u) {\n"
2335 << " uint anyError = 0u;\n"
// Per-binding verification code comparing descriptor contents against expected values.
2338 for (size_t bindingIdx = 0; bindingIdx < numBindings; ++bindingIdx)
2340 const auto binding = m_params.descriptorSet->getBinding(bindingIdx);
2341 const auto idx32 = static_cast<deUint32>(bindingIdx);
2342 shader << binding->glslCheckStatements(iter, 0u, idx32, getDescriptorNumericValue(iter, idx32), tcu::Nothing, usePushConstants);
2346 << " if (anyError == 0u) {\n"
2347 << " atomicAdd(outputBuffer.value[" << iter << "], 1u);\n"
2349 << " }\n" // Closes if (flag == 0u).
// Stage-specific trailing statements for main().
2352 if (m_params.testingStage == TestingStage::VERTEX)
2354 shader << vertexBodyCommon.str();
2356 else if (m_params.testingStage == TestingStage::GEOMETRY)
2359 << " gl_Position = gl_in[0].gl_Position; EmitVertex();\n"
2360 << " gl_Position = gl_in[1].gl_Position; EmitVertex();\n"
2361 << " gl_Position = gl_in[2].gl_Position; EmitVertex();\n"
2364 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2366 shader << tescBodyCommon.str();
2368 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2370 shader << teseBodyCommon.str();
2374 << "}\n" // End of main().
// Register the shader under its per-iteration name with the right source type.
2378 const auto shaderNameStr = shaderName(iter);
2379 const auto shaderStr = shader.str();
2380 auto& glslSource = programCollection.glslSources.add(shaderNameStr);
2382 if (m_params.testingStage == TestingStage::COMPUTE)
2383 glslSource << glu::ComputeSource(shaderStr);
2384 else if (m_params.testingStage == TestingStage::VERTEX)
2385 glslSource << glu::VertexSource(shaderStr);
2386 else if (m_params.testingStage == TestingStage::FRAGMENT)
2387 glslSource << glu::FragmentSource(shaderStr);
2388 else if (m_params.testingStage == TestingStage::GEOMETRY)
2389 glslSource << glu::GeometrySource(shaderStr);
2390 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
2391 glslSource << glu::TessellationControlSource(shaderStr);
2392 else if (m_params.testingStage == TestingStage::TESS_EVAL)
2393 glslSource << glu::TessellationEvaluationSource(shaderStr);
2394 else if (m_params.testingStage == TestingStage::RAY_GEN)
2395 glslSource << glu::RaygenSource(updateRayTracingGLSL(shaderStr));
2396 else if (m_params.testingStage == TestingStage::INTERSECTION)
2397 glslSource << glu::IntersectionSource(updateRayTracingGLSL(shaderStr));
2398 else if (m_params.testingStage == TestingStage::ANY_HIT)
2399 glslSource << glu::AnyHitSource(updateRayTracingGLSL(shaderStr));
2400 else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
2401 glslSource << glu::ClosestHitSource(updateRayTracingGLSL(shaderStr));
2402 else if (m_params.testingStage == TestingStage::MISS)
2403 glslSource << glu::MissSource(updateRayTracingGLSL(shaderStr));
2404 else if (m_params.testingStage == TestingStage::CALLABLE)
2405 glslSource << glu::CallableSource(updateRayTracingGLSL(shaderStr));
2409 if (rayTracing || rayQueries)
2410 glslSource << rtBuildOptions;
// Graphics stages other than vertex need a passthrough vertex shader.
2414 if (m_params.testingStage == TestingStage::FRAGMENT
2415 || m_params.testingStage == TestingStage::GEOMETRY
2416 || m_params.testingStage == TestingStage::TESS_CONTROL
2417 || m_params.testingStage == TestingStage::TESS_EVAL)
2419 // Add passthrough vertex shader that works for points.
2420 std::ostringstream vertPassthrough;
2423 << "out gl_PerVertex\n"
2425 << " vec4 gl_Position;\n"
2427 << vertexPreamble.str()
2428 << "void main() {\n"
2429 << vertexBodyCommon.str()
2432 programCollection.glslSources.add("vert") << glu::VertexSource(vertPassthrough.str());
2435 if (m_params.testingStage == TestingStage::TESS_CONTROL)
2437 // Add passthrough tessellation evaluation shader.
2438 std::ostringstream tesePassthrough;
2441 << tesePreamble.str()
2442 << "void main (void)\n"
2444 << teseBodyCommon.str()
2448 programCollection.glslSources.add("tese") << glu::TessellationEvaluationSource(tesePassthrough.str());
2451 if (m_params.testingStage == TestingStage::TESS_EVAL)
2453 // Add passthrough tessellation control shader.
2454 std::ostringstream tescPassthrough;
2457 << tescPreamble.str()
2458 << "void main (void)\n"
2460 << tescBodyCommon.str()
2464 programCollection.glslSources.add("tesc") << glu::TessellationControlSource(tescPassthrough.str());
2467 if (rayTracing && m_params.testingStage != TestingStage::RAY_GEN)
2469 // Add a "passthrough" ray generation shader.
2470 std::ostringstream rgen;
2472 << "#version 460 core\n"
2473 << "#extension GL_EXT_ray_tracing : require\n"
2474 << "layout (set=1, binding=" << extraBindings << ") uniform accelerationStructureEXT externalAS;\n"
2475 << ((m_params.testingStage == TestingStage::CALLABLE)
2476 ? "layout (location=0) callableDataEXT float unusedCallableData;\n"
2477 : "layout (location=0) rayPayloadEXT float unusedRayPayload;\n")
2483 if (m_params.testingStage == TestingStage::INTERSECTION
2484 || m_params.testingStage == TestingStage::ANY_HIT
2485 || m_params.testingStage == TestingStage::CLOSEST_HIT
2486 || m_params.testingStage == TestingStage::MISS)
2488 // We need to trace rays in this case to get hits or misses.
2489 const auto zDir = ((m_params.testingStage == TestingStage::MISS) ? "-1.0" : "1.0");
2492 << " const uint cullMask = 0xFF;\n"
2493 << " const float tMin = 1.0;\n"
2494 << " const float tMax = 10.0;\n"
2495 << " const vec3 origin = vec3(0.0, 0.0, 0.0);\n"
2496 << " const vec3 direction = vec3(0.0, 0.0, " << zDir << ");\n"
2497 << " traceRayEXT(externalAS, gl_RayFlagsNoneEXT, cullMask, 0, 0, 0, origin, tMin, direction, tMax, 0);\n"
2501 else if (m_params.testingStage == TestingStage::CALLABLE)
2503 rgen << " executeCallableEXT(0, 0);\n";
2509 programCollection.glslSources.add("rgen") << glu::RaygenSource(updateRayTracingGLSL(rgen.str())) << rtBuildOptions;
2511 // Intersection shaders will ignore the intersection, so we need a passthrough miss shader.
2512 if (m_params.testingStage == TestingStage::INTERSECTION)
2514 std::ostringstream miss;
2516 << "#version 460 core\n"
2517 << "#extension GL_EXT_ray_tracing : require\n"
2518 << "layout (location=0) rayPayloadEXT float unusedRayPayload;\n"
2525 programCollection.glslSources.add("miss") << glu::MissSource(updateRayTracingGLSL(miss.str())) << rtBuildOptions;
2530 TestInstance* MutableTypesTest::createInstance (Context& context) const
2532 return new MutableTypesInstance(context, m_params);
2535 void requirePartiallyBound (Context& context)
2537 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2538 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2539 if (!indexingFeatures.descriptorBindingPartiallyBound)
2540 TCU_THROW(NotSupportedError, "Partially bound bindings not supported");
2543 void requireVariableDescriptorCount (Context& context)
2545 context.requireDeviceFunctionality("VK_EXT_descriptor_indexing");
2546 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2547 if (!indexingFeatures.descriptorBindingVariableDescriptorCount)
2548 TCU_THROW(NotSupportedError, "Variable descriptor count not supported");
2551 // Calculates the set of used descriptor types for a given set and iteration count, for bindings matching a predicate.
2552 std::set<VkDescriptorType> getUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations, bool (*predicate)(const BindingInterface* binding))
2554 std::set<VkDescriptorType> usedDescriptorTypes;
2556 for (size_t bindingIdx = 0; bindingIdx < descriptorSet.numBindings(); ++bindingIdx)
2558 const auto bindingPtr = descriptorSet.getBinding(bindingIdx);
2559 if (predicate(bindingPtr))
2561 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2563 const auto descTypes = bindingPtr->typesAtIteration(iter);
2564 usedDescriptorTypes.insert(begin(descTypes), end(descTypes));
2569 return usedDescriptorTypes;
2572 std::set<VkDescriptorType> getAllUsedDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2574 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface*) { return true; });
2577 std::set<VkDescriptorType> getUsedArrayDescriptorTypes (const DescriptorSet& descriptorSet, deUint32 numIterations)
2579 return getUsedDescriptorTypes(descriptorSet, numIterations, [] (const BindingInterface* b) { return b->isArray(); });
2582 // Are we testing a vertex pipeline stage?
2583 bool isVertexStage (TestingStage stage)
2587 case TestingStage::VERTEX:
2588 case TestingStage::TESS_CONTROL:
2589 case TestingStage::TESS_EVAL:
2590 case TestingStage::GEOMETRY:
// Verifies the device supports everything this test variant needs: the mutable
// descriptor type extension, ray tracing / ray query extensions when used,
// variable counts and partially-bound bindings, update-after-bind and dynamic
// indexing features per used descriptor type, descriptor set layout support,
// and stage-specific store/atomic features.
2599 void MutableTypesTest::checkSupport (Context& context) const
2601 context.requireDeviceFunctionality("VK_VALVE_mutable_descriptor_type");
2603 // Check ray tracing if needed.
2604 const bool rayTracing = isRayTracingStage(m_params.testingStage);
// NOTE(review): the two requirements below are guarded in the original source
// (presumably `if (rayTracing)`); the guarding line is not visible in this chunk.
2608 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2609 context.requireDeviceFunctionality("VK_KHR_ray_tracing_pipeline");
2612 // Check if ray queries are needed. Ray queries are used to verify acceleration structure descriptors.
2613 const bool rayQueriesNeeded = usesAccelerationStructures(*m_params.descriptorSet);
2614 if (rayQueriesNeeded)
2616 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2617 context.requireDeviceFunctionality("VK_KHR_ray_query");
2620 // We'll use iterations to check each mutable type, as needed.
2621 const auto numIterations = m_params.descriptorSet->maxTypes();
2623 if (m_params.descriptorSet->lastBindingIsUnbounded())
2624 requireVariableDescriptorCount(context);
2626 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2628 if (m_params.descriptorSet->needsAliasing(iter))
2630 requirePartiallyBound(context);
2635 if (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND)
2637 // Check update after bind for each used descriptor type.
2638 const auto& usedDescriptorTypes = getAllUsedDescriptorTypes(*m_params.descriptorSet, numIterations);
2639 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2641 for (const auto& descType : usedDescriptorTypes)
2645 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2646 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2647 if (!indexingFeatures.descriptorBindingUniformBufferUpdateAfterBind)
2648 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform buffers");
2651 case VK_DESCRIPTOR_TYPE_SAMPLER:
2652 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2653 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2654 if (!indexingFeatures.descriptorBindingSampledImageUpdateAfterBind)
2655 TCU_THROW(NotSupportedError, "Update-after-bind not supported for samplers and sampled images");
2658 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2659 if (!indexingFeatures.descriptorBindingStorageImageUpdateAfterBind)
2660 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage images");
2663 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2664 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2665 if (!indexingFeatures.descriptorBindingStorageBufferUpdateAfterBind)
2666 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage buffers");
2669 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2670 if (!indexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind)
2671 TCU_THROW(NotSupportedError, "Update-after-bind not supported for uniform texel buffers");
2674 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2675 if (!indexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind)
2676 TCU_THROW(NotSupportedError, "Update-after-bind not supported for storage texel buffers");
2679 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2680 TCU_THROW(InternalError, "Tests do not support update-after-bind with input attachments");
2682 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
2684 // Just in case we ever mix some of these in.
2685 context.requireDeviceFunctionality("VK_EXT_inline_uniform_block");
2686 const auto& iubFeatures = context.getInlineUniformBlockFeatures();
2687 if (!iubFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind)
2688 TCU_THROW(NotSupportedError, "Update-after-bind not supported for inline uniform blocks");
2692 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2694 // Just in case we ever mix some of these in.
2695 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2696 const auto& asFeatures = context.getAccelerationStructureFeatures();
2697 if (!asFeatures.descriptorBindingAccelerationStructureUpdateAfterBind)
2698 TCU_THROW(NotSupportedError, "Update-after-bind not supported for acceleration structures");
2702 case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
2703 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_VALVE in list of used descriptor types");
2706 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2711 if (m_params.arrayAccessType == ArrayAccessType::PUSH_CONSTANT)
2713 // These require dynamically uniform indices.
2714 const auto& usedDescriptorTypes = getUsedArrayDescriptorTypes(*m_params.descriptorSet, numIterations);
2715 const auto& features = context.getDeviceFeatures();
2716 const auto descriptorIndexingSupported = context.isDeviceFunctionalitySupported("VK_EXT_descriptor_indexing");
2717 const auto& indexingFeatures = context.getDescriptorIndexingFeatures();
2719 for (const auto& descType : usedDescriptorTypes)
2723 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2724 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2725 if (!features.shaderUniformBufferArrayDynamicIndexing)
2726 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform buffers");
2729 case VK_DESCRIPTOR_TYPE_SAMPLER:
2730 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2731 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2732 if (!features.shaderSampledImageArrayDynamicIndexing)
2733 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for samplers and sampled images");
2736 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2737 if (!features.shaderStorageImageArrayDynamicIndexing)
2738 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage images");
2741 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2742 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2743 if (!features.shaderStorageBufferArrayDynamicIndexing)
2744 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage buffers");
2747 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2748 if (!descriptorIndexingSupported || !indexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing)
2749 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for uniform texel buffers");
2752 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2753 if (!descriptorIndexingSupported || !indexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing)
2754 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for storage texel buffers");
2757 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2758 if (!descriptorIndexingSupported || !indexingFeatures.shaderInputAttachmentArrayDynamicIndexing)
2759 TCU_THROW(NotSupportedError, "Dynamic indexing not supported for input attachments");
2762 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2763 context.requireDeviceFunctionality("VK_KHR_acceleration_structure");
2766 case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
2767 TCU_THROW(InternalError, "Found VK_DESCRIPTOR_TYPE_MUTABLE_VALVE in list of used array descriptor types");
2770 TCU_THROW(InternalError, "Unexpected descriptor type found in list of used descriptor types: " + de::toString(descType));
2775 // Check layout support.
2777 const auto& vkd = context.getDeviceInterface();
2778 const auto device = context.getDevice();
2779 const auto stageFlags = m_params.getStageFlags();
// Destination set layout must be supported.
2782 const auto layoutCreateFlags = m_params.getDstLayoutCreateFlags();
2783 const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2786 TCU_THROW(NotSupportedError, "Required descriptor set layout not supported");
2789 if (m_params.updateType == UpdateType::COPY)
2791 const auto layoutCreateFlags = m_params.getSrcLayoutCreateFlags();
2792 const auto supported = m_params.descriptorSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2795 TCU_THROW(NotSupportedError, "Required descriptor set layout for source set not supported");
2797 // Check specific layouts for the different source sets are supported.
2798 for (deUint32 iter = 0u; iter < numIterations; ++iter)
2800 const auto srcSet = m_params.descriptorSet->genSourceSet(m_params.sourceSetStrategy, iter);
2801 const auto srcLayoutSupported = srcSet->checkDescriptorSetLayout(vkd, device, stageFlags, layoutCreateFlags);
2803 if (!srcLayoutSupported)
2804 TCU_THROW(NotSupportedError, "Descriptor set layout for source set at iteration " + de::toString(iter) + " not supported");
2809 // Check supported stores and stages.
2810 const bool vertexStage = isVertexStage(m_params.testingStage);
2811 const bool fragmentStage = (m_params.testingStage == TestingStage::FRAGMENT);
2812 const bool geometryStage = (m_params.testingStage == TestingStage::GEOMETRY);
2813 const bool tessellation = (m_params.testingStage == TestingStage::TESS_CONTROL || m_params.testingStage == TestingStage::TESS_EVAL);
2815 const auto& features = context.getDeviceFeatures();
2817 if (vertexStage && !features.vertexPipelineStoresAndAtomics)
2818 TCU_THROW(NotSupportedError, "Vertex pipeline stores and atomics not supported");
2820 if (fragmentStage && !features.fragmentStoresAndAtomics)
2821 TCU_THROW(NotSupportedError, "Fragment shader stores and atomics not supported");
2823 if (geometryStage && !features.geometryShader)
2824 TCU_THROW(NotSupportedError, "Geometry shader not supported");
2826 if (tessellation && !features.tessellationShader)
2827 TCU_THROW(NotSupportedError, "Tessellation shaders not supported");
2830 // What to do at each iteration step. Used to apply UPDATE_AFTER_BIND or not.
2837 // Create render pass.
2838 Move<VkRenderPass> buildRenderPass (const DeviceInterface& vkd, VkDevice device, const std::vector<Resource>& resources)
2840 const auto imageFormat = getDescriptorImageFormat();
2842 std::vector<VkAttachmentDescription> attachmentDescriptions;
2843 std::vector<VkAttachmentReference> attachmentReferences;
2844 std::vector<deUint32> attachmentIndices;
2846 for (const auto& resource : resources)
2848 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
2850 const auto nextIndex = static_cast<deUint32>(attachmentDescriptions.size());
2852 const VkAttachmentDescription description = {
2853 0u, // VkAttachmentDescriptionFlags flags;
2854 imageFormat, // VkFormat format;
2855 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
2856 VK_ATTACHMENT_LOAD_OP_LOAD, // VkAttachmentLoadOp loadOp;
2857 VK_ATTACHMENT_STORE_OP_DONT_CARE, // VkAttachmentStoreOp storeOp;
2858 VK_ATTACHMENT_LOAD_OP_DONT_CARE, // VkAttachmentLoadOp stencilLoadOp;
2859 VK_ATTACHMENT_STORE_OP_DONT_CARE, // VkAttachmentStoreOp stencilStoreOp;
2860 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout initialLayout;
2861 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout finalLayout;
2864 const VkAttachmentReference reference = { nextIndex, VK_IMAGE_LAYOUT_GENERAL };
2866 attachmentIndices.push_back(nextIndex);
2867 attachmentDescriptions.push_back(description);
2868 attachmentReferences.push_back(reference);
2872 const auto attachmentCount = static_cast<deUint32>(attachmentDescriptions.size());
2873 DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentIndices.size()));
2874 DE_ASSERT(attachmentCount == static_cast<deUint32>(attachmentReferences.size()));
2876 const VkSubpassDescription subpassDescription =
2878 0u, // VkSubpassDescriptionFlags flags;
2879 VK_PIPELINE_BIND_POINT_GRAPHICS, // VkPipelineBindPoint pipelineBindPoint;
2880 attachmentCount, // deUint32 inputAttachmentCount;
2881 de::dataOrNull(attachmentReferences), // const VkAttachmentReference* pInputAttachments;
2882 0u, // deUint32 colorAttachmentCount;
2883 nullptr, // const VkAttachmentReference* pColorAttachments;
2884 0u, // const VkAttachmentReference* pResolveAttachments;
2885 nullptr, // const VkAttachmentReference* pDepthStencilAttachment;
2886 0u, // deUint32 preserveAttachmentCount;
2887 nullptr, // const deUint32* pPreserveAttachments;
2890 const VkRenderPassCreateInfo renderPassCreateInfo =
2892 VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, // VkStructureType sType;
2893 nullptr, // const void* pNext;
2894 0u, // VkRenderPassCreateFlags flags;
2895 static_cast<deUint32>(attachmentDescriptions.size()), // deUint32 attachmentCount;
2896 de::dataOrNull(attachmentDescriptions), // const VkAttachmentDescription* pAttachments;
2897 1u, // deUint32 subpassCount;
2898 &subpassDescription, // const VkSubpassDescription* pSubpasses;
2899 0u, // deUint32 dependencyCount;
2900 nullptr, // const VkSubpassDependency* pDependencies;
2903 return createRenderPass(vkd, device, &renderPassCreateInfo);
2906 // Create a graphics pipeline.
2907 Move<VkPipeline> buildGraphicsPipeline (const DeviceInterface& vkd, VkDevice device, VkPipelineLayout pipelineLayout,
2908 VkShaderModule vertModule,
2909 VkShaderModule tescModule,
2910 VkShaderModule teseModule,
2911 VkShaderModule geomModule,
2912 VkShaderModule fragModule,
2913 VkRenderPass renderPass)
2915 const auto extent = getDefaultExtent();
2916 const std::vector<VkViewport> viewports (1u, makeViewport(extent));
2917 const std::vector<VkRect2D> scissors (1u, makeRect2D(extent));
2918 const auto hasTess = (tescModule != DE_NULL || teseModule != DE_NULL);
2919 const auto topology = (hasTess ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
2922 const VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo = initVulkanStructure();
2924 const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo = {
2925 VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, // VkStructureType sType;
2926 nullptr, // const void* pNext;
2927 0u, // VkPipelineInputAssemblyStateCreateFlags flags;
2928 topology, // VkPrimitiveTopology topology;
2929 VK_FALSE, // VkBool32 primitiveRestartEnable;
2932 const VkPipelineTessellationStateCreateInfo tessellationStateCreateInfo = {
2933 VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, // VkStructureType sType;
2934 nullptr, // const void* pNext;
2935 0u, // VkPipelineTessellationStateCreateFlags flags;
2936 (hasTess ? 3u : 0u), // deUint32 patchControlPoints;
2939 const VkPipelineViewportStateCreateInfo viewportStateCreateInfo = {
2940 VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, // VkStructureType sType;
2941 nullptr, // const void* pNext;
2942 0u, // VkPipelineViewportStateCreateFlags flags;
2943 static_cast<deUint32>(viewports.size()), // deUint32 viewportCount;
2944 de::dataOrNull(viewports), // const VkViewport* pViewports;
2945 static_cast<deUint32>(scissors.size()), // deUint32 scissorCount;
2946 de::dataOrNull(scissors), // const VkRect2D* pScissors;
2949 const VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo = {
2950 VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, // VkStructureType sType;
2951 nullptr, // const void* pNext;
2952 0u, // VkPipelineRasterizationStateCreateFlags flags;
2953 VK_FALSE, // VkBool32 depthClampEnable;
2954 (fragModule == DE_NULL ? VK_TRUE : VK_FALSE), // VkBool32 rasterizerDiscardEnable;
2955 VK_POLYGON_MODE_FILL, // VkPolygonMode polygonMode;
2956 VK_CULL_MODE_NONE, // VkCullModeFlags cullMode;
2957 VK_FRONT_FACE_CLOCKWISE, // VkFrontFace frontFace;
2958 VK_FALSE, // VkBool32 depthBiasEnable;
2959 0.0f, // float depthBiasConstantFactor;
2960 0.0f, // float depthBiasClamp;
2961 0.0f, // float depthBiasSlopeFactor;
2962 1.0f, // float lineWidth;
2965 const VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo = {
2966 VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, // VkStructureType sType;
2967 nullptr, // const void* pNext;
2968 0u, // VkPipelineMultisampleStateCreateFlags flags;
2969 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits rasterizationSamples;
2970 VK_FALSE, // VkBool32 sampleShadingEnable;
2971 1.0f, // float minSampleShading;
2972 nullptr, // const VkSampleMask* pSampleMask;
2973 VK_FALSE, // VkBool32 alphaToCoverageEnable;
2974 VK_FALSE, // VkBool32 alphaToOneEnable;
2977 const VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo = initVulkanStructure();
2979 const VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo = initVulkanStructure();
2981 return makeGraphicsPipeline(vkd, device, pipelineLayout,
2982 vertModule, tescModule, teseModule, geomModule, fragModule,
2983 renderPass, 0u, &vertexInputStateCreateInfo, &inputAssemblyStateCreateInfo,
2984 (hasTess ? &tessellationStateCreateInfo : nullptr), &viewportStateCreateInfo,
2985 &rasterizationStateCreateInfo, &multisampleStateCreateInfo,
2986 &depthStencilStateCreateInfo, &colorBlendStateCreateInfo, nullptr);
2989 Move<VkFramebuffer> buildFramebuffer (const DeviceInterface& vkd, VkDevice device, VkRenderPass renderPass, const std::vector<Resource>& resources)
2991 const auto extent = getDefaultExtent();
2993 std::vector<VkImageView> inputAttachments;
2994 for (const auto& resource : resources)
2996 if (resource.descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
2997 inputAttachments.push_back(resource.imageView.get());
3000 const VkFramebufferCreateInfo framebufferCreateInfo =
3002 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, // VkStructureType sType;
3003 nullptr, // const void* pNext;
3004 0u, // VkFramebufferCreateFlags flags;
3005 renderPass, // VkRenderPass renderPass;
3006 static_cast<deUint32>(inputAttachments.size()), // deUint32 attachmentCount;
3007 de:: dataOrNull(inputAttachments), // const VkImageView* pAttachments;
3008 extent.width, // deUint32 width;
3009 extent.height, // deUint32 height;
3010 extent.depth, // deUint32 layers;
3013 return createFramebuffer(vkd, device, &framebufferCreateInfo);
// Main test body. For each iteration (one per tested descriptor type combination),
// it builds a pipeline for the stage under test, fills the mutable (dst) descriptor
// set either directly or by copying from a freshly generated source set, records and
// submits one dispatch/draw/trace, and finally verifies the per-iteration output
// buffer plus any descriptors the shader was expected to write.
3016 tcu::TestStatus MutableTypesInstance::iterate ()
3018 const auto device = m_context.getDevice();
3019 const auto physDev = m_context.getPhysicalDevice();
3020 const auto qIndex = m_context.getUniversalQueueFamilyIndex();
3021 const auto queue = m_context.getUniversalQueue();
3023 const auto& vki = m_context.getInstanceInterface();
3024 const auto& vkd = m_context.getDeviceInterface();
3025 auto & alloc = m_context.getDefaultAllocator();
3026 const auto& paramSet = m_params.descriptorSet;
3028 const auto numIterations = paramSet->maxTypes();
3029 const bool useExternalImage = needsExternalImage(*m_params.descriptorSet);
3030 const bool useExternalSampler = needsExternalSampler(*m_params.descriptorSet);
3031 const auto stageFlags = m_params.getStageFlags();
3032 const bool srcSetNeeded = (m_params.updateType == UpdateType::COPY);
3033 const bool updateAfterBind = (m_params.updateMoment == UpdateMoment::UPDATE_AFTER_BIND);
3034 const auto bindPoint = m_params.getBindPoint();
3035 const bool rayTracing = isRayTracingStage(m_params.testingStage);
3036 const bool useAABBs = (m_params.testingStage == TestingStage::INTERSECTION);
3038 // Resources for each iteration.
// Kept alive for the whole run so descriptors written in earlier iterations stay valid.
3039 std::vector<std::vector<Resource>> allResources;
3040 allResources.reserve(numIterations);
3043 const auto cmdPool = makeCommandPool(vkd, device, qIndex);
3045 // Descriptor pool and set for the active (dst) descriptor set.
3046 const auto dstPoolFlags = m_params.getDstPoolCreateFlags();
3047 const auto dstLayoutFlags = m_params.getDstLayoutCreateFlags();
3049 const auto dstPool = paramSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, dstPoolFlags);
3050 const auto dstLayout = paramSet->makeDescriptorSetLayout(vkd, device, stageFlags, dstLayoutFlags);
3051 const auto varCount = paramSet->getVariableDescriptorCount();
3053 using VariableCountInfoPtr = de::MovePtr<VkDescriptorSetVariableDescriptorCountAllocateInfo>;
// Only filled in when the set uses a variable descriptor count binding
// (NOTE(review): guarded by a varCount check in the surrounding flow — confirm).
3055 VariableCountInfoPtr dstVariableCountInfo;
3058 dstVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
3059 *dstVariableCountInfo = initVulkanStructure();
3061 dstVariableCountInfo->descriptorSetCount = 1u;
3062 dstVariableCountInfo->pDescriptorCounts = &(varCount.get());
3064 const auto dstSet = makeDescriptorSet(vkd, device, dstPool.get(), dstLayout.get(), dstVariableCountInfo.get());
3066 // Source pool and set (optional).
// These are (re)created per iteration inside the loop below when copying is used.
3067 const auto srcPoolFlags = m_params.getSrcPoolCreateFlags();
3068 const auto srcLayoutFlags = m_params.getSrcLayoutCreateFlags();
3069 DescriptorSetPtr iterationSrcSet;
3070 Move<VkDescriptorPool> srcPool;
3071 Move<VkDescriptorSetLayout> srcLayout;
3072 Move<VkDescriptorSet> srcSet;
3074 // Extra set for external resources and output buffer.
// Binding 0 is always the output storage buffer (one deUint32 slot per iteration).
3075 std::vector<Resource> extraResources;
3076 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vkd, device, alloc, qIndex, queue, useAABBs, 0u, numIterations);
3077 if (useExternalImage)
3078 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, vkd, device, alloc, qIndex, queue, useAABBs, getExternalSampledImageValue());
3079 if (useExternalSampler)
3080 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_SAMPLER, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
// Extra acceleration structure (NOTE(review): presumably only for ray tracing stages — confirm the guard).
3082 extraResources.emplace_back(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, vkd, device, alloc, qIndex, queue, useAABBs, 0u);
3084 Move<VkDescriptorPool> extraPool;
3086 DescriptorPoolBuilder poolBuilder;
3087 poolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
3088 if (useExternalImage)
3089 poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
3090 if (useExternalSampler)
3091 poolBuilder.addType(VK_DESCRIPTOR_TYPE_SAMPLER);
3093 poolBuilder.addType(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
3094 extraPool = poolBuilder.build(vkd, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
3097 Move<VkDescriptorSetLayout> extraLayout;
// Layout mirrors the pool contents above, in the same binding order.
3099 DescriptorSetLayoutBuilder layoutBuilder;
3100 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u, stageFlags, nullptr);
3101 if (useExternalImage)
3102 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1u, stageFlags, nullptr);
3103 if (useExternalSampler)
3104 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_SAMPLER, 1u, stageFlags, nullptr);
3107 // The extra acceleration structure is used from the ray generation shader only.
3108 layoutBuilder.addBinding(VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, 1u, VK_SHADER_STAGE_RAYGEN_BIT_KHR, nullptr);
3110 extraLayout = layoutBuilder.build(vkd, device);
3113 const auto extraSet = makeDescriptorSet(vkd, device, extraPool.get(), extraLayout.get());
3115 // Update extra set.
3116 using DescriptorBufferInfoPtr = de::MovePtr<VkDescriptorBufferInfo>;
3117 using DescriptorImageInfoPtr = de::MovePtr<VkDescriptorImageInfo>;
3118 using DescriptorASInfoPtr = de::MovePtr<VkWriteDescriptorSetAccelerationStructureKHR>;
// bindingCount doubles as the running index into extraResources while the
// write-info structures below are prepared.
3120 deUint32 bindingCount = 0u;
3121 DescriptorBufferInfoPtr bufferInfoPtr;
3122 DescriptorImageInfoPtr imageInfoPtr;
3123 DescriptorImageInfoPtr samplerInfoPtr;
3124 DescriptorASInfoPtr asWriteInfoPtr;
3126 const auto outputBufferSize = static_cast<VkDeviceSize>(sizeof(deUint32) * static_cast<size_t>(numIterations));
3127 bufferInfoPtr = DescriptorBufferInfoPtr(new VkDescriptorBufferInfo(makeDescriptorBufferInfo(extraResources[bindingCount++].bufferWithMemory->get(), 0ull, outputBufferSize)));
3128 if (useExternalImage)
3129 imageInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(DE_NULL, extraResources[bindingCount++].imageView.get(), VK_IMAGE_LAYOUT_GENERAL)));
3130 if (useExternalSampler)
3131 samplerInfoPtr = DescriptorImageInfoPtr(new VkDescriptorImageInfo(makeDescriptorImageInfo(extraResources[bindingCount++].sampler.get(), DE_NULL, VK_IMAGE_LAYOUT_GENERAL)));
3134 asWriteInfoPtr = DescriptorASInfoPtr(new VkWriteDescriptorSetAccelerationStructureKHR);
3135 *asWriteInfoPtr = initVulkanStructure();
3136 asWriteInfoPtr->accelerationStructureCount = 1u;
3137 asWriteInfoPtr->pAccelerationStructures = extraResources[bindingCount++].asData.tlas.get()->getPtr();
// NOTE(review): the writes below address extraSet bindings starting from 0; confirm
// bindingCount is reset to 0 between preparing the info structs and recording them.
3142 DescriptorSetUpdateBuilder updateBuilder;
3143 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, bufferInfoPtr.get());
3144 if (useExternalImage)
3145 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, imageInfoPtr.get());
3146 if (useExternalSampler)
3147 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_SAMPLER, samplerInfoPtr.get());
3149 updateBuilder.writeSingle(extraSet.get(), DescriptorSetUpdateBuilder::Location::binding(bindingCount++), VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, asWriteInfoPtr.get());
3150 updateBuilder.update(vkd, device);
// Push constant value; presumably consumed as the descriptor array index by the
// generated shaders — confirm in the shader-generation code.
3154 const deUint32 zero = 0u;
3155 const VkPushConstantRange pcRange = {stageFlags, 0u /*offset*/, static_cast<deUint32>(sizeof(zero)) /*size*/ };
3157 // Needed for some test variants.
// Passthrough shader modules for the stages surrounding the one under test.
3158 Move<VkShaderModule> vertPassthrough;
3159 Move<VkShaderModule> tesePassthrough;
3160 Move<VkShaderModule> tescPassthrough;
3161 Move<VkShaderModule> rgenPassthrough;
3162 Move<VkShaderModule> missPassthrough;
3164 if (m_params.testingStage == TestingStage::FRAGMENT
3165 || m_params.testingStage == TestingStage::GEOMETRY
3166 || m_params.testingStage == TestingStage::TESS_CONTROL
3167 || m_params.testingStage == TestingStage::TESS_EVAL)
3169 vertPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("vert"), 0u);
3172 if (m_params.testingStage == TestingStage::TESS_CONTROL)
3174 tesePassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tese"), 0u);
3177 if (m_params.testingStage == TestingStage::TESS_EVAL)
3179 tescPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("tesc"), 0u);
3182 if (m_params.testingStage == TestingStage::CLOSEST_HIT
3183 || m_params.testingStage == TestingStage::ANY_HIT
3184 || m_params.testingStage == TestingStage::INTERSECTION
3185 || m_params.testingStage == TestingStage::MISS
3186 || m_params.testingStage == TestingStage::CALLABLE)
3188 rgenPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("rgen"), 0u);
3191 if (m_params.testingStage == TestingStage::INTERSECTION)
3193 missPassthrough = createShaderModule(vkd, device, m_context.getBinaryCollection().get("miss"), 0u);
// One full build/update/submit/verify cycle per iteration.
3196 for (deUint32 iteration = 0u; iteration < numIterations; ++iteration)
3198 // Generate source set for the current iteration.
3201 // Free previous descriptor set before rebuilding the pool.
3202 srcSet = Move<VkDescriptorSet>();
3203 iterationSrcSet = paramSet->genSourceSet(m_params.sourceSetStrategy, iteration);
3204 srcPool = iterationSrcSet->makeDescriptorPool(vkd, device, m_params.poolMutableStrategy, srcPoolFlags);
3205 srcLayout = iterationSrcSet->makeDescriptorSetLayout(vkd, device, stageFlags, srcLayoutFlags);
3207 const auto srcVarCount = iterationSrcSet->getVariableDescriptorCount();
3208 VariableCountInfoPtr srcVariableCountInfo;
3212 srcVariableCountInfo = VariableCountInfoPtr(new VkDescriptorSetVariableDescriptorCountAllocateInfo);
3213 *srcVariableCountInfo = initVulkanStructure();
3215 srcVariableCountInfo->descriptorSetCount = 1u;
3216 srcVariableCountInfo->pDescriptorCounts = &(srcVarCount.get());
3219 srcSet = makeDescriptorSet(vkd, device, srcPool.get(), srcLayout.get(), srcVariableCountInfo.get());
3222 // Set layouts and sets used in the pipeline.
// Set 0: the mutable set under test. Set 1: output buffer plus external resources.
3223 const std::vector<VkDescriptorSetLayout> setLayouts = {dstLayout.get(), extraLayout.get()};
3224 const std::vector<VkDescriptorSet> usedSets = {dstSet.get(), extraSet.get()};
3226 // Create resources.
3227 allResources.emplace_back(paramSet->createResources(vkd, device, alloc, qIndex, queue, iteration, useAABBs));
3228 const auto& resources = allResources.back();
3230 // Make pipeline for the current iteration.
3231 const auto pipelineLayout = makePipelineLayout(vkd, device, static_cast<deUint32>(setLayouts.size()), de::dataOrNull(setLayouts), 1u, &pcRange);
3232 const auto moduleName = shaderName(iteration);
3233 const auto shaderModule = createShaderModule(vkd, device, m_context.getBinaryCollection().get(moduleName), 0u);
3235 Move<VkPipeline> pipeline;
3236 Move<VkRenderPass> renderPass;
3237 Move<VkFramebuffer> framebuffer;
// Ray tracing state; only filled in for the ray tracing bind point below.
3239 deUint32 shaderGroupHandleSize = 0u;
3240 deUint32 shaderGroupBaseAlignment = 1u;
3242 de::MovePtr<BufferWithMemory> raygenSBT;
3243 de::MovePtr<BufferWithMemory> missSBT;
3244 de::MovePtr<BufferWithMemory> hitSBT;
3245 de::MovePtr<BufferWithMemory> callableSBT;
3247 VkStridedDeviceAddressRegionKHR raygenSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3248 VkStridedDeviceAddressRegionKHR missSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3249 VkStridedDeviceAddressRegionKHR hitSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3250 VkStridedDeviceAddressRegionKHR callableSBTRegion = makeStridedDeviceAddressRegionKHR(DE_NULL, 0, 0);
3252 if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
3253 pipeline = makeComputePipeline(vkd, device, pipelineLayout.get(), 0u, shaderModule.get(), 0u, nullptr);
3254 else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
// Pick which graphics stage runs the generated shader; the rest use passthroughs.
3256 VkShaderModule vertModule = DE_NULL;
3257 VkShaderModule teseModule = DE_NULL;
3258 VkShaderModule tescModule = DE_NULL;
3259 VkShaderModule geomModule = DE_NULL;
3260 VkShaderModule fragModule = DE_NULL;
3262 if (m_params.testingStage == TestingStage::VERTEX)
3263 vertModule = shaderModule.get();
3264 else if (m_params.testingStage == TestingStage::FRAGMENT)
3266 vertModule = vertPassthrough.get();
3267 fragModule = shaderModule.get();
3269 else if (m_params.testingStage == TestingStage::GEOMETRY)
3271 vertModule = vertPassthrough.get();
3272 geomModule = shaderModule.get();
3274 else if (m_params.testingStage == TestingStage::TESS_CONTROL)
3276 vertModule = vertPassthrough.get();
3277 teseModule = tesePassthrough.get();
3278 tescModule = shaderModule.get();
3280 else if (m_params.testingStage == TestingStage::TESS_EVAL)
3282 vertModule = vertPassthrough.get();
3283 tescModule = tescPassthrough.get();
3284 teseModule = shaderModule.get();
3289 renderPass = buildRenderPass(vkd, device, resources);
3290 pipeline = buildGraphicsPipeline(vkd, device, pipelineLayout.get(), vertModule, tescModule, teseModule, geomModule, fragModule, renderPass.get());
3291 framebuffer = buildFramebuffer(vkd, device, renderPass.get(), resources);
3293 else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
3295 const auto rayTracingPipeline = de::newMovePtr<RayTracingPipeline>();
3296 const auto rayTracingPropertiesKHR = makeRayTracingProperties(vki, physDev);
3297 shaderGroupHandleSize = rayTracingPropertiesKHR->getShaderGroupHandleSize();
3298 shaderGroupBaseAlignment = rayTracingPropertiesKHR->getShaderGroupBaseAlignment();
3300 VkShaderModule rgenModule = DE_NULL;
3301 VkShaderModule isecModule = DE_NULL;
3302 VkShaderModule ahitModule = DE_NULL;
3303 VkShaderModule chitModule = DE_NULL;
3304 VkShaderModule missModule = DE_NULL;
3305 VkShaderModule callModule = DE_NULL;
// Shader group indices: rgen is always group 0; the others are assigned per stage
// (NOTE(review): assignment of hit/miss/call group numbers appears to happen in the
// elided branches — confirm against the full source).
3307 const deUint32 rgenGroup = 0u;
3308 deUint32 hitGroup = 0u;
3309 deUint32 missGroup = 0u;
3310 deUint32 callGroup = 0u;
3312 if (m_params.testingStage == TestingStage::RAY_GEN)
3314 rgenModule = shaderModule.get();
3315 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3317 else if (m_params.testingStage == TestingStage::INTERSECTION)
3321 rgenModule = rgenPassthrough.get();
3322 missModule = missPassthrough.get();
3323 isecModule = shaderModule.get();
3324 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3325 rayTracingPipeline->addShader(VK_SHADER_STAGE_INTERSECTION_BIT_KHR, isecModule, hitGroup);
3326 rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
3328 else if (m_params.testingStage == TestingStage::ANY_HIT)
3331 rgenModule = rgenPassthrough.get();
3332 ahitModule = shaderModule.get();
3333 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3334 rayTracingPipeline->addShader(VK_SHADER_STAGE_ANY_HIT_BIT_KHR, ahitModule, hitGroup);
3336 else if (m_params.testingStage == TestingStage::CLOSEST_HIT)
3339 rgenModule = rgenPassthrough.get();
3340 chitModule = shaderModule.get();
3341 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3342 rayTracingPipeline->addShader(VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, chitModule, hitGroup);
3344 else if (m_params.testingStage == TestingStage::MISS)
3347 rgenModule = rgenPassthrough.get();
3348 missModule = shaderModule.get();
3349 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3350 rayTracingPipeline->addShader(VK_SHADER_STAGE_MISS_BIT_KHR, missModule, missGroup);
3352 else if (m_params.testingStage == TestingStage::CALLABLE)
3355 rgenModule = rgenPassthrough.get();
3356 callModule = shaderModule.get();
3357 rayTracingPipeline->addShader(VK_SHADER_STAGE_RAYGEN_BIT_KHR, rgenModule, rgenGroup);
3358 rayTracingPipeline->addShader(VK_SHADER_STAGE_CALLABLE_BIT_KHR, callModule, callGroup);
3363 pipeline = rayTracingPipeline->createPipeline(vkd, device, pipelineLayout.get());
// Build shader binding tables and their device address regions for each group in use.
3365 raygenSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, rgenGroup, 1u);
3366 raygenSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, raygenSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3370 missSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, missGroup, 1u);
3371 missSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, missSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3376 hitSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, hitGroup, 1u);
3377 hitSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, hitSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3382 callableSBT = rayTracingPipeline->createShaderBindingTable(vkd, device, pipeline.get(), alloc, shaderGroupHandleSize, shaderGroupBaseAlignment, callGroup, 1u);
3383 callableSBTRegion = makeStridedDeviceAddressRegionKHR(getBufferDeviceAddress(vkd, device, callableSBT->get(), 0ull), shaderGroupHandleSize, shaderGroupHandleSize);
3389 // Command buffer for the current iteration.
3390 const auto cmdBufferPtr = allocateCommandBuffer(vkd, device, cmdPool.get(), VK_COMMAND_BUFFER_LEVEL_PRIMARY);
3391 const auto cmdBuffer = cmdBufferPtr.get();
3393 beginCommandBuffer(vkd, cmdBuffer);
// Order of bind vs. update depends on the update moment: with UPDATE_AFTER_BIND the
// set is bound first and updated afterwards; otherwise the update comes first.
3395 const Step steps[] = {
3396 (updateAfterBind ? Step::BIND : Step::UPDATE),
3397 (updateAfterBind ? Step::UPDATE : Step::BIND)
3400 for (const auto& step : steps)
3402 if (step == Step::BIND)
3404 vkd.cmdBindPipeline(cmdBuffer, bindPoint, pipeline.get());
3405 vkd.cmdBindDescriptorSets(cmdBuffer, bindPoint, pipelineLayout.get(), 0u, static_cast<deUint32>(usedSets.size()), de::dataOrNull(usedSets), 0u, nullptr);
3407 else // Step::UPDATE
3411 // Note: these operations need to be called on paramSet and not iterationSrcSet. The latter is a compatible set
3412 // that's correct and contains compatible bindings but, when a binding has been changed from non-mutable to
3413 // mutable or to an extended mutable type, the list of descriptor types for the mutable bindings in
3414 // iterationSrcSet are not in iteration order like they are in the original set and must not be taken into
3415 // account to update or copy sets.
3416 paramSet->updateDescriptorSet(vkd, device, srcSet.get(), iteration, resources);
3417 paramSet->copyDescriptorSet(vkd, device, srcSet.get(), dstSet.get());
// Direct write path (no source set): fill dstSet in place.
3421 paramSet->updateDescriptorSet(vkd, device, dstSet.get(), iteration, resources);
3427 vkd.cmdPushConstants(cmdBuffer, pipelineLayout.get(), stageFlags, 0u, static_cast<deUint32>(sizeof(zero)), &zero);
3429 if (bindPoint == VK_PIPELINE_BIND_POINT_COMPUTE)
3430 vkd.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
3431 else if (bindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS)
3433 const auto extent = getDefaultExtent();
3434 const auto renderArea = makeRect2D(extent);
3436 beginRenderPass(vkd, cmdBuffer, renderPass.get(), framebuffer.get(), renderArea);
3437 vkd.cmdDraw(cmdBuffer, 3u, 1u, 0u, 0u);
3438 endRenderPass(vkd, cmdBuffer);
3440 else if (bindPoint == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR)
3442 vkd.cmdTraceRaysKHR(cmdBuffer, &raygenSBTRegion, &missSBTRegion, &hitSBTRegion, &callableSBTRegion, 1u, 1u, 1u);
3447 endCommandBuffer(vkd, cmdBuffer);
3448 submitCommandsAndWait(vkd, device, queue, cmdBuffer);
3450 // Verify output buffer.
// Reads this iteration's slot in the extra storage buffer (binding 0 of the extra set).
3452 const auto outputBufferVal = extraResources[0].getStoredValue(vkd, device, alloc, qIndex, queue, iteration);
3453 DE_ASSERT(static_cast<bool>(outputBufferVal));
3455 const auto expectedValue = getExpectedOutputBufferValue();
3456 if (outputBufferVal.get() != expectedValue)
3458 std::ostringstream msg;
3459 msg << "Iteration " << iteration << ": unexpected value found in output buffer (expected " << expectedValue << " and found " << outputBufferVal.get() << ")";
3460 TCU_FAIL(msg.str());
3464 // Verify descriptor writes.
// resourcesOffset tracks the flat position of each binding's descriptors inside the
// per-iteration resources vector; advanced by the binding's descriptor count below.
3466 size_t resourcesOffset = 0;
3467 const auto writeMask = getStoredValueMask();
3468 const auto numBindings = paramSet->numBindings();
3470 for (deUint32 bindingIdx = 0u; bindingIdx < numBindings; ++bindingIdx)
3472 const auto binding = paramSet->getBinding(bindingIdx);
3473 const auto bindingTypes = binding->typesAtIteration(iteration);
3475 for (size_t descriptorIdx = 0; descriptorIdx < bindingTypes.size(); ++descriptorIdx)
3477 const auto& descriptorType = bindingTypes[descriptorIdx];
// Only descriptors the shader can write to are checked against the write mask.
3478 if (!isShaderWritable(descriptorType))
3481 const auto& resource = resources[resourcesOffset + descriptorIdx];
3482 const auto initialValue = resource.initialValue;
3483 const auto storedValuePtr = resource.getStoredValue(vkd, device, alloc, qIndex, queue);
3485 DE_ASSERT(static_cast<bool>(storedValuePtr));
3486 const auto storedValue = storedValuePtr.get();
// The shader is expected to OR the write mask into the descriptor's initial value.
3487 const auto expectedValue = (initialValue | writeMask);
3488 if (expectedValue != storedValue)
3490 std::ostringstream msg;
3491 msg << "Iteration " << iteration << ": descriptor at binding " << bindingIdx << " index " << descriptorIdx
3492 << " with type " << de::toString(descriptorType) << " contains unexpected value " << std::hex
3493 << storedValue << " (expected " << expectedValue << ")";
3494 TCU_FAIL(msg.str());
3498 resourcesOffset += bindingTypes.size();
3503 return tcu::TestStatus::pass("Pass");
3506 using GroupPtr = de::MovePtr<tcu::TestCaseGroup>;
3508 void createMutableTestVariants (tcu::TestContext& testCtx, tcu::TestCaseGroup* parentGroup, const DescriptorSetPtr& descriptorSet, const std::vector<TestingStage>& stagesToTest)
3512 UpdateType updateType;
3515 {UpdateType::WRITE, "update_write"},
3516 {UpdateType::COPY, "update_copy"},
3521 SourceSetStrategy sourceSetStrategy;
3523 } sourceStrategies[] = {
3524 {SourceSetStrategy::MUTABLE, "mutable_source"},
3525 {SourceSetStrategy::NONMUTABLE, "nonmutable_source"},
3526 {SourceSetStrategy::NO_SOURCE, "no_source"},
3531 SourceSetType sourceSetType;
3534 {SourceSetType::NORMAL, "normal_source"},
3535 {SourceSetType::HOST_ONLY, "host_only_source"},
3536 {SourceSetType::NO_SOURCE, "no_source"},
3541 PoolMutableStrategy poolMutableStrategy;
3543 } poolStrategies[] = {
3544 {PoolMutableStrategy::KEEP_TYPES, "pool_same_types"},
3545 {PoolMutableStrategy::NO_TYPES, "pool_no_types"},
3546 {PoolMutableStrategy::EXPAND_TYPES, "pool_expand_types"},
3551 UpdateMoment updateMoment;
3553 } updateMoments[] = {
3554 {UpdateMoment::NORMAL, "pre_update"},
3555 {UpdateMoment::UPDATE_AFTER_BIND, "update_after_bind"},
3560 ArrayAccessType arrayAccessType;
3562 } arrayAccessTypes[] = {
3563 {ArrayAccessType::CONSTANT, "index_constant"},
3564 {ArrayAccessType::PUSH_CONSTANT, "index_push_constant"},
3565 {ArrayAccessType::NO_ARRAY, "no_array"},
3568 const struct StageAndName
3570 TestingStage testingStage;
3572 } testStageList[] = {
3573 {TestingStage::COMPUTE, "comp"},
3574 {TestingStage::VERTEX, "vert"},
3575 {TestingStage::TESS_CONTROL, "tesc"},
3576 {TestingStage::TESS_EVAL, "tese"},
3577 {TestingStage::GEOMETRY, "geom"},
3578 {TestingStage::FRAGMENT, "frag"},
3579 {TestingStage::RAY_GEN, "rgen"},
3580 {TestingStage::INTERSECTION, "isec"},
3581 {TestingStage::ANY_HIT, "ahit"},
3582 {TestingStage::CLOSEST_HIT, "chit"},
3583 {TestingStage::MISS, "miss"},
3584 {TestingStage::CALLABLE, "call"},
3587 const bool hasArrays = descriptorSet->hasArrays();
3588 const bool hasInputAttachments = usesInputAttachments(*descriptorSet);
3590 for (const auto& ut : updateTypes)
3592 GroupPtr updateGroup(new tcu::TestCaseGroup(testCtx, ut.name, ""));
3594 for (const auto& srcStrategy : sourceStrategies)
3596 // Skip combinations that make no sense.
3597 if (ut.updateType == UpdateType::WRITE && srcStrategy.sourceSetStrategy != SourceSetStrategy::NO_SOURCE)
3600 if (ut.updateType == UpdateType::COPY && srcStrategy.sourceSetStrategy == SourceSetStrategy::NO_SOURCE)
3603 if (srcStrategy.sourceSetStrategy == SourceSetStrategy::NONMUTABLE && descriptorSet->needsAnyAliasing())
3606 GroupPtr srcStrategyGroup(new tcu::TestCaseGroup(testCtx, srcStrategy.name, ""));
3608 for (const auto& srcType : sourceTypes)
3610 // Skip combinations that make no sense.
3611 if (ut.updateType == UpdateType::WRITE && srcType.sourceSetType != SourceSetType::NO_SOURCE)
3614 if (ut.updateType == UpdateType::COPY && srcType.sourceSetType == SourceSetType::NO_SOURCE)
3617 GroupPtr srcTypeGroup(new tcu::TestCaseGroup(testCtx, srcType.name, ""));
3619 for (const auto& poolStrategy: poolStrategies)
3621 GroupPtr poolStrategyGroup(new tcu::TestCaseGroup(testCtx, poolStrategy.name, ""));
3623 for (const auto& moment : updateMoments)
3625 //if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && srcType.sourceSetType == SourceSetType::HOST_ONLY)
3628 if (moment.updateMoment == UpdateMoment::UPDATE_AFTER_BIND && hasInputAttachments)
3631 GroupPtr momentGroup(new tcu::TestCaseGroup(testCtx, moment.name, ""));
3633 for (const auto& accessType : arrayAccessTypes)
3635 // Skip combinations that make no sense.
3636 if (hasArrays && accessType.arrayAccessType == ArrayAccessType::NO_ARRAY)
3639 if (!hasArrays && accessType.arrayAccessType != ArrayAccessType::NO_ARRAY)
3642 GroupPtr accessTypeGroup(new tcu::TestCaseGroup(testCtx, accessType.name, ""));
3644 for (const auto& testStage : stagesToTest)
3646 const auto beginItr = std::begin(testStageList);
3647 const auto endItr = std::end(testStageList);
3648 const auto iter = std::find_if(beginItr, endItr, [testStage] (const StageAndName& ts) { return ts.testingStage == testStage; });
3650 DE_ASSERT(iter != endItr);
3651 const auto& stage = *iter;
3653 if (hasInputAttachments && stage.testingStage != TestingStage::FRAGMENT)
3656 TestParams params = {
3659 srcStrategy.sourceSetStrategy,
3660 srcType.sourceSetType,
3661 poolStrategy.poolMutableStrategy,
3662 moment.updateMoment,
3663 accessType.arrayAccessType,
3667 accessTypeGroup->addChild(new MutableTypesTest(testCtx, stage.name, "", params));
3670 momentGroup->addChild(accessTypeGroup.release());
3673 poolStrategyGroup->addChild(momentGroup.release());
3676 srcTypeGroup->addChild(poolStrategyGroup.release());
3679 srcStrategyGroup->addChild(srcTypeGroup.release());
3682 updateGroup->addChild(srcStrategyGroup.release());
3685 parentGroup->addChild(updateGroup.release());
3691 std::string descriptorTypeStr (VkDescriptorType descriptorType)
3693 static const auto prefixLen = std::string("VK_DESCRIPTOR_TYPE_").size();
3694 return de::toLower(de::toString(descriptorType).substr(prefixLen));
3697 tcu::TestCaseGroup* createDescriptorValveMutableTests (tcu::TestContext& testCtx)
3699 GroupPtr mainGroup(new tcu::TestCaseGroup(testCtx, "mutable_descriptor", "Tests for VK_VALVE_mutable_descriptor_type"));
3701 const VkDescriptorType basicDescriptorTypes[] = {
3702 VK_DESCRIPTOR_TYPE_SAMPLER,
3703 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
3704 VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
3705 VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
3706 VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
3707 VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
3708 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
3709 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
3710 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
3711 VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
3714 static const auto mandatoryTypes = getMandatoryMutableTypes();
3716 using StageVec = std::vector<TestingStage>;
3718 const StageVec allStages =
3720 TestingStage::COMPUTE,
3721 TestingStage::VERTEX,
3722 TestingStage::TESS_CONTROL,
3723 TestingStage::TESS_EVAL,
3724 TestingStage::GEOMETRY,
3725 TestingStage::FRAGMENT,
3726 TestingStage::RAY_GEN,
3727 TestingStage::INTERSECTION,
3728 TestingStage::ANY_HIT,
3729 TestingStage::CLOSEST_HIT,
3731 TestingStage::CALLABLE,
3734 const StageVec reducedStages =
3736 TestingStage::COMPUTE,
3737 TestingStage::VERTEX,
3738 TestingStage::FRAGMENT,
3739 TestingStage::RAY_GEN,
3742 const StageVec computeOnly =
3744 TestingStage::COMPUTE,
3747 // Basic tests with a single mutable descriptor.
3749 GroupPtr singleCases(new tcu::TestCaseGroup(testCtx, "single", "Basic mutable descriptor tests with a single mutable descriptor"));
3751 for (const auto& descriptorType : basicDescriptorTypes)
3753 const auto groupName = descriptorTypeStr(descriptorType);
3754 const std::vector<VkDescriptorType> actualTypes(1u, descriptorType);
3756 DescriptorSetPtr setPtr;
3758 DescriptorSet::BindingPtrVector setBindings;
3759 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, actualTypes));
3760 setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3763 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3764 createMutableTestVariants(testCtx, subGroup.get(), setPtr, allStages);
3766 singleCases->addChild(subGroup.release());
3769 // Case with a single descriptor that iterates several types.
3771 DescriptorSetPtr setPtr;
3773 DescriptorSet::BindingPtrVector setBindings;
3774 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mandatoryTypes));
3775 setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3778 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "all_mandatory", ""));
3779 createMutableTestVariants(testCtx, subGroup.get(), setPtr, reducedStages);
3781 singleCases->addChild(subGroup.release());
3784 // Cases that try to verify switching from any descriptor type to any other is possible.
3786 GroupPtr subGroup(new tcu::TestCaseGroup(testCtx, "switches", "Test switching from one to another descriptor type works as expected"));
3788 for (const auto& initialDescriptorType : basicDescriptorTypes)
3790 for (const auto& finalDescriptorType : basicDescriptorTypes)
3792 if (initialDescriptorType == finalDescriptorType)
3795 const std::vector<VkDescriptorType> mutableTypes { initialDescriptorType, finalDescriptorType };
3796 DescriptorSet::BindingPtrVector setBindings;
3797 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mutableTypes));
3799 DescriptorSetPtr setPtr = DescriptorSetPtr(new DescriptorSet(setBindings));
3801 const auto groupName = descriptorTypeStr(initialDescriptorType) + "_" + descriptorTypeStr(finalDescriptorType);
3802 GroupPtr combinationGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3803 createMutableTestVariants(testCtx, combinationGroup.get(), setPtr, reducedStages);
3804 subGroup->addChild(combinationGroup.release());
3808 singleCases->addChild(subGroup.release());
3811 mainGroup->addChild(singleCases.release());
3814 // Cases with a single non-mutable descriptor. This provides some basic checks to verify copying to non-mutable bindings works.
3816 GroupPtr singleNonMutableGroup (new tcu::TestCaseGroup(testCtx, "single_nonmutable", "Tests using a single non-mutable descriptor"));
3818 for (const auto& descriptorType : basicDescriptorTypes)
3820 DescriptorSet::BindingPtrVector bindings;
3821 bindings.emplace_back(new SingleBinding(descriptorType, std::vector<VkDescriptorType>()));
3822 DescriptorSetPtr descriptorSet (new DescriptorSet(bindings));
3824 const auto groupName = descriptorTypeStr(descriptorType);
3825 GroupPtr descGroup (new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3827 createMutableTestVariants(testCtx, descGroup.get(), descriptorSet, reducedStages);
3828 singleNonMutableGroup->addChild(descGroup.release());
3831 mainGroup->addChild(singleNonMutableGroup.release());
3837 } unboundedCases[] = {
3838 {false, "constant_size"},
3839 {true, "unbounded"},
3845 } aliasingCases[] = {
3846 {false, "noaliasing"},
3853 const char* groupName;
3854 const char* groupDesc;
3855 } arrayCountGroups[] = {
3856 {true, false, "one_array", "Tests using an array of mutable descriptors"},
3857 {false, false, "multiple_arrays", "Tests using multiple arrays of mutable descriptors"},
3858 {false, true, "multiple_arrays_mixed", "Tests using multiple arrays of mutable descriptors mixed with arrays of nonmutable ones"},
3861 for (const auto& variant : arrayCountGroups)
3863 GroupPtr arrayGroup(new tcu::TestCaseGroup(testCtx, variant.groupName, variant.groupDesc));
3865 for (const auto& unboundedCase : unboundedCases)
3867 GroupPtr unboundedGroup(new tcu::TestCaseGroup(testCtx, unboundedCase.name, ""));
3869 for (const auto& aliasingCase : aliasingCases)
3871 GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));
3873 DescriptorSet::BindingPtrVector setBindings;
3875 // Prepare descriptors for this test variant.
3876 for (size_t mandatoryTypesRotation = 0; mandatoryTypesRotation < mandatoryTypes.size(); ++mandatoryTypesRotation)
3878 const bool isLastBinding = (variant.oneArrayOnly || mandatoryTypesRotation == mandatoryTypes.size() - 1u);
3879 const bool isUnbounded = (unboundedCase.unbounded && isLastBinding);
3881 // Create a rotation of the mandatory types for each mutable array binding.
3882 auto mandatoryTypesVector = mandatoryTypes;
3884 const auto beginPtr = &mandatoryTypesVector[0];
3885 const auto endPtr = beginPtr + mandatoryTypesVector.size();
3886 std::rotate(beginPtr, &mandatoryTypesVector[mandatoryTypesRotation], endPtr);
3889 std::vector<SingleBinding> arrayBindings;
3891 if (aliasingCase.aliasing)
3893 // With aliasing, the descriptor types rotate in each descriptor.
3894 for (size_t typeIdx = 0; typeIdx < mandatoryTypesVector.size(); ++typeIdx)
3896 auto rotatedTypes = mandatoryTypesVector;
3897 const auto beginPtr = &rotatedTypes[0];
3898 const auto endPtr = beginPtr + rotatedTypes.size();
3900 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
3902 arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes);
3907 // Without aliasing, all descriptors use the same type at the same time.
3908 const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, mandatoryTypesVector);
3909 arrayBindings.resize(mandatoryTypesVector.size(), noAliasingBinding);
3912 setBindings.emplace_back(new ArrayBinding(isUnbounded, arrayBindings));
3914 if (variant.mixNonMutable && !isUnbounded)
3916 // Create a non-mutable array binding interleaved with the other ones.
3917 const SingleBinding nonMutableBinding(mandatoryTypes[mandatoryTypesRotation], std::vector<VkDescriptorType>());
3918 std::vector<SingleBinding> nonMutableBindings(mandatoryTypes.size(), nonMutableBinding);
3919 setBindings.emplace_back(new ArrayBinding(false, nonMutableBindings));
3922 if (variant.oneArrayOnly)
3926 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
3927 createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);
3929 unboundedGroup->addChild(aliasingGroup.release());
3932 arrayGroup->addChild(unboundedGroup.release());
3935 mainGroup->addChild(arrayGroup.release());
3938 // Cases with a single mutable binding followed by an array of mutable bindings.
3939 // The array will use a single type beyond the mandatory ones.
3941 GroupPtr singleAndArrayGroup(new tcu::TestCaseGroup(testCtx, "single_and_array", "Tests using a single mutable binding followed by a mutable array binding"));
3943 for (const auto& descriptorType : basicDescriptorTypes)
3945 // Input attachments will not use arrays.
3946 if (descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
3949 if (de::contains(begin(mandatoryTypes), end(mandatoryTypes), descriptorType))
3952 const auto groupName = descriptorTypeStr(descriptorType);
3953 GroupPtr descTypeGroup(new tcu::TestCaseGroup(testCtx, groupName.c_str(), ""));
3955 for (const auto& aliasingCase : aliasingCases)
3957 GroupPtr aliasingGroup(new tcu::TestCaseGroup(testCtx, aliasingCase.name, ""));
3959 DescriptorSet::BindingPtrVector setBindings;
3960 std::vector<SingleBinding> arrayBindings;
3962 // Add single type beyond the mandatory ones.
3963 auto arrayBindingDescTypes = mandatoryTypes;
3964 arrayBindingDescTypes.push_back(descriptorType);
3966 // Single mutable descriptor as the first binding.
3967 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, arrayBindingDescTypes));
3969 // Descriptor array as the second binding.
3970 if (aliasingCase.aliasing)
3972 // With aliasing, the descriptor types rotate in each descriptor.
3973 for (size_t typeIdx = 0; typeIdx < arrayBindingDescTypes.size(); ++typeIdx)
3975 auto rotatedTypes = arrayBindingDescTypes;
3976 const auto beginPtr = &rotatedTypes[0];
3977 const auto endPtr = beginPtr + rotatedTypes.size();
3979 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
3981 arrayBindings.emplace_back(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes);
3986 // Without aliasing, all descriptors use the same type at the same time.
3987 const SingleBinding noAliasingBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, arrayBindingDescTypes);
3988 arrayBindings.resize(arrayBindingDescTypes.size(), noAliasingBinding);
3991 // Second binding: array binding.
3992 setBindings.emplace_back(new ArrayBinding(false/*unbounded*/, arrayBindings));
3994 // Create set and test variants.
3995 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
3996 createMutableTestVariants(testCtx, aliasingGroup.get(), descriptorSet, computeOnly);
3998 descTypeGroup->addChild(aliasingGroup.release());
4001 singleAndArrayGroup->addChild(descTypeGroup.release());
4004 mainGroup->addChild(singleAndArrayGroup.release());
4007 // Cases with several mutable non-array bindings.
4009 GroupPtr multipleGroup (new tcu::TestCaseGroup(testCtx, "multiple", "Tests using multiple mutable bindings"));
4010 GroupPtr mutableOnlyGroup (new tcu::TestCaseGroup(testCtx, "mutable_only", "Tests using only mutable descriptors"));
4011 GroupPtr mixedGroup (new tcu::TestCaseGroup(testCtx, "mixed", "Tests mixing mutable descriptors an non-mutable descriptors"));
4013 // Each descriptor will have a different type in every iteration, like in the one_array aliasing case.
4014 for (int groupIdx = 0; groupIdx < 2; ++groupIdx)
4016 const bool mixed = (groupIdx == 1);
4017 DescriptorSet::BindingPtrVector setBindings;
4019 for (size_t typeIdx = 0; typeIdx < mandatoryTypes.size(); ++typeIdx)
4021 auto rotatedTypes = mandatoryTypes;
4022 const auto beginPtr = &rotatedTypes[0];
4023 const auto endPtr = beginPtr + rotatedTypes.size();
4025 std::rotate(beginPtr, &rotatedTypes[typeIdx], endPtr);
4026 setBindings.emplace_back(new SingleBinding(VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, rotatedTypes));
4028 // Additional non-mutable binding interleaved with the mutable ones.
4030 setBindings.emplace_back(new SingleBinding(rotatedTypes[0], std::vector<VkDescriptorType>()));
4032 DescriptorSetPtr descriptorSet(new DescriptorSet(setBindings));
4034 const auto dstGroup = (mixed ? mixedGroup.get() : mutableOnlyGroup.get());
4035 createMutableTestVariants(testCtx, dstGroup, descriptorSet, computeOnly);
4038 multipleGroup->addChild(mutableOnlyGroup.release());
4039 multipleGroup->addChild(mixedGroup.release());
4040 mainGroup->addChild(multipleGroup.release());
4043 return mainGroup.release();