1 /*-------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2015 Google Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
21 * \brief Pipeline barrier tests
22 *//*--------------------------------------------------------------------*/
24 #include "vktMemoryPipelineBarrierTests.hpp"
26 #include "vktTestCaseUtil.hpp"
29 #include "vkPlatform.hpp"
30 #include "vkRefUtil.hpp"
31 #include "vkQueryUtil.hpp"
32 #include "vkMemUtil.hpp"
33 #include "vkTypeUtil.hpp"
34 #include "vkPrograms.hpp"
36 #include "tcuMaybe.hpp"
37 #include "tcuTextureUtil.hpp"
38 #include "tcuTestLog.hpp"
39 #include "tcuResultCollector.hpp"
40 #include "tcuTexture.hpp"
41 #include "tcuImageCompare.hpp"
43 #include "deUniquePtr.hpp"
44 #include "deStringUtil.hpp"
45 #include "deRandom.hpp"
71 using tcu::ConstPixelBufferAccess;
72 using tcu::PixelBufferAccess;
73 using tcu::TextureFormat;
74 using tcu::TextureLevel;
// Upper bounds for descriptor-backed buffer sizes used by the tests.
enum
{
    MAX_UNIFORM_BUFFER_SIZE = 1024,
    MAX_STORAGE_BUFFER_SIZE = (1<<28)
};
// \todo [mika] Add to utilities
// Integer division rounding towards positive infinity, e.g. divRoundUp(7, 2) == 4.
// Assumes a >= 0 and b > 0.
template<typename T>
T divRoundUp (const T& a, const T& b)
{
    return (a / b) + (a % b == 0 ? 0 : 1);
}
97 ALL_PIPELINE_STAGES = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
98 | vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
99 | vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT
100 | vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
101 | vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
102 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
103 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
104 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
105 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
106 | vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
107 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
108 | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
109 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
110 | vk::VK_PIPELINE_STAGE_TRANSFER_BIT
111 | vk::VK_PIPELINE_STAGE_HOST_BIT
116 ALL_ACCESSES = vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT
117 | vk::VK_ACCESS_INDEX_READ_BIT
118 | vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
119 | vk::VK_ACCESS_UNIFORM_READ_BIT
120 | vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
121 | vk::VK_ACCESS_SHADER_READ_BIT
122 | vk::VK_ACCESS_SHADER_WRITE_BIT
123 | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
124 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
125 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
126 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
127 | vk::VK_ACCESS_TRANSFER_READ_BIT
128 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
129 | vk::VK_ACCESS_HOST_READ_BIT
130 | vk::VK_ACCESS_HOST_WRITE_BIT
131 | vk::VK_ACCESS_MEMORY_READ_BIT
132 | vk::VK_ACCESS_MEMORY_WRITE_BIT
// Bitmask describing how the tested memory object is used. Combinations of
// these flags drive which prepare/execute/verify commands are generated.
enum Usage
{
    // Mapped host read and write
    USAGE_HOST_READ = (0x1u<<0),
    USAGE_HOST_WRITE = (0x1u<<1),

    // Copy and other transfer operations
    USAGE_TRANSFER_SRC = (0x1u<<2),
    USAGE_TRANSFER_DST = (0x1u<<3),

    // Buffer usage flags
    USAGE_INDEX_BUFFER = (0x1u<<4),
    USAGE_VERTEX_BUFFER = (0x1u<<5),

    USAGE_UNIFORM_BUFFER = (0x1u<<6),
    USAGE_STORAGE_BUFFER = (0x1u<<7),

    USAGE_UNIFORM_TEXEL_BUFFER = (0x1u<<8),
    USAGE_STORAGE_TEXEL_BUFFER = (0x1u<<9),

    // \todo [2016-03-09 mika] This is probably almost impossible to do
    USAGE_INDIRECT_BUFFER = (0x1u<<10),

    // Texture usage flags
    USAGE_SAMPLED_IMAGE = (0x1u<<11),
    USAGE_STORAGE_IMAGE = (0x1u<<12),
    USAGE_COLOR_ATTACHMENT = (0x1u<<13),
    USAGE_INPUT_ATTACHMENT = (0x1u<<14),
    USAGE_DEPTH_STENCIL_ATTACHMENT = (0x1u<<15)
};
166 bool supportsDeviceBufferWrites (Usage usage)
168 if (usage & USAGE_TRANSFER_DST)
171 if (usage & USAGE_STORAGE_BUFFER)
174 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
180 bool supportsDeviceImageWrites (Usage usage)
182 if (usage & USAGE_TRANSFER_DST)
185 if (usage & USAGE_STORAGE_IMAGE)
188 if (usage & USAGE_COLOR_ATTACHMENT)
// Sequential access enums
// Dense 0..N-1 counterparts of the sparse VkAccessFlagBits values, usable as
// array/bitset indices.
enum Access
{
    ACCESS_INDIRECT_COMMAND_READ_BIT = 0,
    ACCESS_INDEX_READ_BIT,
    ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
    ACCESS_UNIFORM_READ_BIT,
    ACCESS_INPUT_ATTACHMENT_READ_BIT,
    ACCESS_SHADER_READ_BIT,
    ACCESS_SHADER_WRITE_BIT,
    ACCESS_COLOR_ATTACHMENT_READ_BIT,
    ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
    ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
    ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
    ACCESS_TRANSFER_READ_BIT,
    ACCESS_TRANSFER_WRITE_BIT,
    ACCESS_HOST_READ_BIT,
    ACCESS_HOST_WRITE_BIT,
    ACCESS_MEMORY_READ_BIT,
    ACCESS_MEMORY_WRITE_BIT,

    ACCESS_LAST
};
// Sequential stage enums
// Dense 0..N-1 counterparts of the sparse VkPipelineStageFlagBits values.
// PIPELINESTAGE_LAST doubles as the "invalid" sentinel returned by
// pipelineStageFlagToPipelineStage().
enum PipelineStage
{
    PIPELINESTAGE_TOP_OF_PIPE_BIT = 0,
    PIPELINESTAGE_BOTTOM_OF_PIPE_BIT,
    PIPELINESTAGE_DRAW_INDIRECT_BIT,
    PIPELINESTAGE_VERTEX_INPUT_BIT,
    PIPELINESTAGE_VERTEX_SHADER_BIT,
    PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT,
    PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT,
    PIPELINESTAGE_GEOMETRY_SHADER_BIT,
    PIPELINESTAGE_FRAGMENT_SHADER_BIT,
    PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT,
    PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT,
    PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
    PIPELINESTAGE_COMPUTE_SHADER_BIT,
    PIPELINESTAGE_TRANSFER_BIT,
    PIPELINESTAGE_HOST_BIT,

    PIPELINESTAGE_LAST
};
240 PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flags)
244 case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT: return PIPELINESTAGE_TOP_OF_PIPE_BIT;
245 case vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT: return PIPELINESTAGE_BOTTOM_OF_PIPE_BIT;
246 case vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT: return PIPELINESTAGE_DRAW_INDIRECT_BIT;
247 case vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT: return PIPELINESTAGE_VERTEX_INPUT_BIT;
248 case vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT: return PIPELINESTAGE_VERTEX_SHADER_BIT;
249 case vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT;
250 case vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT;
251 case vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT: return PIPELINESTAGE_GEOMETRY_SHADER_BIT;
252 case vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT: return PIPELINESTAGE_FRAGMENT_SHADER_BIT;
253 case vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT;
254 case vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT;
255 case vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT: return PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
256 case vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT: return PIPELINESTAGE_COMPUTE_SHADER_BIT;
257 case vk::VK_PIPELINE_STAGE_TRANSFER_BIT: return PIPELINESTAGE_TRANSFER_BIT;
258 case vk::VK_PIPELINE_STAGE_HOST_BIT: return PIPELINESTAGE_HOST_BIT;
261 DE_FATAL("Unknown pipeline stage flags");
262 return PIPELINESTAGE_LAST;
266 Usage operator| (Usage a, Usage b)
268 return (Usage)((deUint32)a | (deUint32)b);
271 Usage operator& (Usage a, Usage b)
273 return (Usage)((deUint32)a & (deUint32)b);
276 string usageToName (Usage usage)
281 const char* const name;
284 { USAGE_HOST_READ, "host_read" },
285 { USAGE_HOST_WRITE, "host_write" },
287 { USAGE_TRANSFER_SRC, "transfer_src" },
288 { USAGE_TRANSFER_DST, "transfer_dst" },
290 { USAGE_INDEX_BUFFER, "index_buffer" },
291 { USAGE_VERTEX_BUFFER, "vertex_buffer" },
292 { USAGE_UNIFORM_BUFFER, "uniform_buffer" },
293 { USAGE_STORAGE_BUFFER, "storage_buffer" },
294 { USAGE_UNIFORM_TEXEL_BUFFER, "uniform_texel_buffer" },
295 { USAGE_STORAGE_TEXEL_BUFFER, "storage_texel_buffer" },
296 { USAGE_INDIRECT_BUFFER, "indirect_buffer" },
297 { USAGE_SAMPLED_IMAGE, "image_sampled" },
298 { USAGE_STORAGE_IMAGE, "storage_image" },
299 { USAGE_COLOR_ATTACHMENT, "color_attachment" },
300 { USAGE_INPUT_ATTACHMENT, "input_attachment" },
301 { USAGE_DEPTH_STENCIL_ATTACHMENT, "depth_stencil_attachment" },
304 std::ostringstream stream;
307 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usageNames); usageNdx++)
309 if (usage & usageNames[usageNdx].usage)
316 stream << usageNames[usageNdx].name;
323 vk::VkBufferUsageFlags usageToBufferUsageFlags (Usage usage)
325 vk::VkBufferUsageFlags flags = 0;
327 if (usage & USAGE_TRANSFER_SRC)
328 flags |= vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
330 if (usage & USAGE_TRANSFER_DST)
331 flags |= vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT;
333 if (usage & USAGE_INDEX_BUFFER)
334 flags |= vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
336 if (usage & USAGE_VERTEX_BUFFER)
337 flags |= vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
339 if (usage & USAGE_INDIRECT_BUFFER)
340 flags |= vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
342 if (usage & USAGE_UNIFORM_BUFFER)
343 flags |= vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
345 if (usage & USAGE_STORAGE_BUFFER)
346 flags |= vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
348 if (usage & USAGE_UNIFORM_TEXEL_BUFFER)
349 flags |= vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
351 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
352 flags |= vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
357 vk::VkImageUsageFlags usageToImageUsageFlags (Usage usage)
359 vk::VkImageUsageFlags flags = 0;
361 if (usage & USAGE_TRANSFER_SRC)
362 flags |= vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
364 if (usage & USAGE_TRANSFER_DST)
365 flags |= vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT;
367 if (usage & USAGE_SAMPLED_IMAGE)
368 flags |= vk::VK_IMAGE_USAGE_SAMPLED_BIT;
370 if (usage & USAGE_STORAGE_IMAGE)
371 flags |= vk::VK_IMAGE_USAGE_STORAGE_BIT;
373 if (usage & USAGE_COLOR_ATTACHMENT)
374 flags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
376 if (usage & USAGE_INPUT_ATTACHMENT)
377 flags |= vk::VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
379 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
380 flags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
385 vk::VkPipelineStageFlags usageToStageFlags (Usage usage)
387 vk::VkPipelineStageFlags flags = 0;
389 if (usage & (USAGE_HOST_READ|USAGE_HOST_WRITE))
390 flags |= vk::VK_PIPELINE_STAGE_HOST_BIT;
392 if (usage & (USAGE_TRANSFER_SRC|USAGE_TRANSFER_DST))
393 flags |= vk::VK_PIPELINE_STAGE_TRANSFER_BIT;
395 if (usage & (USAGE_VERTEX_BUFFER|USAGE_INDEX_BUFFER))
396 flags |= vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
398 if (usage & USAGE_INDIRECT_BUFFER)
399 flags |= vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
402 (USAGE_UNIFORM_BUFFER
403 | USAGE_STORAGE_BUFFER
404 | USAGE_UNIFORM_TEXEL_BUFFER
405 | USAGE_STORAGE_TEXEL_BUFFER
406 | USAGE_SAMPLED_IMAGE
407 | USAGE_STORAGE_IMAGE))
409 flags |= (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
410 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
411 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
412 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
413 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
414 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
417 if (usage & USAGE_INPUT_ATTACHMENT)
418 flags |= vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
420 if (usage & USAGE_COLOR_ATTACHMENT)
421 flags |= vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
423 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
425 flags |= vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
426 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
432 vk::VkAccessFlags usageToAccessFlags (Usage usage)
434 vk::VkAccessFlags flags = 0;
436 if (usage & USAGE_HOST_READ)
437 flags |= vk::VK_ACCESS_HOST_READ_BIT;
439 if (usage & USAGE_HOST_WRITE)
440 flags |= vk::VK_ACCESS_HOST_WRITE_BIT;
442 if (usage & USAGE_TRANSFER_SRC)
443 flags |= vk::VK_ACCESS_TRANSFER_READ_BIT;
445 if (usage & USAGE_TRANSFER_DST)
446 flags |= vk::VK_ACCESS_TRANSFER_WRITE_BIT;
448 if (usage & USAGE_INDEX_BUFFER)
449 flags |= vk::VK_ACCESS_INDEX_READ_BIT;
451 if (usage & USAGE_VERTEX_BUFFER)
452 flags |= vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
454 if (usage & (USAGE_UNIFORM_BUFFER | USAGE_UNIFORM_TEXEL_BUFFER))
455 flags |= vk::VK_ACCESS_UNIFORM_READ_BIT;
457 if (usage & USAGE_SAMPLED_IMAGE)
458 flags |= vk::VK_ACCESS_SHADER_READ_BIT;
460 if (usage & (USAGE_STORAGE_BUFFER
461 | USAGE_STORAGE_TEXEL_BUFFER
462 | USAGE_STORAGE_IMAGE))
463 flags |= vk::VK_ACCESS_SHADER_READ_BIT | vk::VK_ACCESS_SHADER_WRITE_BIT;
465 if (usage & USAGE_INDIRECT_BUFFER)
466 flags |= vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
468 if (usage & USAGE_COLOR_ATTACHMENT)
469 flags |= vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
471 if (usage & USAGE_INPUT_ATTACHMENT)
472 flags |= vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
474 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
475 flags |= vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
476 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
484 vk::VkDeviceSize size;
485 vk::VkSharingMode sharing;
488 vk::Move<vk::VkCommandBuffer> createCommandBuffer (const vk::DeviceInterface& vkd,
490 vk::VkCommandPool pool,
491 vk::VkCommandBufferLevel level)
493 const vk::VkCommandBufferAllocateInfo bufferInfo =
495 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
503 return vk::allocateCommandBuffer(vkd, device, &bufferInfo);
506 vk::Move<vk::VkCommandBuffer> createBeginCommandBuffer (const vk::DeviceInterface& vkd,
508 vk::VkCommandPool pool,
509 vk::VkCommandBufferLevel level)
511 const vk::VkCommandBufferInheritanceInfo inheritInfo =
513 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
522 const vk::VkCommandBufferBeginInfo beginInfo =
524 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
527 (level == vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY ? &inheritInfo : (const vk::VkCommandBufferInheritanceInfo*)DE_NULL),
530 vk::Move<vk::VkCommandBuffer> commandBuffer (createCommandBuffer(vkd, device, pool, level));
532 vkd.beginCommandBuffer(*commandBuffer, &beginInfo);
534 return commandBuffer;
537 vk::Move<vk::VkCommandPool> createCommandPool (const vk::DeviceInterface& vkd,
539 deUint32 queueFamilyIndex)
541 const vk::VkCommandPoolCreateInfo poolInfo =
543 vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
546 vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
550 return vk::createCommandPool(vkd, device, &poolInfo);
553 vk::Move<vk::VkBuffer> createBuffer (const vk::DeviceInterface& vkd,
555 vk::VkDeviceSize size,
556 vk::VkBufferUsageFlags usage,
557 vk::VkSharingMode sharingMode,
558 const vector<deUint32>& queueFamilies)
560 const vk::VkBufferCreateInfo createInfo =
562 vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
569 (deUint32)queueFamilies.size(),
573 return vk::createBuffer(vkd, device, &createInfo);
576 vk::Move<vk::VkDeviceMemory> allocMemory (const vk::DeviceInterface& vkd,
578 vk::VkDeviceSize size,
579 deUint32 memoryTypeIndex)
581 const vk::VkMemoryAllocateInfo alloc =
583 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
590 return vk::allocateMemory(vkd, device, &alloc);
593 vk::Move<vk::VkDeviceMemory> bindBufferMemory (const vk::InstanceInterface& vki,
594 const vk::DeviceInterface& vkd,
595 vk::VkPhysicalDevice physicalDevice,
598 vk::VkMemoryPropertyFlags properties)
600 const vk::VkMemoryRequirements memoryRequirements = vk::getBufferMemoryRequirements(vkd, device, buffer);
601 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
602 deUint32 memoryTypeIndex;
604 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
606 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
607 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
611 const vk::VkMemoryAllocateInfo allocationInfo =
613 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
615 memoryRequirements.size,
618 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
620 VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0));
624 catch (const vk::Error& error)
626 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
627 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
629 // Try next memory type/heap if out of memory
633 // Throw all other errors forward
640 TCU_FAIL("Failed to allocate memory for buffer");
643 vk::Move<vk::VkDeviceMemory> bindImageMemory (const vk::InstanceInterface& vki,
644 const vk::DeviceInterface& vkd,
645 vk::VkPhysicalDevice physicalDevice,
648 vk::VkMemoryPropertyFlags properties)
650 const vk::VkMemoryRequirements memoryRequirements = vk::getImageMemoryRequirements(vkd, device, image);
651 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
652 deUint32 memoryTypeIndex;
654 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
656 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
657 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
661 const vk::VkMemoryAllocateInfo allocationInfo =
663 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
665 memoryRequirements.size,
668 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
670 VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0));
674 catch (const vk::Error& error)
676 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
677 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
679 // Try next memory type/heap if out of memory
683 // Throw all other errors forward
690 TCU_FAIL("Failed to allocate memory for image");
693 void queueRun (const vk::DeviceInterface& vkd,
695 vk::VkCommandBuffer commandBuffer)
697 const vk::VkSubmitInfo submitInfo =
699 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
704 (const vk::VkPipelineStageFlags*)DE_NULL,
713 VK_CHECK(vkd.queueSubmit(queue, 1, &submitInfo, 0));
714 VK_CHECK(vkd.queueWaitIdle(queue));
717 void* mapMemory (const vk::DeviceInterface& vkd,
719 vk::VkDeviceMemory memory,
720 vk::VkDeviceSize size)
724 VK_CHECK(vkd.mapMemory(device, memory, 0, size, 0, &ptr));
729 class ReferenceMemory
732 ReferenceMemory (size_t size);
734 void set (size_t pos, deUint8 val);
735 deUint8 get (size_t pos) const;
736 bool isDefined (size_t pos) const;
738 void setDefined (size_t offset, size_t size, const void* data);
739 void setUndefined (size_t offset, size_t size);
740 void setData (size_t offset, size_t size, const void* data);
742 size_t getSize (void) const { return m_data.size(); }
745 vector<deUint8> m_data;
746 vector<deUint64> m_defined;
749 ReferenceMemory::ReferenceMemory (size_t size)
751 , m_defined (size / 64 + (size % 64 == 0 ? 0 : 1), 0ull)
755 void ReferenceMemory::set (size_t pos, deUint8 val)
757 DE_ASSERT(pos < m_data.size());
760 m_defined[pos / 64] |= 0x1ull << (pos % 64);
763 void ReferenceMemory::setData (size_t offset, size_t size, const void* data_)
765 const deUint8* data = (const deUint8*)data_;
767 DE_ASSERT(offset < m_data.size());
768 DE_ASSERT(offset + size <= m_data.size());
770 // \todo [2016-03-09 mika] Optimize
771 for (size_t pos = 0; pos < size; pos++)
773 m_data[offset + pos] = data[pos];
774 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
778 void ReferenceMemory::setUndefined (size_t offset, size_t size)
780 // \todo [2016-03-09 mika] Optimize
781 for (size_t pos = 0; pos < size; pos++)
782 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
785 deUint8 ReferenceMemory::get (size_t pos) const
787 DE_ASSERT(pos < m_data.size());
788 DE_ASSERT(isDefined(pos));
792 bool ReferenceMemory::isDefined (size_t pos) const
794 DE_ASSERT(pos < m_data.size());
796 return (m_defined[pos / 64] & (0x1ull << (pos % 64))) != 0;
802 Memory (const vk::InstanceInterface& vki,
803 const vk::DeviceInterface& vkd,
804 vk::VkPhysicalDevice physicalDevice,
806 vk::VkDeviceSize size,
807 deUint32 memoryTypeIndex,
808 vk::VkDeviceSize maxBufferSize,
809 deInt32 maxImageWidth,
810 deInt32 maxImageHeight);
812 vk::VkDeviceSize getSize (void) const { return m_size; }
813 vk::VkDeviceSize getMaxBufferSize (void) const { return m_maxBufferSize; }
814 bool getSupportBuffers (void) const { return m_maxBufferSize > 0; }
816 deInt32 getMaxImageWidth (void) const { return m_maxImageWidth; }
817 deInt32 getMaxImageHeight (void) const { return m_maxImageHeight; }
818 bool getSupportImages (void) const { return m_maxImageWidth > 0; }
820 const vk::VkMemoryType& getMemoryType (void) const { return m_memoryType; }
821 deUint32 getMemoryTypeIndex (void) const { return m_memoryTypeIndex; }
822 vk::VkDeviceMemory getMemory (void) const { return *m_memory; }
825 const vk::VkDeviceSize m_size;
826 const deUint32 m_memoryTypeIndex;
827 const vk::VkMemoryType m_memoryType;
828 const vk::Unique<vk::VkDeviceMemory> m_memory;
829 const vk::VkDeviceSize m_maxBufferSize;
830 const deInt32 m_maxImageWidth;
831 const deInt32 m_maxImageHeight;
834 vk::VkMemoryType getMemoryTypeInfo (const vk::InstanceInterface& vki,
835 vk::VkPhysicalDevice device,
836 deUint32 memoryTypeIndex)
838 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, device);
840 DE_ASSERT(memoryTypeIndex < memoryProperties.memoryTypeCount);
842 return memoryProperties.memoryTypes[memoryTypeIndex];
845 vk::VkDeviceSize findMaxBufferSize (const vk::DeviceInterface& vkd,
848 vk::VkBufferUsageFlags usage,
849 vk::VkSharingMode sharingMode,
850 const vector<deUint32>& queueFamilies,
852 vk::VkDeviceSize memorySize,
853 deUint32 memoryTypeIndex)
855 vk::VkDeviceSize lastSuccess = 0;
856 vk::VkDeviceSize currentSize = memorySize / 2;
859 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, memorySize, usage, sharingMode, queueFamilies));
860 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
862 if (requirements.size == memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
866 for (vk::VkDeviceSize stepSize = memorySize / 4; currentSize > 0; stepSize /= 2)
868 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, currentSize, usage, sharingMode, queueFamilies));
869 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
871 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
873 lastSuccess = currentSize;
874 currentSize += stepSize;
877 currentSize -= stepSize;
886 // Round size down maximum W * H * 4, where W and H < 4096
887 vk::VkDeviceSize roundBufferSizeToWxHx4 (vk::VkDeviceSize size)
889 const vk::VkDeviceSize maxTextureSize = 4096;
890 vk::VkDeviceSize maxTexelCount = size / 4;
891 vk::VkDeviceSize bestW = de::max(maxTexelCount, maxTextureSize);
892 vk::VkDeviceSize bestH = maxTexelCount / bestW;
894 // \todo [2016-03-09 mika] Could probably be faster?
895 for (vk::VkDeviceSize w = 1; w * w < maxTexelCount && w < maxTextureSize && bestW * bestH * 4 < size; w++)
897 const vk::VkDeviceSize h = maxTexelCount / w;
899 if (bestW * bestH < w * h)
906 return bestW * bestH * 4;
909 // Find RGBA8 image size that has exactly "size" of number of bytes.
910 // "size" must be W * H * 4 where W and H < 4096
911 IVec2 findImageSizeWxHx4 (vk::VkDeviceSize size)
913 const vk::VkDeviceSize maxTextureSize = 4096;
914 vk::VkDeviceSize texelCount = size / 4;
916 DE_ASSERT((size % 4) == 0);
918 // \todo [2016-03-09 mika] Could probably be faster?
919 for (vk::VkDeviceSize w = 1; w < maxTextureSize && w < texelCount; w++)
921 const vk::VkDeviceSize h = texelCount / w;
923 if ((texelCount % w) == 0 && h < maxTextureSize)
924 return IVec2((int)w, (int)h);
927 DE_FATAL("Invalid size");
928 return IVec2(-1, -1);
931 IVec2 findMaxRGBA8ImageSize (const vk::DeviceInterface& vkd,
934 vk::VkImageUsageFlags usage,
935 vk::VkSharingMode sharingMode,
936 const vector<deUint32>& queueFamilies,
938 vk::VkDeviceSize memorySize,
939 deUint32 memoryTypeIndex)
941 IVec2 lastSuccess (0);
945 const deUint32 texelCount = (deUint32)(memorySize / 4);
946 const deUint32 width = (deUint32)deFloatSqrt((float)texelCount);
947 const deUint32 height = texelCount / width;
949 currentSize[0] = deMaxu32(width, height);
950 currentSize[1] = deMinu32(width, height);
953 for (deInt32 stepSize = currentSize[0] / 2; currentSize[0] > 0; stepSize /= 2)
955 const vk::VkImageCreateInfo createInfo =
957 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
961 vk::VK_IMAGE_TYPE_2D,
962 vk::VK_FORMAT_R8G8B8A8_UNORM,
964 (deUint32)currentSize[0],
965 (deUint32)currentSize[1],
969 vk::VK_SAMPLE_COUNT_1_BIT,
970 vk::VK_IMAGE_TILING_OPTIMAL,
973 (deUint32)queueFamilies.size(),
975 vk::VK_IMAGE_LAYOUT_UNDEFINED
977 const vk::Unique<vk::VkImage> image (vk::createImage(vkd, device, &createInfo));
978 const vk::VkMemoryRequirements requirements (vk::getImageMemoryRequirements(vkd, device, *image));
980 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
982 lastSuccess = currentSize;
983 currentSize[0] += stepSize;
984 currentSize[1] += stepSize;
988 currentSize[0] -= stepSize;
989 currentSize[1] -= stepSize;
999 Memory::Memory (const vk::InstanceInterface& vki,
1000 const vk::DeviceInterface& vkd,
1001 vk::VkPhysicalDevice physicalDevice,
1002 vk::VkDevice device,
1003 vk::VkDeviceSize size,
1004 deUint32 memoryTypeIndex,
1005 vk::VkDeviceSize maxBufferSize,
1006 deInt32 maxImageWidth,
1007 deInt32 maxImageHeight)
1009 , m_memoryTypeIndex (memoryTypeIndex)
1010 , m_memoryType (getMemoryTypeInfo(vki, physicalDevice, memoryTypeIndex))
1011 , m_memory (allocMemory(vkd, device, size, memoryTypeIndex))
1012 , m_maxBufferSize (maxBufferSize)
1013 , m_maxImageWidth (maxImageWidth)
1014 , m_maxImageHeight (maxImageHeight)
1021 Context (const vk::InstanceInterface& vki,
1022 const vk::DeviceInterface& vkd,
1023 vk::VkPhysicalDevice physicalDevice,
1024 vk::VkDevice device,
1026 deUint32 queueFamilyIndex,
1027 const vector<pair<deUint32, vk::VkQueue> >& queues,
1028 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection)
1031 , m_physicalDevice (physicalDevice)
1034 , m_queueFamilyIndex (queueFamilyIndex)
1036 , m_commandPool (createCommandPool(vkd, device, queueFamilyIndex))
1037 , m_binaryCollection (binaryCollection)
1039 for (size_t queueNdx = 0; queueNdx < m_queues.size(); queueNdx++)
1040 m_queueFamilies.push_back(m_queues[queueNdx].first);
1043 const vk::InstanceInterface& getInstanceInterface (void) const { return m_vki; }
1044 vk::VkPhysicalDevice getPhysicalDevice (void) const { return m_physicalDevice; }
1045 vk::VkDevice getDevice (void) const { return m_device; }
1046 const vk::DeviceInterface& getDeviceInterface (void) const { return m_vkd; }
1047 vk::VkQueue getQueue (void) const { return m_queue; }
1048 deUint32 getQueueFamily (void) const { return m_queueFamilyIndex; }
1049 const vector<pair<deUint32, vk::VkQueue> >& getQueues (void) const { return m_queues; }
1050 const vector<deUint32> getQueueFamilies (void) const { return m_queueFamilies; }
1051 vk::VkCommandPool getCommandPool (void) const { return *m_commandPool; }
1052 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_binaryCollection; }
1055 const vk::InstanceInterface& m_vki;
1056 const vk::DeviceInterface& m_vkd;
1057 const vk::VkPhysicalDevice m_physicalDevice;
1058 const vk::VkDevice m_device;
1059 const vk::VkQueue m_queue;
1060 const deUint32 m_queueFamilyIndex;
1061 const vector<pair<deUint32, vk::VkQueue> > m_queues;
1062 const vk::Unique<vk::VkCommandPool> m_commandPool;
1063 const vk::ProgramCollection<vk::ProgramBinary>& m_binaryCollection;
1064 vector<deUint32> m_queueFamilies;
1067 class PrepareContext
1070 PrepareContext (const Context& context,
1071 const Memory& memory)
1072 : m_context (context)
1077 const Memory& getMemory (void) const { return m_memory; }
1078 const Context& getContext (void) const { return m_context; }
1079 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_context.getBinaryCollection(); }
1081 void setBuffer (vk::Move<vk::VkBuffer> buffer,
1082 vk::VkDeviceSize size)
1084 DE_ASSERT(!m_currentImage);
1085 DE_ASSERT(!m_currentBuffer);
1087 m_currentBuffer = buffer;
1088 m_currentBufferSize = size;
1091 vk::VkBuffer getBuffer (void) const { return *m_currentBuffer; }
1092 vk::VkDeviceSize getBufferSize (void) const
1094 DE_ASSERT(m_currentBuffer);
1095 return m_currentBufferSize;
1098 void releaseBuffer (void) { m_currentBuffer.disown(); }
1100 void setImage (vk::Move<vk::VkImage> image,
1101 vk::VkImageLayout layout,
1102 vk::VkDeviceSize memorySize,
1106 DE_ASSERT(!m_currentImage);
1107 DE_ASSERT(!m_currentBuffer);
1109 m_currentImage = image;
1110 m_currentImageMemorySize = memorySize;
1111 m_currentImageLayout = layout;
1112 m_currentImageWidth = width;
1113 m_currentImageHeight = height;
1116 void setImageLayout (vk::VkImageLayout layout)
1118 DE_ASSERT(m_currentImage);
1119 m_currentImageLayout = layout;
1122 vk::VkImage getImage (void) const { return *m_currentImage; }
1123 deInt32 getImageWidth (void) const
1125 DE_ASSERT(m_currentImage);
1126 return m_currentImageWidth;
1128 deInt32 getImageHeight (void) const
1130 DE_ASSERT(m_currentImage);
1131 return m_currentImageHeight;
1133 vk::VkDeviceSize getImageMemorySize (void) const
1135 DE_ASSERT(m_currentImage);
1136 return m_currentImageMemorySize;
1139 void releaseImage (void) { m_currentImage.disown(); }
1141 vk::VkImageLayout getImageLayout (void) const
1143 DE_ASSERT(m_currentImage);
1144 return m_currentImageLayout;
1148 const Context& m_context;
1149 const Memory& m_memory;
1151 vk::Move<vk::VkBuffer> m_currentBuffer;
1152 vk::VkDeviceSize m_currentBufferSize;
1154 vk::Move<vk::VkImage> m_currentImage;
1155 vk::VkDeviceSize m_currentImageMemorySize;
1156 vk::VkImageLayout m_currentImageLayout;
1157 deInt32 m_currentImageWidth;
1158 deInt32 m_currentImageHeight;
1161 class ExecuteContext
1164 ExecuteContext (const Context& context)
1165 : m_context (context)
1169 const Context& getContext (void) const { return m_context; }
1170 void setMapping (void* ptr) { m_mapping = ptr; }
1171 void* getMapping (void) const { return m_mapping; }
1174 const Context& m_context;
1181 VerifyContext (TestLog& log,
1182 tcu::ResultCollector& resultCollector,
1183 const Context& context,
1184 vk::VkDeviceSize size)
1186 , m_resultCollector (resultCollector)
1187 , m_context (context)
1188 , m_reference ((size_t)size)
1192 const Context& getContext (void) const { return m_context; }
1193 TestLog& getLog (void) const { return m_log; }
1194 tcu::ResultCollector& getResultCollector (void) const { return m_resultCollector; }
1196 ReferenceMemory& getReference (void) { return m_reference; }
1197 TextureLevel& getReferenceImage (void) { return m_referenceImage;}
1201 tcu::ResultCollector& m_resultCollector;
1202 const Context& m_context;
1203 ReferenceMemory m_reference;
1204 TextureLevel m_referenceImage;
1210 // Constructor should allocate all non-vulkan resources.
1211 virtual ~Command (void) {}
1213 // Get name of the command
1214 virtual const char* getName (void) const = 0;
1216 // Log prepare operations
1217 virtual void logPrepare (TestLog&, size_t) const {}
1218 // Log executed operations
1219 virtual void logExecute (TestLog&, size_t) const {}
1221 // Prepare should allocate all vulkan resources and resources that require
1222 // that buffer or memory has been already allocated. This should build all
1223 // command buffers etc.
1224 virtual void prepare (PrepareContext&) {}
1226 // Execute command. Write or read mapped memory, submit commands to queue
1228 virtual void execute (ExecuteContext&) {}
1230 // Verify that results are correct.
1231 virtual void verify (VerifyContext&, size_t) {}
1234 // Allow only inheritance
1239 Command (const Command&);
1240 Command& operator& (const Command&);
// Command that maps the whole device allocation into host address space and
// publishes the pointer through ExecuteContext::setMapping().
1243 class Map : public Command
1248 const char* getName (void) const { return "Map"; }
1251 void logExecute (TestLog& log, size_t commandIndex) const
1253 log << TestLog::Message << commandIndex << ":" << getName() << " Map memory" << TestLog::EndMessage;
// Capture the memory handle and size; the actual vkMapMemory happens in execute().
1256 void prepare (PrepareContext& context)
1258 m_memory = context.getMemory().getMemory();
1259 m_size = context.getMemory().getSize();
1262 void execute (ExecuteContext& context)
1264 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1265 const vk::VkDevice device = context.getContext().getDevice();
1267 context.setMapping(mapMemory(vkd, device, m_memory, m_size));
1271 vk::VkDeviceMemory m_memory;
1272 vk::VkDeviceSize m_size;
// Command that unmaps the device allocation and clears the published mapping.
1275 class UnMap : public Command
1280 const char* getName (void) const { return "UnMap"; }
1282 void logExecute (TestLog& log, size_t commandIndex) const
1284 log << TestLog::Message << commandIndex << ": Unmap memory" << TestLog::EndMessage;
1287 void prepare (PrepareContext& context)
1289 m_memory = context.getMemory().getMemory();
1292 void execute (ExecuteContext& context)
1294 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1295 const vk::VkDevice device = context.getContext().getDevice();
1297 vkd.unmapMemory(device, m_memory);
// Invalidate the stored host pointer so later commands cannot use a stale mapping.
1298 context.setMapping(DE_NULL);
1302 vk::VkDeviceMemory m_memory;
// Command that invalidates the full mapped range so host reads observe device
// writes (needed for non-coherent memory).
1305 class Invalidate : public Command
1308 Invalidate (void) {}
1309 ~Invalidate (void) {}
1310 const char* getName (void) const { return "Invalidate"; }
1312 void logExecute (TestLog& log, size_t commandIndex) const
1314 log << TestLog::Message << commandIndex << ": Invalidate mapped memory" << TestLog::EndMessage;
1317 void prepare (PrepareContext& context)
1319 m_memory = context.getMemory().getMemory();
1320 m_size = context.getMemory().getSize();
1323 void execute (ExecuteContext& context)
1325 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1326 const vk::VkDevice device = context.getContext().getDevice();
// Invalidate from offset 0 over the whole allocation.
1328 vk::invalidateMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1332 vk::VkDeviceMemory m_memory;
1333 vk::VkDeviceSize m_size;
// Command that flushes the full mapped range so device reads observe host
// writes (counterpart of Invalidate for non-coherent memory).
1336 class Flush : public Command
1341 const char* getName (void) const { return "Flush"; }
1343 void logExecute (TestLog& log, size_t commandIndex) const
1345 log << TestLog::Message << commandIndex << ": Flush mapped memory" << TestLog::EndMessage;
1348 void prepare (PrepareContext& context)
1350 m_memory = context.getMemory().getMemory();
1351 m_size = context.getMemory().getSize();
1354 void execute (ExecuteContext& context)
1356 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1357 const vk::VkDevice device = context.getContext().getDevice();
1359 vk::flushMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1363 vk::VkDeviceMemory m_memory;
1364 vk::VkDeviceSize m_size;
1367 // Host memory reads and writes
// Seeded pseudo-random host access through the current mapping. Read results
// are captured in m_readData during execute() and checked against the
// reference memory in verify().
1368 class HostMemoryAccess : public Command
1371 HostMemoryAccess (bool read, bool write, deUint32 seed);
1372 ~HostMemoryAccess (void) {}
1373 const char* getName (void) const { return "HostMemoryAccess"; }
1375 void logExecute (TestLog& log, size_t commandIndex) const;
1376 void prepare (PrepareContext& context);
1377 void execute (ExecuteContext& context);
1379 void verify (VerifyContext& context, size_t commandIndex);
// Seed shared by execute() and verify() so both generate the same byte stream.
1384 const deUint32 m_seed;
1387 vector<deUint8> m_readData;
// Constructor: stores the read/write flags and RNG seed (init-list elided in
// this sampled listing).
1390 HostMemoryAccess::HostMemoryAccess (bool read, bool write, deUint32 seed)
1397 void HostMemoryAccess::logExecute (TestLog& log, size_t commandIndex) const
1399 log << TestLog::Message << commandIndex << ": Host memory access:" << (m_read ? " read" : "") << (m_write ? " write" : "") << ", seed: " << m_seed << TestLog::EndMessage;
// Size the read-back buffer to the whole allocation ahead of execution.
1402 void HostMemoryAccess::prepare (PrepareContext& context)
1404 m_size = (size_t)context.getMemory().getSize();
1407 m_readData.resize(m_size, 0);
// Perform the host access through the mapped pointer. Three modes:
// read+write XORs each byte with a seeded mask (capturing the old value),
// read-only just captures, write-only stores seeded random bytes.
// NOTE(review): listing is sampled — the branch conditions and the write-only
// store line are elided here.
1410 void HostMemoryAccess::execute (ExecuteContext& context)
1412 de::Random rng (m_seed);
1413 deUint8* const ptr = (deUint8*)context.getMapping();
1415 if (m_read && m_write)
1417 for (size_t pos = 0; pos < m_size; pos++)
1419 const deUint8 mask = rng.getUint8();
1420 const deUint8 value = ptr[pos];
// Remember what was read so verify() can diff it against the reference.
1422 m_readData[pos] = value;
1423 ptr[pos] = value ^ mask;
1428 for (size_t pos = 0; pos < m_size; pos++)
1430 const deUint8 value = ptr[pos];
1432 m_readData[pos] = value;
1437 for (size_t pos = 0; pos < m_size; pos++)
1439 const deUint8 value = rng.getUint8();
1445 DE_FATAL("Host memory access without read or write.");
// Check the bytes captured in execute() against the reference memory and then
// update the reference to mirror what execute() wrote. Only positions whose
// reference value is defined are compared. Re-seeding the RNG with m_seed
// reproduces the exact mask/value stream used during execution.
1448 void HostMemoryAccess::verify (VerifyContext& context, size_t commandIndex)
1450 tcu::ResultCollector& resultCollector = context.getResultCollector();
1451 ReferenceMemory& reference = context.getReference();
1452 de::Random rng (m_seed);
1454 if (m_read && m_write)
1456 for (size_t pos = 0; pos < m_size; pos++)
1458 const deUint8 mask = rng.getUint8();
1459 const deUint8 value = m_readData[pos];
1461 if (reference.isDefined(pos))
1463 if (value != reference.get(pos))
1465 resultCollector.fail(
1466 de::toString(commandIndex) + ":" + getName()
1467 + " Result differs from reference, Expected: "
1468 + de::toString(tcu::toHex<8>(reference.get(pos)))
1470 + de::toString(tcu::toHex<8>(value))
1472 + de::toString(pos));
// Mirror the XOR write performed on the device memory.
1476 reference.set(pos, reference.get(pos) ^ mask);
1482 for (size_t pos = 0; pos < m_size; pos++)
1484 const deUint8 value = m_readData[pos];
1486 if (reference.isDefined(pos))
1488 if (value != reference.get(pos))
1490 resultCollector.fail(
1491 de::toString(commandIndex) + ":" + getName()
1492 + " Result differs from reference, Expected: "
1493 + de::toString(tcu::toHex<8>(reference.get(pos)))
1495 + de::toString(tcu::toHex<8>(value))
1497 + de::toString(pos));
// Write-only: the reference simply becomes the seeded random stream.
1505 for (size_t pos = 0; pos < m_size; pos++)
1507 const deUint8 value = rng.getUint8();
1509 reference.set(pos, value);
1513 DE_FATAL("Host memory access without read or write.");
// Command that creates a VkBuffer covering the largest size the allocation
// supports, with the given usage flags and sharing mode, and hands ownership
// to the PrepareContext.
1516 class CreateBuffer : public Command
1519 CreateBuffer (vk::VkBufferUsageFlags usage,
1520 vk::VkSharingMode sharing);
1521 ~CreateBuffer (void) {}
1522 const char* getName (void) const { return "CreateBuffer"; }
1524 void logPrepare (TestLog& log, size_t commandIndex) const;
1525 void prepare (PrepareContext& context);
1528 const vk::VkBufferUsageFlags m_usage;
1529 const vk::VkSharingMode m_sharing;
1532 CreateBuffer::CreateBuffer (vk::VkBufferUsageFlags usage,
1533 vk::VkSharingMode sharing)
1535 , m_sharing (sharing)
1539 void CreateBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1541 log << TestLog::Message << commandIndex << ":" << getName() << " Create buffer, Sharing mode: " << m_sharing << ", Usage: " << vk::getBufferUsageFlagsStr(m_usage) << TestLog::EndMessage;
1544 void CreateBuffer::prepare (PrepareContext& context)
1546 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1547 const vk::VkDevice device = context.getContext().getDevice();
// Largest buffer size that still fits in the memory allocation for this usage.
1548 const vk::VkDeviceSize bufferSize = context.getMemory().getMaxBufferSize();
1549 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
1551 context.setBuffer(createBuffer(vkd, device, bufferSize, m_usage, m_sharing, queueFamilies), bufferSize);
// Command that takes buffer ownership back from the context in prepare() and
// destroys the buffer during execute().
1554 class DestroyBuffer : public Command
1557 DestroyBuffer (void);
1558 ~DestroyBuffer (void) {}
1559 const char* getName (void) const { return "DestroyBuffer"; }
1561 void logExecute (TestLog& log, size_t commandIndex) const;
1562 void prepare (PrepareContext& context);
1563 void execute (ExecuteContext& context);
1566 vk::Move<vk::VkBuffer> m_buffer;
1569 DestroyBuffer::DestroyBuffer (void)
// Transfer ownership: wrap the context's buffer handle in a Move<> and tell
// the context it no longer owns it.
1573 void DestroyBuffer::prepare (PrepareContext& context)
1575 m_buffer = vk::Move<vk::VkBuffer>(vk::check(context.getBuffer()), vk::Deleter<vk::VkBuffer>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1576 context.releaseBuffer();
1579 void DestroyBuffer::logExecute (TestLog& log, size_t commandIndex) const
1581 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy buffer" << TestLog::EndMessage;
1584 void DestroyBuffer::execute (ExecuteContext& context)
1586 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1587 const vk::VkDevice device = context.getContext().getDevice();
// disown() so the Move<> wrapper does not double-destroy the handle.
1589 vkd.destroyBuffer(device, m_buffer.disown(), DE_NULL);
// Command that binds the test's memory allocation to the current buffer at
// offset 0.
1592 class BindBufferMemory : public Command
1595 BindBufferMemory (void) {}
1596 ~BindBufferMemory (void) {}
1597 const char* getName (void) const { return "BindBufferMemory"; }
1599 void logPrepare (TestLog& log, size_t commandIndex) const;
1600 void prepare (PrepareContext& context);
1603 void BindBufferMemory::logPrepare (TestLog& log, size_t commandIndex) const
1605 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to buffer" << TestLog::EndMessage;
1608 void BindBufferMemory::prepare (PrepareContext& context)
1610 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1611 const vk::VkDevice device = context.getContext().getDevice();
1613 VK_CHECK(vkd.bindBufferMemory(device, context.getBuffer(), context.getMemory().getMemory(), 0));
// Command that creates a 2D RGBA8 optimal-tiling image sized to the maximum
// dimensions the allocation supports, and resets the reference image to match.
1616 class CreateImage : public Command
1619 CreateImage (vk::VkImageUsageFlags usage,
1620 vk::VkSharingMode sharing);
1621 ~CreateImage (void) {}
1622 const char* getName (void) const { return "CreateImage"; }
1624 void logPrepare (TestLog& log, size_t commandIndex) const;
1625 void prepare (PrepareContext& context);
1626 void verify (VerifyContext& context, size_t commandIndex);
1629 const vk::VkImageUsageFlags m_usage;
1630 const vk::VkSharingMode m_sharing;
1631 deInt32 m_imageWidth;
1632 deInt32 m_imageHeight;
1635 CreateImage::CreateImage (vk::VkImageUsageFlags usage,
1636 vk::VkSharingMode sharing)
1638 , m_sharing (sharing)
1642 void CreateImage::logPrepare (TestLog& log, size_t commandIndex) const
1644 log << TestLog::Message << commandIndex << ":" << getName() << " Create image, sharing: " << m_sharing << ", usage: " << vk::getImageUsageFlagsStr(m_usage) << TestLog::EndMessage;
1647 void CreateImage::prepare (PrepareContext& context)
1649 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1650 const vk::VkDevice device = context.getContext().getDevice();
1651 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
1653 m_imageWidth = context.getMemory().getMaxImageWidth();
1654 m_imageHeight = context.getMemory().getMaxImageHeight();
// VkImageCreateInfo in declaration order (several fields elided in this
// sampled listing): 2D, RGBA8 UNORM, 1 sample, optimal tiling, UNDEFINED layout.
1657 const vk::VkImageCreateInfo createInfo =
1659 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
1663 vk::VK_IMAGE_TYPE_2D,
1664 vk::VK_FORMAT_R8G8B8A8_UNORM,
1666 (deUint32)m_imageWidth,
1667 (deUint32)m_imageHeight,
1671 vk::VK_SAMPLE_COUNT_1_BIT,
1672 vk::VK_IMAGE_TILING_OPTIMAL,
1675 (deUint32)queueFamilies.size(),
1677 vk::VK_IMAGE_LAYOUT_UNDEFINED
1679 vk::Move<vk::VkImage> image (createImage(vkd, device, &createInfo));
1680 const vk::VkMemoryRequirements requirements = vk::getImageMemoryRequirements(vkd, device, *image);
1682 context.setImage(image, vk::VK_IMAGE_LAYOUT_UNDEFINED, requirements.size, m_imageWidth, m_imageHeight);
// Reset the reference image to an uninitialized level of matching size/format.
1686 void CreateImage::verify (VerifyContext& context, size_t)
1688 context.getReferenceImage() = TextureLevel(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight);
// Command that takes image ownership from the context in prepare() and
// destroys the image during execute() (mirror of DestroyBuffer).
1691 class DestroyImage : public Command
1694 DestroyImage (void);
1695 ~DestroyImage (void) {}
1696 const char* getName (void) const { return "DestroyImage"; }
1698 void logExecute (TestLog& log, size_t commandIndex) const;
1699 void prepare (PrepareContext& context);
1700 void execute (ExecuteContext& context);
1703 vk::Move<vk::VkImage> m_image;
1706 DestroyImage::DestroyImage (void)
1710 void DestroyImage::prepare (PrepareContext& context)
1712 m_image = vk::Move<vk::VkImage>(vk::check(context.getImage()), vk::Deleter<vk::VkImage>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1713 context.releaseImage();
1717 void DestroyImage::logExecute (TestLog& log, size_t commandIndex) const
1719 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy image" << TestLog::EndMessage;
1722 void DestroyImage::execute (ExecuteContext& context)
1724 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1725 const vk::VkDevice device = context.getContext().getDevice();
// disown() prevents the Move<> wrapper from destroying the handle again.
1727 vkd.destroyImage(device, m_image.disown(), DE_NULL);
// Command that binds the test's memory allocation to the current image at
// offset 0.
1730 class BindImageMemory : public Command
1733 BindImageMemory (void) {}
1734 ~BindImageMemory (void) {}
1735 const char* getName (void) const { return "BindImageMemory"; }
1737 void logPrepare (TestLog& log, size_t commandIndex) const;
1738 void prepare (PrepareContext& context);
1741 void BindImageMemory::logPrepare (TestLog& log, size_t commandIndex) const
1743 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to image" << TestLog::EndMessage;
1746 void BindImageMemory::prepare (PrepareContext& context)
1748 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1749 const vk::VkDevice device = context.getContext().getDevice();
1751 VK_CHECK(vkd.bindImageMemory(device, context.getImage(), context.getMemory().getMemory(), 0));
// Command that blocks until the test queue is idle (vkQueueWaitIdle).
1754 class QueueWaitIdle : public Command
1757 QueueWaitIdle (void) {}
1758 ~QueueWaitIdle (void) {}
// Fixed typo: was "QueuetWaitIdle", which mislabeled this command in logs and
// failure messages.
1759 const char* getName (void) const { return "QueueWaitIdle"; }
1761 void logExecute (TestLog& log, size_t commandIndex) const;
1762 void execute (ExecuteContext& context);
1765 void QueueWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1767 log << TestLog::Message << commandIndex << ":" << getName() << " Queue wait idle" << TestLog::EndMessage;
1770 void QueueWaitIdle::execute (ExecuteContext& context)
1772 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1773 const vk::VkQueue queue = context.getContext().getQueue();
1775 VK_CHECK(vkd.queueWaitIdle(queue));
// Command that blocks until the whole device is idle (vkDeviceWaitIdle).
1778 class DeviceWaitIdle : public Command
1781 DeviceWaitIdle (void) {}
1782 ~DeviceWaitIdle (void) {}
1783 const char* getName (void) const { return "DeviceWaitIdle"; }
1785 void logExecute (TestLog& log, size_t commandIndex) const;
1786 void execute (ExecuteContext& context);
1789 void DeviceWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1791 log << TestLog::Message << commandIndex << ":" << getName() << " Device wait idle" << TestLog::EndMessage;
1794 void DeviceWaitIdle::execute (ExecuteContext& context)
1796 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1797 const vk::VkDevice device = context.getContext().getDevice();
1799 VK_CHECK(vkd.deviceWaitIdle(device));
// Read-only view handed to CmdCommand::submit(): the prepare-time state plus
// the command buffer being recorded into.
1805 SubmitContext (const PrepareContext& context,
1806 const vk::VkCommandBuffer commandBuffer)
1807 : m_context (context)
1808 , m_commandBuffer (commandBuffer)
1812 const Memory& getMemory (void) const { return m_context.getMemory(); }
1813 const Context& getContext (void) const { return m_context.getContext(); }
1814 vk::VkCommandBuffer getCommandBuffer (void) const { return m_commandBuffer; }
1816 vk::VkBuffer getBuffer (void) const { return m_context.getBuffer(); }
1817 vk::VkDeviceSize getBufferSize (void) const { return m_context.getBufferSize(); }
1819 vk::VkImage getImage (void) const { return m_context.getImage(); }
1820 deInt32 getImageWidth (void) const { return m_context.getImageWidth(); }
1821 deInt32 getImageHeight (void) const { return m_context.getImageHeight(); }
1824 const PrepareContext& m_context;
1825 const vk::VkCommandBuffer m_commandBuffer;
// NOTE(review): interior of the CmdCommand base class (class header elided in
// this sampled listing). A CmdCommand records work into a command buffer
// rather than executing on the host; it is driven by SubmitCommandBuffer.
1831 virtual ~CmdCommand (void) {}
1832 virtual const char* getName (void) const = 0;
1834 // Log things that are done during prepare
1835 virtual void logPrepare (TestLog&, size_t) const {}
1836 // Log submitted calls etc.
1837 virtual void logSubmit (TestLog&, size_t) const {}
1839 // Allocate vulkan resources and prepare for submit.
1840 virtual void prepare (PrepareContext&) {}
1842 // Submit commands to command buffer.
1843 virtual void submit (SubmitContext&) {}
// Verify results and/or update the reference state after execution.
1846 virtual void verify (VerifyContext&, size_t) {}
// Host command that owns a list of CmdCommands, records them all into one
// primary command buffer in prepare(), and submits that buffer in execute().
// Takes ownership of the CmdCommand pointers (deleted in the destructor).
1849 class SubmitCommandBuffer : public Command
1852 SubmitCommandBuffer (const vector<CmdCommand*>& commands);
1853 ~SubmitCommandBuffer (void);
1855 const char* getName (void) const { return "SubmitCommandBuffer"; }
1856 void logExecute (TestLog& log, size_t commandIndex) const;
1857 void logPrepare (TestLog& log, size_t commandIndex) const;
1859 // Allocate command buffer and submit commands to command buffer
1860 void prepare (PrepareContext& context);
1861 void execute (ExecuteContext& context);
1863 // Verify that results are correct.
1864 void verify (VerifyContext& context, size_t commandIndex);
1867 vector<CmdCommand*> m_commands;
1868 vk::Move<vk::VkCommandBuffer> m_commandBuffer;
1871 SubmitCommandBuffer::SubmitCommandBuffer (const vector<CmdCommand*>& commands)
1872 : m_commands (commands)
// Owns the child commands: delete them all on destruction.
1876 SubmitCommandBuffer::~SubmitCommandBuffer (void)
1878 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1879 delete m_commands[cmdNdx];
// Allocate + begin a primary command buffer, let every child prepare its
// resources, then record each child's submit() into the buffer and end it.
1882 void SubmitCommandBuffer::prepare (PrepareContext& context)
1884 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1885 const vk::VkDevice device = context.getContext().getDevice();
1886 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
1888 m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);
1890 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1892 CmdCommand& command = *m_commands[cmdNdx];
1894 command.prepare(context);
1898 SubmitContext submitContext (context, *m_commandBuffer);
1900 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1902 CmdCommand& command = *m_commands[cmdNdx];
1904 command.submit(submitContext);
1907 VK_CHECK(vkd.endCommandBuffer(*m_commandBuffer));
// Submit the recorded command buffer to the queue with no wait/signal
// semaphores and no fence. (Several VkSubmitInfo fields are elided in this
// sampled listing.)
1911 void SubmitCommandBuffer::execute (ExecuteContext& context)
1913 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1914 const vk::VkCommandBuffer cmd = *m_commandBuffer;
1915 const vk::VkQueue queue = context.getContext().getQueue();
1916 const vk::VkSubmitInfo submit =
1918 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
1923 (const vk::VkPipelineStageFlags*)DE_NULL,
// Check the VkResult: vkQueueSubmit can fail (e.g. VK_ERROR_DEVICE_LOST), and
// every other Vulkan call in this file is wrapped in VK_CHECK.
1932 VK_CHECK(vkd.queueSubmit(queue, 1, &submit, 0));
// Fan each phase out to the child commands under a log section named after
// this command's index; children are indexed by their own position.
1935 void SubmitCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
1937 const string sectionName (de::toString(commandIndex) + ":" + getName());
1938 const tcu::ScopedLogSection section (context.getLog(), sectionName, sectionName);
1940 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1941 m_commands[cmdNdx]->verify(context, cmdNdx);
1944 void SubmitCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1946 const string sectionName (de::toString(commandIndex) + ":" + getName());
1947 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1949 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1950 m_commands[cmdNdx]->logPrepare(log, cmdNdx);
// logExecute maps to the children's logSubmit: execution of this command is
// the submission of their recorded work.
1953 void SubmitCommandBuffer::logExecute (TestLog& log, size_t commandIndex) const
1955 const string sectionName (de::toString(commandIndex) + ":" + getName());
1956 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1958 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1959 m_commands[cmdNdx]->logSubmit(log, cmdNdx);
// CmdCommand that records a vkCmdPipelineBarrier of one of three kinds
// (global memory, buffer, or image barrier — selected by m_type, whose enum
// and member are elided in this sampled listing). For image barriers an
// optional layout may be supplied.
1962 class PipelineBarrier : public CmdCommand
1972 PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1973 const vk::VkAccessFlags srcAccesses,
1974 const vk::VkPipelineStageFlags dstStages,
1975 const vk::VkAccessFlags dstAccesses,
1977 const tcu::Maybe<vk::VkImageLayout> imageLayout);
1978 ~PipelineBarrier (void) {}
1979 const char* getName (void) const { return "PipelineBarrier"; }
1981 void logSubmit (TestLog& log, size_t commandIndex) const;
1982 void submit (SubmitContext& context);
1985 const vk::VkPipelineStageFlags m_srcStages;
1986 const vk::VkAccessFlags m_srcAccesses;
1987 const vk::VkPipelineStageFlags m_dstStages;
1988 const vk::VkAccessFlags m_dstAccesses;
1990 const tcu::Maybe<vk::VkImageLayout> m_imageLayout;
1993 PipelineBarrier::PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1994 const vk::VkAccessFlags srcAccesses,
1995 const vk::VkPipelineStageFlags dstStages,
1996 const vk::VkAccessFlags dstAccesses,
1998 const tcu::Maybe<vk::VkImageLayout> imageLayout)
1999 : m_srcStages (srcStages)
2000 , m_srcAccesses (srcAccesses)
2001 , m_dstStages (dstStages)
2002 , m_dstAccesses (dstAccesses)
2004 , m_imageLayout (imageLayout)
2008 void PipelineBarrier::logSubmit (TestLog& log, size_t commandIndex) const
2010 log << TestLog::Message << commandIndex << ":" << getName()
2011 << " " << (m_type == TYPE_GLOBAL ? "Global pipeline barrier"
2012 : m_type == TYPE_BUFFER ? "Buffer pipeline barrier"
2013 : "Image pipeline barrier")
2014 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
2015 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses) << TestLog::EndMessage;
// Record the barrier matching m_type. Exactly one of the three barrier
// structs is passed to vkCmdPipelineBarrier; the other two arrays are empty.
// NOTE(review): the switch/case lines and several struct fields are elided in
// this sampled listing.
2018 void PipelineBarrier::submit (SubmitContext& context)
2020 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2021 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
// Global: a single VkMemoryBarrier covering all memory accesses.
2027 const vk::VkMemoryBarrier barrier =
2029 vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,
2036 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 1, &barrier, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
// Buffer: barrier on the context's buffer, no queue family transfer.
2042 const vk::VkBufferMemoryBarrier barrier =
2044 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2050 VK_QUEUE_FAMILY_IGNORED,
2051 VK_QUEUE_FAMILY_IGNORED,
2053 context.getBuffer(),
2058 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
// Image: barrier on the color aspect of the context's image.
2064 const vk::VkImageMemoryBarrier barrier =
2066 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2075 VK_QUEUE_FAMILY_IGNORED,
2076 VK_QUEUE_FAMILY_IGNORED,
2080 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2086 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2091 DE_FATAL("Unknown pipeline barrier type");
// CmdCommand that records an image layout transition (srcLayout -> dstLayout)
// as an image memory barrier on the color aspect.
2095 class ImageTransition : public CmdCommand
2098 ImageTransition (vk::VkPipelineStageFlags srcStages,
2099 vk::VkAccessFlags srcAccesses,
2101 vk::VkPipelineStageFlags dstStages,
2102 vk::VkAccessFlags dstAccesses,
2104 vk::VkImageLayout srcLayout,
2105 vk::VkImageLayout dstLayout);
2107 ~ImageTransition (void) {}
2108 const char* getName (void) const { return "ImageTransition"; }
2110 void prepare (PrepareContext& context);
2111 void logSubmit (TestLog& log, size_t commandIndex) const;
2112 void submit (SubmitContext& context);
2113 void verify (VerifyContext& context, size_t);
2116 const vk::VkPipelineStageFlags m_srcStages;
2117 const vk::VkAccessFlags m_srcAccesses;
2118 const vk::VkPipelineStageFlags m_dstStages;
2119 const vk::VkAccessFlags m_dstAccesses;
2120 const vk::VkImageLayout m_srcLayout;
2121 const vk::VkImageLayout m_dstLayout;
2123 vk::VkDeviceSize m_imageMemorySize;
2126 ImageTransition::ImageTransition (vk::VkPipelineStageFlags srcStages,
2127 vk::VkAccessFlags srcAccesses,
2129 vk::VkPipelineStageFlags dstStages,
2130 vk::VkAccessFlags dstAccesses,
2132 vk::VkImageLayout srcLayout,
2133 vk::VkImageLayout dstLayout)
2134 : m_srcStages (srcStages)
2135 , m_srcAccesses (srcAccesses)
2136 , m_dstStages (dstStages)
2137 , m_dstAccesses (dstAccesses)
2138 , m_srcLayout (srcLayout)
2139 , m_dstLayout (dstLayout)
2143 void ImageTransition::logSubmit (TestLog& log, size_t commandIndex) const
2145 log << TestLog::Message << commandIndex << ":" << getName()
2146 << " Image transition pipeline barrier"
2147 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
2148 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses)
2149 << ", srcLayout: " << m_srcLayout << ", dstLayout: " << m_dstLayout << TestLog::EndMessage;
// Sanity-check the transition chain (UNDEFINED may always be used as source),
// then record the new layout and the memory size for verify().
2152 void ImageTransition::prepare (PrepareContext& context)
2154 DE_ASSERT(context.getImageLayout() == vk::VK_IMAGE_LAYOUT_UNDEFINED || m_srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED || context.getImageLayout() == m_srcLayout);
2156 context.setImageLayout(m_dstLayout);
2157 m_imageMemorySize = context.getImageMemorySize();
// Record the layout-transition barrier (some VkImageMemoryBarrier fields are
// elided in this sampled listing).
2160 void ImageTransition::submit (SubmitContext& context)
2162 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2163 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2164 const vk::VkImageMemoryBarrier barrier =
2166 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2175 VK_QUEUE_FAMILY_IGNORED,
2176 VK_QUEUE_FAMILY_IGNORED,
2180 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2186 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
// A layout transition may reorganize the image's memory, so the byte-level
// reference contents become undefined.
2189 void ImageTransition::verify (VerifyContext& context, size_t)
2191 context.getReference().setUndefined(0, (size_t)m_imageMemorySize);
// CmdCommand that fills the buffer with a repeating 32-bit value via
// vkCmdFillBuffer and mirrors the fill into the byte-level reference.
2194 class FillBuffer : public CmdCommand
2197 FillBuffer (deUint32 value) : m_value(value) {}
2198 ~FillBuffer (void) {}
2199 const char* getName (void) const { return "FillBuffer"; }
2201 void logSubmit (TestLog& log, size_t commandIndex) const;
2202 void submit (SubmitContext& context);
2203 void verify (VerifyContext& context, size_t commandIndex);
2206 const deUint32 m_value;
2207 vk::VkDeviceSize m_bufferSize;
2210 void FillBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2212 log << TestLog::Message << commandIndex << ":" << getName() << " Fill value: " << m_value << TestLog::EndMessage;
2215 void FillBuffer::submit (SubmitContext& context)
2217 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2218 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2219 const vk::VkBuffer buffer = context.getBuffer();
2220 const vk::VkDeviceSize sizeMask = ~(0x3ull); // \note Round down to multiple of 4
2222 m_bufferSize = sizeMask & context.getBufferSize();
2223 vkd.cmdFillBuffer(cmd, buffer, 0, m_bufferSize, m_value);
// Expand the 32-bit fill value to per-byte expectations, honoring host
// endianness (vkCmdFillBuffer writes the value in device/little-endian order).
2226 void FillBuffer::verify (VerifyContext& context, size_t)
2228 ReferenceMemory& reference = context.getReference();
2230 for (size_t ndx = 0; ndx < m_bufferSize; ndx++)
2232 #if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
2233 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(ndx % 4)))));
2235 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(3 - (ndx % 4))))));
// CmdCommand that writes seeded pseudo-random data into the buffer with
// vkCmdUpdateBuffer in 64KiB blocks. verify() replays the identical RNG
// stream into the reference memory.
2240 class UpdateBuffer : public CmdCommand
2243 UpdateBuffer (deUint32 seed) : m_seed(seed) {}
2244 ~UpdateBuffer (void) {}
2245 const char* getName (void) const { return "UpdateBuffer"; }
2247 void logSubmit (TestLog& log, size_t commandIndex) const;
2248 void submit (SubmitContext& context);
2249 void verify (VerifyContext& context, size_t commandIndex);
2252 const deUint32 m_seed;
2253 vk::VkDeviceSize m_bufferSize;
2256 void UpdateBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2258 log << TestLog::Message << commandIndex << ":" << getName() << " Update buffer, seed: " << m_seed << TestLog::EndMessage;
2261 void UpdateBuffer::submit (SubmitContext& context)
2263 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2264 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2265 const vk::VkBuffer buffer = context.getBuffer();
// Blocks of 64KiB: vkCmdUpdateBuffer's per-call limit is 65536 bytes.
2266 const size_t blockSize = 65536;
2267 std::vector<deUint8> data (blockSize, 0);
2268 de::Random rng (m_seed);
2270 m_bufferSize = context.getBufferSize();
2272 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2274 for (size_t ndx = 0; ndx < data.size(); ndx++)
2275 data[ndx] = rng.getUint8();
2277 if (m_bufferSize - updated > blockSize)
2278 vkd.cmdUpdateBuffer(cmd, buffer, updated, blockSize, (const deUint32*)(&data[0]))
2280 vkd.cmdUpdateBuffer(cmd, buffer, updated, m_bufferSize - updated, (const deUint32*)(&data[0]));
// Regenerate the same stream (same seed, same block loop) into the reference.
2284 void UpdateBuffer::verify (VerifyContext& context, size_t)
2286 ReferenceMemory& reference = context.getReference();
2287 const size_t blockSize = 65536;
2288 vector<deUint8> data (blockSize, 0);
2289 de::Random rng (m_seed);
2291 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2293 for (size_t ndx = 0; ndx < data.size(); ndx++)
2294 data[ndx] = rng.getUint8();
2296 if (m_bufferSize - updated > blockSize)
2297 reference.setData(updated, blockSize, &data[0]);
2299 reference.setData(updated, (size_t)(m_bufferSize - updated), &data[0]);
// CmdCommand that copies the test buffer into a freshly allocated
// host-visible destination buffer, then in verify() submits a
// transfer->host barrier, maps the destination, and byte-compares it against
// the reference memory (skipping undefined positions).
2303 class BufferCopyToBuffer : public CmdCommand
2306 BufferCopyToBuffer (void) {}
2307 ~BufferCopyToBuffer (void) {}
2308 const char* getName (void) const { return "BufferCopyToBuffer"; }
2310 void logPrepare (TestLog& log, size_t commandIndex) const;
2311 void prepare (PrepareContext& context);
2312 void logSubmit (TestLog& log, size_t commandIndex) const;
2313 void submit (SubmitContext& context);
2314 void verify (VerifyContext& context, size_t commandIndex);
2317 vk::VkDeviceSize m_bufferSize;
2318 vk::Move<vk::VkBuffer> m_dstBuffer;
2319 vk::Move<vk::VkDeviceMemory> m_memory;
2322 void BufferCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2324 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for buffer to buffer copy." << TestLog::EndMessage;
// Create the TRANSFER_DST destination buffer and back it with HOST_VISIBLE
// memory so verify() can map and read it.
2327 void BufferCopyToBuffer::prepare (PrepareContext& context)
2329 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2330 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2331 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2332 const vk::VkDevice device = context.getContext().getDevice();
2333 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2335 m_bufferSize = context.getBufferSize();
2337 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2338 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2341 void BufferCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2343 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to another buffer" << TestLog::EndMessage;
2346 void BufferCopyToBuffer::submit (SubmitContext& context)
2348 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2349 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2350 const vk::VkBufferCopy range =
2356 vkd.cmdCopyBuffer(commandBuffer, context.getBuffer(), *m_dstBuffer, 1, &range);
2359 void BufferCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2361 tcu::ResultCollector& resultCollector (context.getResultCollector());
2362 ReferenceMemory& reference (context.getReference());
2363 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2364 const vk::VkDevice device = context.getContext().getDevice();
2365 const vk::VkQueue queue = context.getContext().getQueue();
2366 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2367 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// Barrier making the transfer write visible to host reads before mapping.
2368 const vk::VkBufferMemoryBarrier barrier =
2370 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2373 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2374 vk::VK_ACCESS_HOST_READ_BIT,
2376 VK_QUEUE_FAMILY_IGNORED,
2377 VK_QUEUE_FAMILY_IGNORED,
2383 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2385 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2386 queueRun(vkd, queue, *commandBuffer);
2389 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2392 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2395 const deUint8* const data = (const deUint8*)ptr;
2397 for (size_t pos = 0; pos < (size_t)m_bufferSize; pos++)
2399 if (reference.isDefined(pos))
2401 if (data[pos] != reference.get(pos))
2403 resultCollector.fail(
2404 de::toString(commandIndex) + ":" + getName()
2405 + " Result differs from reference, Expected: "
2406 + de::toString(tcu::toHex<8>(reference.get(pos)))
2408 + de::toString(tcu::toHex<8>(data[pos]))
2410 + de::toString(pos))
2417 vkd.unmapMemory(device, *m_memory);
2420 context.getLog() << TestLog::Message << commandIndex << ": Buffer copy to buffer verification failed" << TestLog::EndMessage;
// Command that allocates a host-visible source buffer, fills it with seeded
// random bytes, and copies it into the test buffer.
2424 class BufferCopyFromBuffer : public CmdCommand
2427 BufferCopyFromBuffer (deUint32 seed) : m_seed(seed) {}
2428 ~BufferCopyFromBuffer (void) {}
2429 const char* getName (void) const { return "BufferCopyFromBuffer"; }
2431 void logPrepare (TestLog& log, size_t commandIndex) const;
2432 void prepare (PrepareContext& context);
2433 void logSubmit (TestLog& log, size_t commandIndex) const;
2434 void submit (SubmitContext& context);
2435 void verify (VerifyContext& context, size_t commandIndex);
2438 const deUint32 m_seed; // RNG seed shared by prepare() and verify()
2439 vk::VkDeviceSize m_bufferSize;
2440 vk::Move<vk::VkBuffer> m_srcBuffer;
2441 vk::Move<vk::VkDeviceMemory> m_memory;
// Log the preparation step, including the RNG seed used to fill the source buffer.
2444 void BufferCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2446 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to buffer copy. Seed: " << m_seed << TestLog::EndMessage;
// Create a host-visible TRANSFER_SRC buffer the same size as the test buffer and
// fill it with deterministic random bytes so verify() can replay them.
2449 void BufferCopyFromBuffer::prepare (PrepareContext& context)
2451 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2452 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2453 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2454 const vk::VkDevice device = context.getContext().getDevice();
2455 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2457 m_bufferSize = context.getBufferSize();
2458 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2459 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2462 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2463 de::Random rng (m_seed);
2466 deUint8* const data = (deUint8*)ptr;
2468 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2469 data[ndx] = rng.getUint8();
// Flush before unmap: the memory is only guaranteed host-visible, not coherent.
2472 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2473 vkd.unmapMemory(device, *m_memory);
// Log a one-line description of the copy-from-source-buffer submission.
2477 void BufferCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2479 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from another buffer" << TestLog::EndMessage;
// Record a full-buffer copy from m_srcBuffer into the context's test buffer.
2482 void BufferCopyFromBuffer::submit (SubmitContext& context)
2484 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2485 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
// Copy region; initializer fields are elided in this listing — presumably offsets 0
// and size m_bufferSize. TODO confirm against full source.
2486 const vk::VkBufferCopy range =
2492 vkd.cmdCopyBuffer(commandBuffer, *m_srcBuffer, context.getBuffer(), 1, &range);
// Update the reference memory model by replaying the same seeded RNG sequence
// that prepare() wrote into the source buffer.
2495 void BufferCopyFromBuffer::verify (VerifyContext& context, size_t)
2497 ReferenceMemory& reference (context.getReference());
2498 de::Random rng (m_seed);
2500 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2501 reference.set(ndx, rng.getUint8());
// Command that copies the test buffer into a freshly created RGBA8 image.
2504 class BufferCopyToImage : public CmdCommand
2507 BufferCopyToImage (void) {}
2508 ~BufferCopyToImage (void) {}
2509 const char* getName (void) const { return "BufferCopyToImage"; }
2511 void logPrepare (TestLog& log, size_t commandIndex) const;
2512 void prepare (PrepareContext& context);
2513 void logSubmit (TestLog& log, size_t commandIndex) const;
2514 void submit (SubmitContext& context);
2515 void verify (VerifyContext& context, size_t commandIndex);
2518 deInt32 m_imageWidth;
2519 deInt32 m_imageHeight;
2520 vk::Move<vk::VkImage> m_dstImage;
2521 vk::Move<vk::VkDeviceMemory> m_memory;
// Log the destination-image allocation step.
2524 void BufferCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
2526 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for buffer to image copy." << TestLog::EndMessage;
// Create an RGBA8 2D image whose WxHx4 size matches the test buffer, bind memory,
// and transition it from UNDEFINED to TRANSFER_DST_OPTIMAL on the queue.
2529 void BufferCopyToImage::prepare (PrepareContext& context)
2531 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2532 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2533 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2534 const vk::VkDevice device = context.getContext().getDevice();
2535 const vk::VkQueue queue = context.getContext().getQueue();
2536 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2537 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Pick image dimensions such that width*height*4 equals the buffer size.
2538 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2540 m_imageWidth = imageSize[0];
2541 m_imageHeight = imageSize[1];
2544 const vk::VkImageCreateInfo createInfo =
2546 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2550 vk::VK_IMAGE_TYPE_2D,
2551 vk::VK_FORMAT_R8G8B8A8_UNORM,
2553 (deUint32)m_imageWidth,
2554 (deUint32)m_imageHeight,
2557 1, 1, // mipLevels, arrayLayers
2558 vk::VK_SAMPLE_COUNT_1_BIT,
2560 vk::VK_IMAGE_TILING_OPTIMAL,
2561 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2562 vk::VK_SHARING_MODE_EXCLUSIVE,
2564 (deUint32)queueFamilies.size(),
2566 vk::VK_IMAGE_LAYOUT_UNDEFINED
2569 m_dstImage = vk::createImage(vkd, device, &createInfo);
2572 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
// One-off command buffer performing the initial layout transition.
2575 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2576 const vk::VkImageMemoryBarrier barrier =
2578 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2582 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2584 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2585 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2587 VK_QUEUE_FAMILY_IGNORED,
2588 VK_QUEUE_FAMILY_IGNORED,
2592 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2594 1, // Mip level count
2600 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2602 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2603 queueRun(vkd, queue, *commandBuffer);
// Log a one-line description of the buffer-to-image copy submission.
2607 void BufferCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
2609 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to image" << TestLog::EndMessage;
// Record a copy of the whole test buffer into m_dstImage (already in
// TRANSFER_DST_OPTIMAL from prepare()).
2612 void BufferCopyToImage::submit (SubmitContext& context)
2614 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2615 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2616 const vk::VkBufferImageCopy region =
2621 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2628 (deUint32)m_imageWidth,
2629 (deUint32)m_imageHeight,
// Fixed mojibake: "®ion" was an encoding-corrupted "&region".
2634 vkd.cmdCopyBufferToImage(commandBuffer, context.getBuffer(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
// Copy m_dstImage back into a host-visible buffer (with transfer->transfer and
// transfer->host barriers), then compare every defined byte against the
// reference memory model.
2637 void BufferCopyToImage::verify (VerifyContext& context, size_t commandIndex)
2639 tcu::ResultCollector& resultCollector (context.getResultCollector());
2640 ReferenceMemory& reference (context.getReference());
2641 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2642 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2643 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2644 const vk::VkDevice device = context.getContext().getDevice();
2645 const vk::VkQueue queue = context.getContext().getQueue();
2646 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2647 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2648 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2649 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2650 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Transition the image TRANSFER_DST -> TRANSFER_SRC so it can be read back.
2652 const vk::VkImageMemoryBarrier imageBarrier =
2654 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2657 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2658 vk::VK_ACCESS_TRANSFER_READ_BIT,
2660 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2661 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2663 VK_QUEUE_FAMILY_IGNORED,
2664 VK_QUEUE_FAMILY_IGNORED,
2668 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2670 1, // Mip level count
// Make the readback buffer's transfer write visible to host reads.
2675 const vk::VkBufferMemoryBarrier bufferBarrier =
2677 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2680 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2681 vk::VK_ACCESS_HOST_READ_BIT,
2683 VK_QUEUE_FAMILY_IGNORED,
2684 VK_QUEUE_FAMILY_IGNORED,
2690 const vk::VkBufferImageCopy region =
2695 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2702 (deUint32)m_imageWidth,
2703 (deUint32)m_imageHeight,
2708 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
// Fixed mojibake: "®ion" was an encoding-corrupted "&region".
2709 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
2710 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2713 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2714 queueRun(vkd, queue, *commandBuffer);
2717 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2719 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2722 const deUint8* const data = (const deUint8*)ptr;
2724 for (size_t pos = 0; pos < (size_t)( 4 * m_imageWidth * m_imageHeight); pos++)
2726 if (reference.isDefined(pos))
2728 if (data[pos] != reference.get(pos))
2730 resultCollector.fail(
2731 de::toString(commandIndex) + ":" + getName()
2732 + " Result differs from reference, Expected: "
2733 + de::toString(tcu::toHex<8>(reference.get(pos)))
2735 + de::toString(tcu::toHex<8>(data[pos]))
2737 + de::toString(pos));
2744 vkd.unmapMemory(device, *memory);
// Command that fills a freshly created RGBA8 image with seeded random data and
// copies it into the test buffer.
2748 class BufferCopyFromImage : public CmdCommand
2751 BufferCopyFromImage (deUint32 seed) : m_seed(seed) {}
2752 ~BufferCopyFromImage (void) {}
2753 const char* getName (void) const { return "BufferCopyFromImage"; }
2755 void logPrepare (TestLog& log, size_t commandIndex) const;
2756 void prepare (PrepareContext& context);
2757 void logSubmit (TestLog& log, size_t commandIndex) const;
2758 void submit (SubmitContext& context);
2759 void verify (VerifyContext& context, size_t commandIndex);
2762 const deUint32 m_seed; // RNG seed shared by prepare() and verify()
2763 deInt32 m_imageWidth;
2764 deInt32 m_imageHeight;
2765 vk::Move<vk::VkImage> m_srcImage;
2766 vk::Move<vk::VkDeviceMemory> m_memory;
// Log the source-image allocation step.
2769 void BufferCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
2771 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to buffer copy." << TestLog::EndMessage;
// Create an RGBA8 image sized to the test buffer, upload seeded random data to
// it through a host-visible staging buffer, and leave it in
// TRANSFER_SRC_OPTIMAL for the submit() copy.
2774 void BufferCopyFromImage::prepare (PrepareContext& context)
2776 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2777 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2778 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2779 const vk::VkDevice device = context.getContext().getDevice();
2780 const vk::VkQueue queue = context.getContext().getQueue();
2781 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2782 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2783 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2785 m_imageWidth = imageSize[0];
2786 m_imageHeight = imageSize[1];
2789 const vk::VkImageCreateInfo createInfo =
2791 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2795 vk::VK_IMAGE_TYPE_2D,
2796 vk::VK_FORMAT_R8G8B8A8_UNORM,
2798 (deUint32)m_imageWidth,
2799 (deUint32)m_imageHeight,
2802 1, 1, // mipLevels, arrayLayers
2803 vk::VK_SAMPLE_COUNT_1_BIT,
2805 vk::VK_IMAGE_TILING_OPTIMAL,
2806 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2807 vk::VK_SHARING_MODE_EXCLUSIVE,
2809 (deUint32)queueFamilies.size(),
2811 vk::VK_IMAGE_LAYOUT_UNDEFINED
2814 m_srcImage = vk::createImage(vkd, device, &createInfo);
2817 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
// Host-visible staging buffer used to seed the image contents.
2820 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2821 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2822 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// UNDEFINED -> TRANSFER_DST before the upload copy.
2823 const vk::VkImageMemoryBarrier preImageBarrier =
2825 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2829 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2831 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2832 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2834 VK_QUEUE_FAMILY_IGNORED,
2835 VK_QUEUE_FAMILY_IGNORED,
2839 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2841 1, // Mip level count
// TRANSFER_DST -> TRANSFER_SRC after the upload copy.
2846 const vk::VkImageMemoryBarrier postImageBarrier =
2848 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2851 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2854 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2855 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2857 VK_QUEUE_FAMILY_IGNORED,
2858 VK_QUEUE_FAMILY_IGNORED,
2862 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2864 1, // Mip level count
2869 const vk::VkBufferImageCopy region =
2874 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2881 (deUint32)m_imageWidth,
2882 (deUint32)m_imageHeight,
2888 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2889 de::Random rng (m_seed);
2892 deUint8* const data = (deUint8*)ptr;
2894 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2895 data[ndx] = rng.getUint8();
2898 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2899 vkd.unmapMemory(device, *memory);
2902 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
// Fixed mojibake: "®ion" was an encoding-corrupted "&region".
2903 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
2904 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
2906 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2907 queueRun(vkd, queue, *commandBuffer);
// Log a one-line description of the image-to-buffer copy submission.
2911 void BufferCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
2913 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from image" << TestLog::EndMessage;
// Record a copy of the whole of m_srcImage (TRANSFER_SRC_OPTIMAL, set up in
// prepare()) into the context's test buffer.
2916 void BufferCopyFromImage::submit (SubmitContext& context)
2918 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2919 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2920 const vk::VkBufferImageCopy region =
2925 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2932 (deUint32)m_imageWidth,
2933 (deUint32)m_imageHeight,
// Fixed mojibake: "®ion" was an encoding-corrupted "&region".
2938 vkd.cmdCopyImageToBuffer(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getBuffer(), 1, &region);
// Update the reference memory model by replaying the seeded RNG bytes that
// prepare() uploaded into the source image.
2941 void BufferCopyFromImage::verify (VerifyContext& context, size_t)
2943 ReferenceMemory& reference (context.getReference());
2944 de::Random rng (m_seed);
2946 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2947 reference.set(ndx, rng.getUint8());
// Command that copies the test image into a freshly allocated host-visible buffer.
2950 class ImageCopyToBuffer : public CmdCommand
2953 ImageCopyToBuffer (vk::VkImageLayout imageLayout) : m_imageLayout (imageLayout) {}
2954 ~ImageCopyToBuffer (void) {}
// Fixed: previously returned "BufferCopyToImage" (copy-paste from the sibling
// class), which mislabeled this command in test logs and failure messages.
2955 const char* getName (void) const { return "ImageCopyToBuffer"; }
2957 void logPrepare (TestLog& log, size_t commandIndex) const;
2958 void prepare (PrepareContext& context);
2959 void logSubmit (TestLog& log, size_t commandIndex) const;
2960 void submit (SubmitContext& context);
2961 void verify (VerifyContext& context, size_t commandIndex);
2964 vk::VkImageLayout m_imageLayout; // layout the test image is in when copied
2965 vk::VkDeviceSize m_bufferSize;
2966 vk::Move<vk::VkBuffer> m_dstBuffer;
2967 vk::Move<vk::VkDeviceMemory> m_memory;
2968 vk::VkDeviceSize m_imageMemorySize;
2969 deInt32 m_imageWidth;
2970 deInt32 m_imageHeight;
// Log the destination-buffer allocation step.
2973 void ImageCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2975 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for image to buffer copy." << TestLog::EndMessage;
// Allocate a host-visible TRANSFER_DST buffer sized to hold the whole RGBA8
// test image (width * height * 4 bytes).
2978 void ImageCopyToBuffer::prepare (PrepareContext& context)
2980 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2981 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2982 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2983 const vk::VkDevice device = context.getContext().getDevice();
2984 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2986 m_imageWidth = context.getImageWidth();
2987 m_imageHeight = context.getImageHeight();
2988 m_bufferSize = 4 * m_imageWidth * m_imageHeight;
2989 m_imageMemorySize = context.getImageMemorySize();
2990 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2991 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
// Log a one-line description of the image-to-buffer copy submission.
2994 void ImageCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2996 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to buffer" << TestLog::EndMessage;
// Record a copy of the whole test image (in m_imageLayout) into m_dstBuffer.
2999 void ImageCopyToBuffer::submit (SubmitContext& context)
3001 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3002 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3003 const vk::VkBufferImageCopy region =
3008 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3015 (deUint32)m_imageWidth,
3016 (deUint32)m_imageHeight,
// Fixed mojibake: "®ion" was an encoding-corrupted "&region".
3021 vkd.cmdCopyImageToBuffer(commandBuffer, context.getImage(), m_imageLayout, *m_dstBuffer, 1, &region);
// Make the copied-to buffer host-readable, mark the image memory as undefined in
// the byte-level reference model (readback invalidates byte tracking), and
// compare the readback pixels against the reference image.
3024 void ImageCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
3026 tcu::ResultCollector& resultCollector (context.getResultCollector());
3027 ReferenceMemory& reference (context.getReference());
3028 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3029 const vk::VkDevice device = context.getContext().getDevice();
3030 const vk::VkQueue queue = context.getContext().getQueue();
3031 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3032 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// Barrier making the transfer write to m_dstBuffer visible to host reads.
3033 const vk::VkBufferMemoryBarrier barrier =
3035 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3038 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3039 vk::VK_ACCESS_HOST_READ_BIT,
3041 VK_QUEUE_FAMILY_IGNORED,
3042 VK_QUEUE_FAMILY_IGNORED,
3048 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3050 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3051 queueRun(vkd, queue, *commandBuffer);
3053 reference.setUndefined(0, (size_t)m_imageMemorySize);
3055 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3056 const ConstPixelBufferAccess referenceImage (context.getReferenceImage().getAccess());
3057 const ConstPixelBufferAccess resultImage (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, ptr);
3059 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
// Exact (zero-threshold) comparison of the readback against the reference image.
3061 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), referenceImage, resultImage, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3062 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3064 vkd.unmapMemory(device, *m_memory);
// Command that fills a host-visible source buffer with seeded random bytes and
// copies it into the test image.
3068 class ImageCopyFromBuffer : public CmdCommand
3071 ImageCopyFromBuffer (deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
3072 ~ImageCopyFromBuffer (void) {}
3073 const char* getName (void) const { return "ImageCopyFromBuffer"; }
3075 void logPrepare (TestLog& log, size_t commandIndex) const;
3076 void prepare (PrepareContext& context);
3077 void logSubmit (TestLog& log, size_t commandIndex) const;
3078 void submit (SubmitContext& context);
3079 void verify (VerifyContext& context, size_t commandIndex);
3082 const deUint32 m_seed; // RNG seed shared by prepare() and verify()
3083 const vk::VkImageLayout m_imageLayout; // layout of the test image at copy time
3084 deInt32 m_imageWidth;
3085 deInt32 m_imageHeight;
3086 vk::VkDeviceSize m_imageMemorySize;
3087 vk::VkDeviceSize m_bufferSize;
3088 vk::Move<vk::VkBuffer> m_srcBuffer;
3089 vk::Move<vk::VkDeviceMemory> m_memory;
// Log the source-buffer allocation step, including the RNG seed.
3092 void ImageCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
3094 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to image copy. Seed: " << m_seed << TestLog::EndMessage;
// Allocate a host-visible TRANSFER_SRC buffer large enough for the whole RGBA8
// test image and fill it with deterministic random bytes for verify() to replay.
3097 void ImageCopyFromBuffer::prepare (PrepareContext& context)
3099 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3100 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3101 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3102 const vk::VkDevice device = context.getContext().getDevice();
3103 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Fixed: width and height were read from the swapped getters
// (getImageHeight()/getImageWidth()), so the copy region in submit() would not
// match the actual image dimensions for non-square images.
3105 m_imageWidth = context.getImageWidth();
3106 m_imageHeight = context.getImageHeight();
3107 m_imageMemorySize = context.getImageMemorySize();
3108 m_bufferSize = m_imageWidth * m_imageHeight * 4;
3109 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
3110 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
3113 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3114 de::Random rng (m_seed);
3117 deUint8* const data = (deUint8*)ptr;
3119 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
3120 data[ndx] = rng.getUint8();
// Flush before unmap: the memory is only guaranteed host-visible, not coherent.
3123 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3124 vkd.unmapMemory(device, *m_memory);
// Log a one-line description of the buffer-to-image copy submission.
3128 void ImageCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
3130 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from buffer" << TestLog::EndMessage;
// Record a copy of m_srcBuffer into the test image (in m_imageLayout).
3133 void ImageCopyFromBuffer::submit (SubmitContext& context)
3135 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3136 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3137 const vk::VkBufferImageCopy region =
3142 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3149 (deUint32)m_imageWidth,
3150 (deUint32)m_imageHeight,
// Fixed mojibake: "®ion" was an encoding-corrupted "&region".
3155 vkd.cmdCopyBufferToImage(commandBuffer, *m_srcBuffer, context.getImage(), m_imageLayout, 1, &region);
// Mark the image memory undefined in the byte-level reference model, then
// replay the seeded RNG stream into the reference image pixel by pixel
// (R, G, B, A drawn in that order, matching the byte order written by prepare()).
3158 void ImageCopyFromBuffer::verify (VerifyContext& context, size_t)
3160 ReferenceMemory& reference (context.getReference());
3161 de::Random rng (m_seed);
3163 reference.setUndefined(0, (size_t)m_imageMemorySize);
3166 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3168 for (deInt32 y = 0; y < m_imageHeight; y++)
3169 for (deInt32 x = 0; x < m_imageWidth; x++)
3171 const deUint8 r8 = rng.getUint8();
3172 const deUint8 g8 = rng.getUint8();
3173 const deUint8 b8 = rng.getUint8();
3174 const deUint8 a8 = rng.getUint8();
3176 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
// Command that fills a freshly created RGBA8 image with seeded random data and
// copies it into the test image.
3181 class ImageCopyFromImage : public CmdCommand
3184 ImageCopyFromImage (deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
3185 ~ImageCopyFromImage (void) {}
3186 const char* getName (void) const { return "ImageCopyFromImage"; }
3188 void logPrepare (TestLog& log, size_t commandIndex) const;
3189 void prepare (PrepareContext& context);
3190 void logSubmit (TestLog& log, size_t commandIndex) const;
3191 void submit (SubmitContext& context);
3192 void verify (VerifyContext& context, size_t commandIndex);
3195 const deUint32 m_seed; // RNG seed shared by prepare() and verify()
3196 const vk::VkImageLayout m_imageLayout; // layout of the test image at copy time
3197 deInt32 m_imageWidth;
3198 deInt32 m_imageHeight;
3199 vk::VkDeviceSize m_imageMemorySize;
3200 vk::Move<vk::VkImage> m_srcImage;
3201 vk::Move<vk::VkDeviceMemory> m_memory;
// Log the source-image allocation step.
3204 void ImageCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3206 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image copy." << TestLog::EndMessage;
// Create an RGBA8 image matching the test image's dimensions, upload seeded
// random data through a host-visible staging buffer, and leave the image in
// TRANSFER_SRC_OPTIMAL for submit().
3209 void ImageCopyFromImage::prepare (PrepareContext& context)
3211 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3212 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3213 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3214 const vk::VkDevice device = context.getContext().getDevice();
3215 const vk::VkQueue queue = context.getContext().getQueue();
3216 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3217 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3219 m_imageWidth = context.getImageWidth();
3220 m_imageHeight = context.getImageHeight();
3221 m_imageMemorySize = context.getImageMemorySize();
3224 const vk::VkImageCreateInfo createInfo =
3226 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3230 vk::VK_IMAGE_TYPE_2D,
3231 vk::VK_FORMAT_R8G8B8A8_UNORM,
3233 (deUint32)m_imageWidth,
3234 (deUint32)m_imageHeight,
3237 1, 1, // mipLevels, arrayLayers
3238 vk::VK_SAMPLE_COUNT_1_BIT,
3240 vk::VK_IMAGE_TILING_OPTIMAL,
3241 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3242 vk::VK_SHARING_MODE_EXCLUSIVE,
3244 (deUint32)queueFamilies.size(),
3246 vk::VK_IMAGE_LAYOUT_UNDEFINED
3249 m_srcImage = vk::createImage(vkd, device, &createInfo);
3252 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
// Host-visible staging buffer used to seed the image contents.
3255 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3256 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3257 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// UNDEFINED -> TRANSFER_DST before the upload copy.
3258 const vk::VkImageMemoryBarrier preImageBarrier =
3260 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3264 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3266 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3267 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3269 VK_QUEUE_FAMILY_IGNORED,
3270 VK_QUEUE_FAMILY_IGNORED,
3274 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3276 1, // Mip level count
// TRANSFER_DST -> TRANSFER_SRC after the upload copy.
3281 const vk::VkImageMemoryBarrier postImageBarrier =
3283 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3286 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3289 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3290 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3292 VK_QUEUE_FAMILY_IGNORED,
3293 VK_QUEUE_FAMILY_IGNORED,
3297 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3299 1, // Mip level count
3304 const vk::VkBufferImageCopy region =
3309 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3316 (deUint32)m_imageWidth,
3317 (deUint32)m_imageHeight,
3323 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3324 de::Random rng (m_seed);
3327 deUint8* const data = (deUint8*)ptr;
3329 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
3330 data[ndx] = rng.getUint8();
3333 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3334 vkd.unmapMemory(device, *memory);
3337 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
// Fixed mojibake: "®ion" was an encoding-corrupted "&region".
3338 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
3339 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3341 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3342 queueRun(vkd, queue, *commandBuffer);
// Log a one-line description of the image-to-image copy submission.
3346 void ImageCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3348 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from another image" << TestLog::EndMessage;
// Record a whole-image copy from m_srcImage (TRANSFER_SRC_OPTIMAL) into the
// test image (in m_imageLayout).
3351 void ImageCopyFromImage::submit (SubmitContext& context)
3353 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3354 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3355 const vk::VkImageCopy region =
3358 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3366 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3373 (deUint32)m_imageWidth,
3374 (deUint32)m_imageHeight,
// Fixed mojibake: "®ion" was an encoding-corrupted "&region".
3379 vkd.cmdCopyImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, &region);
// Mark the image memory undefined in the byte-level reference model, then
// replay the seeded RNG stream into the reference image pixel by pixel
// (R, G, B, A drawn in that order, matching prepare()'s upload order).
3382 void ImageCopyFromImage::verify (VerifyContext& context, size_t)
3384 ReferenceMemory& reference (context.getReference());
3385 de::Random rng (m_seed);
3387 reference.setUndefined(0, (size_t)m_imageMemorySize);
3390 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3392 for (deInt32 y = 0; y < m_imageHeight; y++)
3393 for (deInt32 x = 0; x < m_imageWidth; x++)
3395 const deUint8 r8 = rng.getUint8();
3396 const deUint8 g8 = rng.getUint8();
3397 const deUint8 b8 = rng.getUint8();
3398 const deUint8 a8 = rng.getUint8();
3400 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
// Command that copies the test image into a freshly created RGBA8 destination image.
3405 class ImageCopyToImage : public CmdCommand
3408 ImageCopyToImage (vk::VkImageLayout imageLayout) : m_imageLayout(imageLayout) {}
3409 ~ImageCopyToImage (void) {}
3410 const char* getName (void) const { return "ImageCopyToImage"; }
3412 void logPrepare (TestLog& log, size_t commandIndex) const;
3413 void prepare (PrepareContext& context);
3414 void logSubmit (TestLog& log, size_t commandIndex) const;
3415 void submit (SubmitContext& context);
3416 void verify (VerifyContext& context, size_t commandIndex);
3419 const vk::VkImageLayout m_imageLayout; // layout of the test image at copy time
3420 deInt32 m_imageWidth;
3421 deInt32 m_imageHeight;
3422 vk::VkDeviceSize m_imageMemorySize;
3423 vk::Move<vk::VkImage> m_dstImage;
3424 vk::Move<vk::VkDeviceMemory> m_memory;
// Log the destination-image allocation step.
3427 void ImageCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
3429 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image copy." << TestLog::EndMessage;
// Create the destination image sized like the test image, bind memory, and
// transition it to TRANSFER_DST_OPTIMAL with a one-off command buffer so the
// copy recorded in submit() can target it directly.
3432 void ImageCopyToImage::prepare (PrepareContext& context)
3434 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3435 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3436 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3437 const vk::VkDevice device = context.getContext().getDevice();
3438 const vk::VkQueue queue = context.getContext().getQueue();
3439 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3440 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Cache dimensions for submit()/verify(), which run with other contexts.
3442 m_imageWidth = context.getImageWidth();
3443 m_imageHeight = context.getImageHeight();
3444 m_imageMemorySize = context.getImageMemorySize();
// Destination image: same size/format as the test image; usable both as a
// transfer destination (the copy) and source (verify() read-back).
3447 const vk::VkImageCreateInfo createInfo =
3449 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3453 vk::VK_IMAGE_TYPE_2D,
3454 vk::VK_FORMAT_R8G8B8A8_UNORM,
3456 (deUint32)m_imageWidth,
3457 (deUint32)m_imageHeight,
3460 1, 1, // mipLevels, arrayLayers
3461 vk::VK_SAMPLE_COUNT_1_BIT,
3463 vk::VK_IMAGE_TILING_OPTIMAL,
3464 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3465 vk::VK_SHARING_MODE_EXCLUSIVE,
3467 (deUint32)queueFamilies.size(),
3469 vk::VK_IMAGE_LAYOUT_UNDEFINED
3472 m_dstImage = vk::createImage(vkd, device, &createInfo);
3475 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
// One-off command buffer: transition UNDEFINED -> TRANSFER_DST_OPTIMAL.
3478 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3479 const vk::VkImageMemoryBarrier barrier =
3481 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3485 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3487 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3488 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3490 VK_QUEUE_FAMILY_IGNORED,
3491 VK_QUEUE_FAMILY_IGNORED,
3495 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3497 1, // Mip level count
3503 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
// Submit and wait so the layout transition is complete before submit() runs.
3505 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3506 queueRun(vkd, queue, *commandBuffer);
// Log what submit() records for this command.
3510 void ImageCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
3512 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to another image" << TestLog::EndMessage;
// Record the full-size copy from the test image (in m_imageLayout) into the
// prepared destination image (in TRANSFER_DST_OPTIMAL).
3515 void ImageCopyToImage::submit (SubmitContext& context)
3517 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3518 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
// Single region covering the whole color aspect of both images.
3519 const vk::VkImageCopy region =
3522 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3530 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3537 (deUint32)m_imageWidth,
3538 (deUint32)m_imageHeight,
// Fixed mojibake: "&region" had been mangled into the HTML entity "(R)ion".
3543 vkd.cmdCopyImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
// Read the destination image back into a host-visible buffer and compare it
// pixel-exactly against the reference image.
3546 void ImageCopyToImage::verify (VerifyContext& context, size_t commandIndex)
3548 tcu::ResultCollector& resultCollector (context.getResultCollector());
3549 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3550 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3551 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3552 const vk::VkDevice device = context.getContext().getDevice();
3553 const vk::VkQueue queue = context.getContext().getQueue();
3554 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3555 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3556 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// 4 bytes per pixel (RGBA8) for the read-back staging buffer.
3557 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3558 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Make the copy's writes visible and move the image to TRANSFER_SRC_OPTIMAL
// for the image-to-buffer read-back.
3560 const vk::VkImageMemoryBarrier imageBarrier =
3562 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3565 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3566 vk::VK_ACCESS_TRANSFER_READ_BIT,
3568 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3569 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3571 VK_QUEUE_FAMILY_IGNORED,
3572 VK_QUEUE_FAMILY_IGNORED,
3576 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3578 1, // Mip level count
// Make the buffer writes visible to host reads after the copy.
3583 const vk::VkBufferMemoryBarrier bufferBarrier =
3585 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3588 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3589 vk::VK_ACCESS_HOST_READ_BIT,
3591 VK_QUEUE_FAMILY_IGNORED,
3592 VK_QUEUE_FAMILY_IGNORED,
3597 const vk::VkBufferImageCopy region =
3602 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3609 (deUint32)m_imageWidth,
3610 (deUint32)m_imageHeight,
3615 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
// Fixed mojibake: "&region" had been mangled into the HTML entity "(R)ion".
3616 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
3617 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3620 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3621 queueRun(vkd, queue, *commandBuffer);
// Map, invalidate (memory may be non-coherent), and compare exactly.
3624 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3626 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3629 const deUint8* const data = (const deUint8*)ptr;
3630 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, data);
3631 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
// Zero threshold: the copy must be bit-exact.
3633 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3634 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3637 vkd.unmapMemory(device, *memory);
// Command that blits seed-driven random data from a newly created source image
// into the test image, at 1:1 (BLIT_SCALE_10) or 2x upscale (BLIT_SCALE_20).
3647 class ImageBlitFromImage : public CmdCommand
3650 ImageBlitFromImage (deUint32 seed, BlitScale scale, vk::VkImageLayout imageLayout) : m_seed(seed), m_scale(scale), m_imageLayout(imageLayout) {}
3651 ~ImageBlitFromImage (void) {}
3652 const char* getName (void) const { return "ImageBlitFromImage"; }
3654 void logPrepare (TestLog& log, size_t commandIndex) const;
3655 void prepare (PrepareContext& context);
3656 void logSubmit (TestLog& log, size_t commandIndex) const;
3657 void submit (SubmitContext& context);
3658 void verify (VerifyContext& context, size_t commandIndex);
// Seed for the deterministic pixel data; also replayed by verify().
3661 const deUint32 m_seed;
3662 const BlitScale m_scale;
// Layout the destination (test) image is expected to be in for the blit.
3663 const vk::VkImageLayout m_imageLayout;
// Destination (test image) dimensions, cached in prepare().
3664 deInt32 m_imageWidth;
3665 deInt32 m_imageHeight;
3666 vk::VkDeviceSize m_imageMemorySize;
// Source image dimensions: equal to, or half of, the destination size.
3667 deInt32 m_srcImageWidth;
3668 deInt32 m_srcImageHeight;
// Source image and backing memory, owned by this command.
3669 vk::Move<vk::VkImage> m_srcImage;
3670 vk::Move<vk::VkDeviceMemory> m_memory;
// Log what prepare() will do for this command.
3673 void ImageBlitFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3675 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image blit." << TestLog::EndMessage;
// Create the blit source image (full or half size depending on m_scale), fill
// it with seed-driven random RGBA8 data via a staging buffer, and leave it in
// TRANSFER_SRC_OPTIMAL ready for the blit recorded in submit().
3678 void ImageBlitFromImage::prepare (PrepareContext& context)
3680 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3681 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3682 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3683 const vk::VkDevice device = context.getContext().getDevice();
3684 const vk::VkQueue queue = context.getContext().getQueue();
3685 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3686 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3688 m_imageWidth = context.getImageWidth();
3689 m_imageHeight = context.getImageHeight();
3690 m_imageMemorySize = context.getImageMemorySize();
// 2x upscale blit uses a half-size source.
3692 if (m_scale == BLIT_SCALE_10)
3694 m_srcImageWidth = m_imageWidth;
3695 m_srcImageHeight = m_imageHeight;
3697 else if (m_scale == BLIT_SCALE_20)
3699 m_srcImageWidth = m_imageWidth / 2;
3700 m_srcImageHeight = m_imageHeight / 2;
3703 DE_FATAL("Unsupported scale");
3706 const vk::VkImageCreateInfo createInfo =
3708 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3712 vk::VK_IMAGE_TYPE_2D,
3713 vk::VK_FORMAT_R8G8B8A8_UNORM,
3715 (deUint32)m_srcImageWidth,
3716 (deUint32)m_srcImageHeight,
3719 1, 1, // mipLevels, arrayLayers
3720 vk::VK_SAMPLE_COUNT_1_BIT,
3722 vk::VK_IMAGE_TILING_OPTIMAL,
3723 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3724 vk::VK_SHARING_MODE_EXCLUSIVE,
3726 (deUint32)queueFamilies.size(),
3728 vk::VK_IMAGE_LAYOUT_UNDEFINED
3731 m_srcImage = vk::createImage(vkd, device, &createInfo);
3734 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
// Host-visible staging buffer (4 bytes/pixel) used to upload the pixel data.
3737 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_srcImageWidth * m_srcImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3738 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3739 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// UNDEFINED -> TRANSFER_DST_OPTIMAL before the buffer-to-image copy.
3740 const vk::VkImageMemoryBarrier preImageBarrier =
3742 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3746 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3748 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3749 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3751 VK_QUEUE_FAMILY_IGNORED,
3752 VK_QUEUE_FAMILY_IGNORED,
3756 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3758 1, // Mip level count
// TRANSFER_DST_OPTIMAL -> TRANSFER_SRC_OPTIMAL after the upload, so the
// image is ready to be the blit source.
3763 const vk::VkImageMemoryBarrier postImageBarrier =
3765 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3768 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3771 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3772 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3774 VK_QUEUE_FAMILY_IGNORED,
3775 VK_QUEUE_FAMILY_IGNORED,
3779 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3781 1, // Mip level count
3786 const vk::VkBufferImageCopy region =
3791 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3798 (deUint32)m_srcImageWidth,
3799 (deUint32)m_srcImageHeight,
// Fill the staging buffer with deterministic bytes from m_seed; verify()
// replays the same sequence.
3805 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_srcImageWidth * m_srcImageHeight);
3806 de::Random rng (m_seed);
3809 deUint8* const data = (deUint8*)ptr;
3811 for (size_t ndx = 0; ndx < (size_t)(4 * m_srcImageWidth * m_srcImageHeight); ndx++)
3812 data[ndx] = rng.getUint8();
// Flush (memory may be non-coherent) before the device reads the buffer.
3815 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_srcImageWidth * m_srcImageHeight);
3816 vkd.unmapMemory(device, *memory);
3819 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
// Fixed mojibake: "&region" had been mangled into the HTML entity "(R)ion".
3820 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
3821 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
// Submit and wait so the source image is fully initialized before submit().
3823 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3824 queueRun(vkd, queue, *commandBuffer);
// Log what submit() records, including whether this is the 2x-scale variant.
3828 void ImageBlitFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3830 log << TestLog::Message << commandIndex << ":" << getName() << " Blit from another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "") << TestLog::EndMessage;
// Record the blit from the prepared source image into the test image, with
// nearest filtering (the verify() reference uses nearest sampling too).
3833 void ImageBlitFromImage::submit (SubmitContext& context)
3835 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3836 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3837 const vk::VkImageBlit region =
3841 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3857 vk::VK_IMAGE_ASPECT_COLOR_BIT,
// Fixed mojibake: "&region" had been mangled into the HTML entity "(R)ion".
3871 vkd.cmdBlitImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, &region, vk::VK_FILTER_NEAREST);
// Rebuild the reference image: replay the seeded pixel stream and, for the 2x
// case, apply the same nearest-neighbour scaling the blit performed.
3874 void ImageBlitFromImage::verify (VerifyContext& context, size_t)
3876 ReferenceMemory& reference (context.getReference());
3877 de::Random rng (m_seed);
// Blitting into an optimal-tiling image makes the raw memory unpredictable.
3879 reference.setUndefined(0, (size_t)m_imageMemorySize);
3882 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3884 if (m_scale == BLIT_SCALE_10)
// 1:1 blit: the destination is exactly the seeded byte stream.
3886 for (deInt32 y = 0; y < m_imageHeight; y++)
3887 for (deInt32 x = 0; x < m_imageWidth; x++)
3889 const deUint8 r8 = rng.getUint8();
3890 const deUint8 g8 = rng.getUint8();
3891 const deUint8 b8 = rng.getUint8();
3892 const deUint8 a8 = rng.getUint8();
3894 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3897 else if (m_scale == BLIT_SCALE_20)
// 2x upscale: reconstruct the half-size source, then sample it with the
// same nearest-neighbour mapping (scale = src/dst = 0.5) as the blit.
3899 tcu::TextureLevel source (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_srcImageWidth, m_srcImageHeight);
3900 const float xscale = ((float)m_srcImageWidth) / (float)m_imageWidth;
3901 const float yscale = ((float)m_srcImageHeight) / (float)m_imageHeight;
3903 for (deInt32 y = 0; y < m_srcImageHeight; y++)
3904 for (deInt32 x = 0; x < m_srcImageWidth; x++)
3906 const deUint8 r8 = rng.getUint8();
3907 const deUint8 g8 = rng.getUint8();
3908 const deUint8 b8 = rng.getUint8();
3909 const deUint8 a8 = rng.getUint8();
3911 source.getAccess().setPixel(UVec4(r8, g8, b8, a8), x, y);
3914 for (deInt32 y = 0; y < m_imageHeight; y++)
3915 for (deInt32 x = 0; x < m_imageWidth; x++)
3916 refAccess.setPixel(source.getAccess().getPixelUint(int(float(x) * xscale), int(float(y) * yscale)), x, y);
3919 DE_FATAL("Unsupported scale");
// Command that blits the test image into a freshly allocated destination image
// (same size or 2x larger), then reads the destination back in verify() and
// compares against a reference derived from the reference image.
3923 class ImageBlitToImage : public CmdCommand
3926 ImageBlitToImage (BlitScale scale, vk::VkImageLayout imageLayout) : m_scale(scale), m_imageLayout(imageLayout) {}
3927 ~ImageBlitToImage (void) {}
3928 const char* getName (void) const { return "ImageBlitToImage"; }
3930 void logPrepare (TestLog& log, size_t commandIndex) const;
3931 void prepare (PrepareContext& context);
3932 void logSubmit (TestLog& log, size_t commandIndex) const;
3933 void submit (SubmitContext& context);
3934 void verify (VerifyContext& context, size_t commandIndex);
3937 const BlitScale m_scale;
// Layout the source (test) image is expected to be in for the blit.
3938 const vk::VkImageLayout m_imageLayout;
// Source (test image) dimensions, cached in prepare().
3939 deInt32 m_imageWidth;
3940 deInt32 m_imageHeight;
3941 vk::VkDeviceSize m_imageMemorySize;
// Destination dimensions: equal to, or double, the source size.
3942 deInt32 m_dstImageWidth;
3943 deInt32 m_dstImageHeight;
// Destination image and backing memory, owned by this command.
3944 vk::Move<vk::VkImage> m_dstImage;
3945 vk::Move<vk::VkDeviceMemory> m_memory;
// Log what prepare() will do for this command.
3948 void ImageBlitToImage::logPrepare (TestLog& log, size_t commandIndex) const
3950 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image blit." << TestLog::EndMessage;
// Create the blit destination image (same size as, or 2x, the test image),
// bind memory, and transition it to TRANSFER_DST_OPTIMAL so the blit recorded
// in submit() can target it.
3953 void ImageBlitToImage::prepare (PrepareContext& context)
3955 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3956 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3957 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3958 const vk::VkDevice device = context.getContext().getDevice();
3959 const vk::VkQueue queue = context.getContext().getQueue();
3960 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3961 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3963 m_imageWidth = context.getImageWidth();
3964 m_imageHeight = context.getImageHeight();
3965 m_imageMemorySize = context.getImageMemorySize();
// 2x upscale blit targets a double-size destination.
3967 if (m_scale == BLIT_SCALE_10)
3969 m_dstImageWidth = context.getImageWidth();
3970 m_dstImageHeight = context.getImageHeight();
3972 else if (m_scale == BLIT_SCALE_20)
3974 m_dstImageWidth = context.getImageWidth() * 2;
3975 m_dstImageHeight = context.getImageHeight() * 2;
// Fixed typo in the fatal message: "Unsupportd" -> "Unsupported".
3978 DE_FATAL("Unsupported blit scale");
3981 const vk::VkImageCreateInfo createInfo =
3983 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3987 vk::VK_IMAGE_TYPE_2D,
3988 vk::VK_FORMAT_R8G8B8A8_UNORM,
3990 (deUint32)m_dstImageWidth,
3991 (deUint32)m_dstImageHeight,
3994 1, 1, // mipLevels, arrayLayers
3995 vk::VK_SAMPLE_COUNT_1_BIT,
3997 vk::VK_IMAGE_TILING_OPTIMAL,
3998 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3999 vk::VK_SHARING_MODE_EXCLUSIVE,
4001 (deUint32)queueFamilies.size(),
4003 vk::VK_IMAGE_LAYOUT_UNDEFINED
4006 m_dstImage = vk::createImage(vkd, device, &createInfo);
4009 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
// One-off command buffer: transition UNDEFINED -> TRANSFER_DST_OPTIMAL.
4012 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4013 const vk::VkImageMemoryBarrier barrier =
4015 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4019 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4021 vk::VK_IMAGE_LAYOUT_UNDEFINED,
4022 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4024 VK_QUEUE_FAMILY_IGNORED,
4025 VK_QUEUE_FAMILY_IGNORED,
4029 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4031 1, // Mip level count
4037 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
// Submit and wait so the layout transition is complete before submit() runs.
4039 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4040 queueRun(vkd, queue, *commandBuffer);
// Log what submit() records, including whether this is the 2x-scale variant.
4044 void ImageBlitToImage::logSubmit (TestLog& log, size_t commandIndex) const
4046 log << TestLog::Message << commandIndex << ":" << getName() << " Blit image to another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "") << TestLog::EndMessage;
// Record the blit from the test image into the prepared destination image,
// with nearest filtering (matching the reference construction in verify()).
4049 void ImageBlitToImage::submit (SubmitContext& context)
4051 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4052 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4053 const vk::VkImageBlit region =
4057 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4073 vk::VK_IMAGE_ASPECT_COLOR_BIT,
// Fixed mojibake: "&region" had been mangled into the HTML entity "(R)ion".
4087 vkd.cmdBlitImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region, vk::VK_FILTER_NEAREST);
// Read the blit destination back into a host-visible buffer and compare it
// against the reference image — directly for 1:1, or against a 2x
// nearest-neighbour upscale of the reference for the scaled case.
4090 void ImageBlitToImage::verify (VerifyContext& context, size_t commandIndex)
4092 tcu::ResultCollector& resultCollector (context.getResultCollector());
4093 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4094 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4095 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4096 const vk::VkDevice device = context.getContext().getDevice();
4097 const vk::VkQueue queue = context.getContext().getQueue();
4098 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
4099 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4100 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Read-back staging buffer, 4 bytes per pixel (RGBA8).
4101 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_dstImageWidth * m_dstImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4102 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Make the blit's writes visible and move the destination image to
// TRANSFER_SRC_OPTIMAL for the read-back copy.
4104 const vk::VkImageMemoryBarrier imageBarrier =
4106 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4109 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4110 vk::VK_ACCESS_TRANSFER_READ_BIT,
4112 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4113 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4115 VK_QUEUE_FAMILY_IGNORED,
4116 VK_QUEUE_FAMILY_IGNORED,
4120 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4122 1, // Mip level count
// Make the buffer writes visible to host reads after the copy.
4127 const vk::VkBufferMemoryBarrier bufferBarrier =
4129 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4132 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4133 vk::VK_ACCESS_HOST_READ_BIT,
4135 VK_QUEUE_FAMILY_IGNORED,
4136 VK_QUEUE_FAMILY_IGNORED,
4141 const vk::VkBufferImageCopy region =
4146 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4153 (deUint32)m_dstImageWidth,
4154 (deUint32)m_dstImageHeight,
4159 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
// Fixed mojibake: "&region" had been mangled into the HTML entity "(R)ion".
4160 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
4161 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4164 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4165 queueRun(vkd, queue, *commandBuffer);
// Map and invalidate (memory may be non-coherent) before reading on host.
4168 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_dstImageWidth * m_dstImageHeight);
4170 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_dstImageWidth * m_dstImageHeight);
4172 if (m_scale == BLIT_SCALE_10)
// 1:1 blit: result must match the reference image bit-exactly.
4174 const deUint8* const data = (const deUint8*)ptr;
4175 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4176 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
4178 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4179 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4181 else if (m_scale == BLIT_SCALE_20)
// 2x blit: build the expected image by nearest-neighbour upscaling the
// reference (each destination pixel samples reference pixel (x/2, y/2)).
4183 const deUint8* const data = (const deUint8*)ptr;
4184 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4185 tcu::TextureLevel reference (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1);
4188 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
4190 for (deInt32 y = 0; y < m_dstImageHeight; y++)
4191 for (deInt32 x = 0; x < m_dstImageWidth; x++)
4193 reference.getAccess().setPixel(refAccess.getPixel(x/2, y/2), x, y);
4197 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), reference.getAccess(), resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4198 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4201 DE_FATAL("Unknown scale");
4203 vkd.unmapMemory(device, *memory);
// Wrapper around PrepareContext that additionally exposes the render pass,
// framebuffer and render-target dimensions to RenderPassCommand::prepare().
// Non-owning: all handles are borrowed from SubmitRenderPass.
4207 class PrepareRenderPassContext
4210 PrepareRenderPassContext (PrepareContext& context,
4211 vk::VkRenderPass renderPass,
4212 vk::VkFramebuffer framebuffer,
4213 deInt32 targetWidth,
4214 deInt32 targetHeight)
4215 : m_context (context)
4216 , m_renderPass (renderPass)
4217 , m_framebuffer (framebuffer)
4218 , m_targetWidth (targetWidth)
4219 , m_targetHeight (targetHeight)
// Accessors forwarded to the wrapped PrepareContext.
4223 const Memory& getMemory (void) const { return m_context.getMemory(); }
4224 const Context& getContext (void) const { return m_context.getContext(); }
4225 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_context.getBinaryCollection(); }
4227 vk::VkBuffer getBuffer (void) const { return m_context.getBuffer(); }
4228 vk::VkDeviceSize getBufferSize (void) const { return m_context.getBufferSize(); }
4230 vk::VkImage getImage (void) const { return m_context.getImage(); }
4231 deInt32 getImageWidth (void) const { return m_context.getImageWidth(); }
4232 deInt32 getImageHeight (void) const { return m_context.getImageHeight(); }
4233 vk::VkImageLayout getImageLayout (void) const { return m_context.getImageLayout(); }
// Render-pass specific state.
4235 deInt32 getTargetWidth (void) const { return m_targetWidth; }
4236 deInt32 getTargetHeight (void) const { return m_targetHeight; }
4238 vk::VkRenderPass getRenderPass (void) const { return m_renderPass; }
4241 PrepareContext& m_context;
4242 const vk::VkRenderPass m_renderPass;
4243 const vk::VkFramebuffer m_framebuffer;
4244 const deInt32 m_targetWidth;
4245 const deInt32 m_targetHeight;
// Wrapper around VerifyContext that adds a CPU-side reference render target
// (RGBA8, targetWidth x targetHeight) for RenderPassCommand::verify() to
// draw its expected output into.
4248 class VerifyRenderPassContext
4251 VerifyRenderPassContext (VerifyContext& context,
4252 deInt32 targetWidth,
4253 deInt32 targetHeight)
4254 : m_context (context)
4255 , m_referenceTarget (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), targetWidth, targetHeight)
// Accessors forwarded to the wrapped VerifyContext.
4259 const Context& getContext (void) const { return m_context.getContext(); }
4260 TestLog& getLog (void) const { return m_context.getLog(); }
4261 tcu::ResultCollector& getResultCollector (void) const { return m_context.getResultCollector(); }
// CPU-side expected color target, owned by this context.
4263 TextureLevel& getReferenceTarget (void) { return m_referenceTarget; }
4265 ReferenceMemory& getReference (void) { return m_context.getReference(); }
4266 TextureLevel& getReferenceImage (void) { return m_context.getReferenceImage();}
4269 VerifyContext& m_context;
4270 TextureLevel m_referenceTarget;
// Interface for commands executed inside a render pass; mirrors CmdCommand
// but uses the render-pass flavored prepare/verify contexts. All hooks have
// empty default implementations so subclasses override only what they need.
4273 class RenderPassCommand
4276 virtual ~RenderPassCommand (void) {}
4277 virtual const char* getName (void) const = 0;
4279 // Log things that are done during prepare
4280 virtual void logPrepare (TestLog&, size_t) const {}
4281 // Log submitted calls etc.
4282 virtual void logSubmit (TestLog&, size_t) const {}
4284 // Allocate vulkan resources and prepare for submit.
4285 virtual void prepare (PrepareRenderPassContext&) {}
4287 // Submit commands to command buffer.
4288 virtual void submit (SubmitContext&) {}
// Update reference state / check results for this command.
4291 virtual void verify (VerifyRenderPassContext&, size_t) {}
// Composite command: owns a fixed-size color target, render pass and
// framebuffer, and runs a list of RenderPassCommands inside a single render
// pass instance.
4294 class SubmitRenderPass : public CmdCommand
4297 SubmitRenderPass (const vector<RenderPassCommand*>& commands);
4298 ~SubmitRenderPass (void);
4299 const char* getName (void) const { return "SubmitRenderPass"; }
4301 void logPrepare (TestLog&, size_t) const;
4302 void logSubmit (TestLog&, size_t) const;
4304 void prepare (PrepareContext&);
4305 void submit (SubmitContext&);
4307 void verify (VerifyContext&, size_t);
// Render target dimensions, fixed at construction (256x256).
4310 const deInt32 m_targetWidth;
4311 const deInt32 m_targetHeight;
// Color target image, its view, memory, render pass and framebuffer — all
// created in prepare() and owned by this command.
4312 vk::Move<vk::VkRenderPass> m_renderPass;
4313 vk::Move<vk::VkDeviceMemory> m_colorTargetMemory;
4314 de::MovePtr<vk::Allocation> m_colorTargetMemory2;
4315 vk::Move<vk::VkImage> m_colorTarget;
4316 vk::Move<vk::VkImageView> m_colorTargetView;
4317 vk::Move<vk::VkFramebuffer> m_framebuffer;
// Sub-commands; owned by this object and deleted in the destructor.
4318 vector<RenderPassCommand*> m_commands;
// Takes ownership of the raw RenderPassCommand pointers in 'commands';
// they are deleted in ~SubmitRenderPass(). Target size is fixed at 256x256.
4321 SubmitRenderPass::SubmitRenderPass (const vector<RenderPassCommand*>& commands)
4322 : m_targetWidth (256)
4323 , m_targetHeight (256)
4324 , m_commands (commands)
// Free the owned sub-commands (stored as raw pointers in m_commands).
4328 SubmitRenderPass::~SubmitRenderPass()
4330 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4331 delete m_commands[cmdNdx];
// Log prepare steps of each sub-command under a log section for this command.
4334 void SubmitRenderPass::logPrepare (TestLog& log, size_t commandIndex) const
4336 const string sectionName (de::toString(commandIndex) + ":" + getName());
4337 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4339 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4341 RenderPassCommand& command = *m_commands[cmdNdx];
4342 command.logPrepare(log, cmdNdx);
// Log submit steps of each sub-command under a log section for this command.
4346 void SubmitRenderPass::logSubmit (TestLog& log, size_t commandIndex) const
4348 const string sectionName (de::toString(commandIndex) + ":" + getName());
4349 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4351 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4353 RenderPassCommand& command = *m_commands[cmdNdx];
4354 command.logSubmit(log, cmdNdx);
// Build all render-pass resources — color target image + memory, image view,
// render pass, framebuffer — then run each sub-command's prepare() with a
// PrepareRenderPassContext exposing them.
4358 void SubmitRenderPass::prepare (PrepareContext& context)
4360 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4361 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4362 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4363 const vk::VkDevice device = context.getContext().getDevice();
4364 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Single subpass writing one color attachment.
4366 const vk::VkAttachmentReference colorAttachments[] =
4368 { 0, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }
4370 const vk::VkSubpassDescription subpass =
4373 vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
4378 DE_LENGTH_OF_ARRAY(colorAttachments),
// Attachment is cleared at load, stored at the end, and finishes in
// TRANSFER_SRC_OPTIMAL so verify() can read it back.
4385 const vk::VkAttachmentDescription attachment =
4388 vk::VK_FORMAT_R8G8B8A8_UNORM,
4389 vk::VK_SAMPLE_COUNT_1_BIT,
4391 vk::VK_ATTACHMENT_LOAD_OP_CLEAR,
4392 vk::VK_ATTACHMENT_STORE_OP_STORE,
4394 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
4395 vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,
4397 vk::VK_IMAGE_LAYOUT_UNDEFINED,
4398 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
// Create the color target image and bind memory for it.
4401 const vk::VkImageCreateInfo createInfo =
4403 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
4407 vk::VK_IMAGE_TYPE_2D,
4408 vk::VK_FORMAT_R8G8B8A8_UNORM,
4409 { (deUint32)m_targetWidth, (deUint32)m_targetHeight, 1u },
4412 vk::VK_SAMPLE_COUNT_1_BIT,
4413 vk::VK_IMAGE_TILING_OPTIMAL,
4414 vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
4415 vk::VK_SHARING_MODE_EXCLUSIVE,
4416 (deUint32)queueFamilies.size(),
4418 vk::VK_IMAGE_LAYOUT_UNDEFINED
4421 m_colorTarget = vk::createImage(vkd, device, &createInfo);
4424 m_colorTargetMemory = bindImageMemory(vki, vkd, physicalDevice, device, *m_colorTarget, 0);
// Create the color target view (identity swizzle, color aspect).
4427 const vk::VkImageViewCreateInfo createInfo =
4429 vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
4434 vk::VK_IMAGE_VIEW_TYPE_2D,
4435 vk::VK_FORMAT_R8G8B8A8_UNORM,
4437 vk::VK_COMPONENT_SWIZZLE_R,
4438 vk::VK_COMPONENT_SWIZZLE_G,
4439 vk::VK_COMPONENT_SWIZZLE_B,
4440 vk::VK_COMPONENT_SWIZZLE_A
4443 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4451 m_colorTargetView = vk::createImageView(vkd, device, &createInfo);
// Create the render pass from the subpass/attachment described above.
4454 const vk::VkRenderPassCreateInfo createInfo =
4456 vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
4470 m_renderPass = vk::createRenderPass(vkd, device, &createInfo);
// Create the framebuffer wrapping the color target view.
4474 const vk::VkImageView imageViews[] =
4478 const vk::VkFramebufferCreateInfo createInfo =
4480 vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
4485 DE_LENGTH_OF_ARRAY(imageViews),
4487 (deUint32)m_targetWidth,
4488 (deUint32)m_targetHeight,
4492 m_framebuffer = vk::createFramebuffer(vkd, device, &createInfo);
// Let each sub-command allocate its own resources against these objects.
4496 PrepareRenderPassContext renderpassContext (context, *m_renderPass, *m_framebuffer, m_targetWidth, m_targetHeight);
4498 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4500 RenderPassCommand& command = *m_commands[cmdNdx];
4501 command.prepare(renderpassContext);
// Begin the render pass (clearing the target to opaque black), record each
// sub-command inline, then end the render pass.
4506 void SubmitRenderPass::submit (SubmitContext& context)
4508 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4509 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
// Matches the clear done on the reference target in verify().
4510 const vk::VkClearValue clearValue = vk::makeClearValueColorF32(0.0f, 0.0f, 0.0f, 1.0f);
4512 const vk::VkRenderPassBeginInfo beginInfo =
4514 vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
4520 { { 0, 0 }, { (deUint32)m_targetWidth, (deUint32)m_targetHeight } },
4525 vkd.cmdBeginRenderPass(commandBuffer, &beginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
4527 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4529 RenderPassCommand& command = *m_commands[cmdNdx];
4531 command.submit(context);
4534 vkd.cmdEndRenderPass(commandBuffer);
// Verify the rendered result: build a CPU-side reference image by replaying
// each command's verify() step, then read the GPU color target back into a
// host-visible buffer and compare the two pixel-for-pixel (zero threshold).
4537 void SubmitRenderPass::verify (VerifyContext& context, size_t commandIndex)
4539 TestLog& log (context.getLog());
4540 tcu::ResultCollector& resultCollector (context.getResultCollector());
4541 const string sectionName (de::toString(commandIndex) + ":" + getName());
4542 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4543 VerifyRenderPassContext verifyContext (context, m_targetWidth, m_targetHeight);
// Reference starts from the same opaque-black clear used in submit().
4545 tcu::clear(verifyContext.getReferenceTarget().getAccess(), Vec4(0.0f, 0.0f, 0.0f, 1.0f));
4547 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4549 RenderPassCommand& command = *m_commands[cmdNdx];
4550 command.verify(verifyContext, cmdNdx);
// Set up a one-shot command buffer and a host-visible readback buffer
// (4 bytes per pixel) for copying the color target back to the CPU.
4554 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4555 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4556 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4557 const vk::VkDevice device = context.getContext().getDevice();
4558 const vk::VkQueue queue = context.getContext().getQueue();
4559 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
4560 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4561 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
4562 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_targetWidth * m_targetHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4563 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Image barrier: make color-attachment writes visible to the transfer read.
4565 const vk::VkImageMemoryBarrier imageBarrier =
4567 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4570 vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
4571 vk::VK_ACCESS_TRANSFER_READ_BIT,
4573 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4574 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4576 VK_QUEUE_FAMILY_IGNORED,
4577 VK_QUEUE_FAMILY_IGNORED,
4581 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4583 1, // Mip level count
// Buffer barrier: make the transfer write visible to host reads after copy.
4588 const vk::VkBufferMemoryBarrier bufferBarrier =
4590 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4593 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4594 vk::VK_ACCESS_HOST_READ_BIT,
4596 VK_QUEUE_FAMILY_IGNORED,
4597 VK_QUEUE_FAMILY_IGNORED,
// Copy the whole target image into the readback buffer.
4602 const vk::VkBufferImageCopy region =
4607 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4614 (deUint32)m_targetWidth,
4615 (deUint32)m_targetHeight,
4620 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
// NOTE(review): "®ion" below looks like mis-encoded "&region" (the address
// of the VkBufferImageCopy above) — confirm against the pristine source.
4621 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_colorTarget, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
4622 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4625 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4626 queueRun(vkd, queue, *commandBuffer);
// Map the readback memory, invalidate the range (memory is host-visible but
// not necessarily host-coherent), and compare against the reference image.
4629 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_targetWidth * m_targetHeight);
4631 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_targetWidth * m_targetHeight);
4634 const deUint8* const data = (const deUint8*)ptr;
4635 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_targetWidth, m_targetHeight, 1, data);
4636 const ConstPixelBufferAccess& refAccess (verifyContext.getReferenceTarget().getAccess());
// Exact (zero-threshold) comparison; any mismatching pixel fails the test.
4638 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4639 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4642 vkd.unmapMemory(device, *memory);
// Owning bundle of the Vulkan objects created for one graphics pipeline:
// the pipeline itself plus the descriptor set layout and pipeline layout it
// was built with. vk::Move<> members destroy the handles automatically.
4647 struct PipelineResources
4649 vk::Move<vk::VkPipeline> pipeline;
4650 vk::Move<vk::VkDescriptorSetLayout> descriptorSetLayout;
4651 vk::Move<vk::VkPipelineLayout> pipelineLayout;
// Build a complete graphics pipeline (plus descriptor-set layout and pipeline
// layout when needed) for the given render pass / subpass, and store the
// resulting handles into 'resources'. The pipeline uses the supplied vertex
// and fragment shaders, a fixed full-viewport/scissor, no blending (ONE/ZERO),
// no multisampling, and depth/stencil tests effectively disabled
// (VK_COMPARE_OP_ALWAYS everywhere).
4654 void createPipelineWithResources (const vk::DeviceInterface& vkd,
4655 const vk::VkDevice device,
4656 const vk::VkRenderPass renderPass,
4657 const deUint32 subpass,
4658 const vk::VkShaderModule& vertexShaderModule,
4659 const vk::VkShaderModule& fragmentShaderModule,
4660 const deUint32 viewPortWidth,
4661 const deUint32 viewPortHeight,
4662 const vector<vk::VkVertexInputBindingDescription>& vertexBindingDescriptions,
4663 const vector<vk::VkVertexInputAttributeDescription>& vertexAttributeDescriptions,
4664 const vector<vk::VkDescriptorSetLayoutBinding>& bindings,
4665 const vk::VkPrimitiveTopology topology,
4666 deUint32 pushConstantRangeCount,
4667 const vk::VkPushConstantRange* pushConstantRanges,
4668 PipelineResources& resources)
// Only create a descriptor set layout if the caller supplied bindings.
4670 if (!bindings.empty())
4672 const vk::VkDescriptorSetLayoutCreateInfo createInfo =
4674 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
4678 (deUint32)bindings.size(),
4679 bindings.empty() ? DE_NULL : &bindings[0]
4682 resources.descriptorSetLayout = vk::createDescriptorSetLayout(vkd, device, &createInfo);
// Pipeline layout references the (possibly absent) descriptor set layout and
// any push-constant ranges the caller passed in.
4686 const vk::VkDescriptorSetLayout descriptorSetLayout_ = *resources.descriptorSetLayout;
4687 const vk::VkPipelineLayoutCreateInfo createInfo =
4689 vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
4693 resources.descriptorSetLayout ? 1u : 0u,
4694 resources.descriptorSetLayout ? &descriptorSetLayout_ : DE_NULL,
4696 pushConstantRangeCount,
4700 resources.pipelineLayout = vk::createPipelineLayout(vkd, device, &createInfo);
// Two shader stages: vertex then fragment.
4704 const vk::VkPipelineShaderStageCreateInfo shaderStages[] =
4707 vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4710 vk::VK_SHADER_STAGE_VERTEX_BIT,
4716 vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4719 vk::VK_SHADER_STAGE_FRAGMENT_BIT,
4720 fragmentShaderModule,
// Depth/stencil state: compare ops all ALWAYS, stencil ops all KEEP, i.e.
// depth and stencil never reject fragments.
4725 const vk::VkPipelineDepthStencilStateCreateInfo depthStencilState =
4727 vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
4732 vk::VK_COMPARE_OP_ALWAYS,
4736 vk::VK_STENCIL_OP_KEEP,
4737 vk::VK_STENCIL_OP_KEEP,
4738 vk::VK_STENCIL_OP_KEEP,
4739 vk::VK_COMPARE_OP_ALWAYS,
4745 vk::VK_STENCIL_OP_KEEP,
4746 vk::VK_STENCIL_OP_KEEP,
4747 vk::VK_STENCIL_OP_KEEP,
4748 vk::VK_COMPARE_OP_ALWAYS,
// Vertex input state comes straight from the caller-supplied descriptions.
4756 const vk::VkPipelineVertexInputStateCreateInfo vertexInputState =
4758 vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
4762 (deUint32)vertexBindingDescriptions.size(),
4763 vertexBindingDescriptions.empty() ? DE_NULL : &vertexBindingDescriptions[0],
4765 (deUint32)vertexAttributeDescriptions.size(),
4766 vertexAttributeDescriptions.empty() ? DE_NULL : &vertexAttributeDescriptions[0]
4768 const vk::VkPipelineInputAssemblyStateCreateInfo inputAssemblyState =
4770 vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
// Single static viewport and scissor covering the whole render target.
4776 const vk::VkViewport viewports[] =
4778 { 0.0f, 0.0f, (float)viewPortWidth, (float)viewPortHeight, 0.0f, 1.0f }
4780 const vk::VkRect2D scissors[] =
4782 { { 0, 0 }, { (deUint32)viewPortWidth, (deUint32)viewPortHeight } }
4784 const vk::VkPipelineViewportStateCreateInfo viewportState =
4786 vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
4789 DE_LENGTH_OF_ARRAY(viewports),
4791 DE_LENGTH_OF_ARRAY(scissors),
// Rasterization: filled polygons, no culling, CCW front faces.
4794 const vk::VkPipelineRasterizationStateCreateInfo rasterState =
4796 vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
4802 vk::VK_POLYGON_MODE_FILL,
4803 vk::VK_CULL_MODE_NONE,
4804 vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,
// Single-sample rasterization with a full sample mask.
4811 const vk::VkSampleMask sampleMask = ~0u;
4812 const vk::VkPipelineMultisampleStateCreateInfo multisampleState =
4814 vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
4818 vk::VK_SAMPLE_COUNT_1_BIT,
// Color blend: ONE/ZERO + ADD (i.e. source replaces destination), writing
// all four color components.
4825 const vk::VkPipelineColorBlendAttachmentState attachments[] =
4829 vk::VK_BLEND_FACTOR_ONE,
4830 vk::VK_BLEND_FACTOR_ZERO,
4831 vk::VK_BLEND_OP_ADD,
4832 vk::VK_BLEND_FACTOR_ONE,
4833 vk::VK_BLEND_FACTOR_ZERO,
4834 vk::VK_BLEND_OP_ADD,
4835 (vk::VK_COLOR_COMPONENT_R_BIT|
4836 vk::VK_COLOR_COMPONENT_G_BIT|
4837 vk::VK_COLOR_COMPONENT_B_BIT|
4838 vk::VK_COLOR_COMPONENT_A_BIT)
4841 const vk::VkPipelineColorBlendStateCreateInfo colorBlendState =
4843 vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
4848 vk::VK_LOGIC_OP_COPY,
4849 DE_LENGTH_OF_ARRAY(attachments),
4851 { 0.0f, 0.0f, 0.0f, 0.0f }
// Assemble everything into the graphics pipeline create info and build it.
4853 const vk::VkGraphicsPipelineCreateInfo createInfo =
4855 vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
4859 DE_LENGTH_OF_ARRAY(shaderStages),
4863 &inputAssemblyState,
4871 *resources.pipelineLayout,
4878 resources.pipeline = vk::createGraphicsPipeline(vkd, device, 0, &createInfo);
// RenderPassCommand that binds the test buffer as a 16-bit index buffer and
// draws one point per index. verify() mirrors that by plotting a white pixel
// at the (x, y) coordinate encoded by each consecutive byte pair of the
// reference buffer.
4882 class RenderIndexBuffer : public RenderPassCommand
4885 RenderIndexBuffer (void) {}
4886 ~RenderIndexBuffer (void) {}
4888 const char* getName (void) const { return "RenderIndexBuffer"; }
4889 void logPrepare (TestLog&, size_t) const;
4890 void logSubmit (TestLog&, size_t) const;
4891 void prepare (PrepareRenderPassContext&);
4892 void submit (SubmitContext& context);
4893 void verify (VerifyRenderPassContext&, size_t);
// Pipeline objects built in prepare(); buffer size captured for verify().
4896 PipelineResources m_resources;
4897 vk::VkDeviceSize m_bufferSize;
// Log what prepare() will do for this command index.
4900 void RenderIndexBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4902 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as index buffer." << TestLog::EndMessage;
// Log what submit() will do for this command index.
4905 void RenderIndexBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4907 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as index buffer." << TestLog::EndMessage;
// Build the point-list pipeline (no vertex input, no descriptors) from the
// "index-buffer.vert" / "render-white.frag" shaders.
4910 void RenderIndexBuffer::prepare (PrepareRenderPassContext& context)
4912 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4913 const vk::VkDevice device = context.getContext().getDevice();
4914 const vk::VkRenderPass renderPass = context.getRenderPass();
4915 const deUint32 subpass = 0;
4916 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("index-buffer.vert"), 0));
4917 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
4919 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
4920 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), vector<vk::VkDescriptorSetLayoutBinding>(), vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
4921 m_bufferSize = context.getBufferSize();
// Bind the pipeline and the buffer as UINT16 indices; each 2-byte index is
// one point, hence bufferSize / 2 indexed vertices.
4924 void RenderIndexBuffer::submit (SubmitContext& context)
4926 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4927 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4929 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
4930 vkd.cmdBindIndexBuffer(commandBuffer, context.getBuffer(), 0, vk::VK_INDEX_TYPE_UINT16);
4931 vkd.cmdDrawIndexed(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0, 0);
// Reference rendering: byte pair (2*pos, 2*pos+1) of the reference buffer is
// interpreted as (x, y) and painted white.
4934 void RenderIndexBuffer::verify (VerifyRenderPassContext& context, size_t)
4936 for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
4938 const deUint8 x = context.getReference().get(pos * 2);
4939 const deUint8 y = context.getReference().get((pos * 2) + 1);
4941 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
// RenderPassCommand that binds the test buffer as a vertex buffer
// (R8G8_UNORM attribute, one point per 2 bytes). verify() plots a white
// pixel at the (x, y) byte pair for each vertex, matching the GPU draw.
4945 class RenderVertexBuffer : public RenderPassCommand
4948 RenderVertexBuffer (void) {}
4949 ~RenderVertexBuffer (void) {}
4951 const char* getName (void) const { return "RenderVertexBuffer"; }
4952 void logPrepare (TestLog&, size_t) const;
4953 void logSubmit (TestLog&, size_t) const;
4954 void prepare (PrepareRenderPassContext&);
4955 void submit (SubmitContext& context);
4956 void verify (VerifyRenderPassContext&, size_t);
// Pipeline objects built in prepare(); buffer size captured for verify().
4959 PipelineResources m_resources;
4960 vk::VkDeviceSize m_bufferSize;
// Log what prepare() will do for this command index.
4963 void RenderVertexBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4965 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as vertex buffer." << TestLog::EndMessage;
// Log what submit() will do for this command index.
4968 void RenderVertexBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4970 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as vertex buffer." << TestLog::EndMessage;
// Build a point-list pipeline whose single vertex attribute reads the buffer
// as R8G8_UNORM coordinate pairs.
4973 void RenderVertexBuffer::prepare (PrepareRenderPassContext& context)
4975 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4976 const vk::VkDevice device = context.getContext().getDevice();
4977 const vk::VkRenderPass renderPass = context.getRenderPass();
4978 const deUint32 subpass = 0;
4979 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("vertex-buffer.vert"), 0));
4980 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
4982 vector<vk::VkVertexInputAttributeDescription> vertexAttributeDescriptions;
4983 vector<vk::VkVertexInputBindingDescription> vertexBindingDescriptions;
4986 const vk::VkVertexInputBindingDescription vertexBindingDescription =
4990 vk::VK_VERTEX_INPUT_RATE_VERTEX
4993 vertexBindingDescriptions.push_back(vertexBindingDescription);
4996 const vk::VkVertexInputAttributeDescription vertexAttributeDescription =
// Two unsigned-normalized bytes per vertex: (x, y).
5000 vk::VK_FORMAT_R8G8_UNORM,
5004 vertexAttributeDescriptions.push_back(vertexAttributeDescription);
5006 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5007 vertexBindingDescriptions, vertexAttributeDescriptions, vector<vk::VkDescriptorSetLayoutBinding>(), vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
5009 m_bufferSize = context.getBufferSize();
// Bind the pipeline and vertex buffer, then draw bufferSize / 2 points
// (2 bytes per vertex).
5012 void RenderVertexBuffer::submit (SubmitContext& context)
5014 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5015 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5016 const vk::VkDeviceSize offset = 0;
5017 const vk::VkBuffer buffer = context.getBuffer();
5019 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5020 vkd.cmdBindVertexBuffers(commandBuffer, 0, 1, &buffer, &offset);
5021 vkd.cmdDraw(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0);
// Reference rendering: each byte pair of the reference buffer is an (x, y)
// coordinate painted white.
5024 void RenderVertexBuffer::verify (VerifyRenderPassContext& context, size_t)
5026 for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
5028 const deUint8 x = context.getReference().get(pos * 2);
5029 const deUint8 y = context.getReference().get((pos * 2) + 1);
5031 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
// RenderPassCommand that reads the test buffer through uniform buffers.
// Because a uniform buffer range is capped at MAX_UNIFORM_BUFFER_SIZE, the
// buffer is split across multiple descriptor sets; one draw is issued per
// set, each drawing one point per coordinate pair in that slice.
5035 class RenderVertexUniformBuffer : public RenderPassCommand
5038 RenderVertexUniformBuffer (void) {}
5039 ~RenderVertexUniformBuffer (void);
5041 const char* getName (void) const { return "RenderVertexUniformBuffer"; }
5042 void logPrepare (TestLog&, size_t) const;
5043 void logSubmit (TestLog&, size_t) const;
5044 void prepare (PrepareRenderPassContext&);
5045 void submit (SubmitContext& context);
5046 void verify (VerifyRenderPassContext&, size_t);
// Pipeline objects, descriptor pool/sets, and the buffer size they cover.
5049 PipelineResources m_resources;
5050 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5051 vector<vk::VkDescriptorSet> m_descriptorSets;
5053 vk::VkDeviceSize m_bufferSize;
// Destructor body is empty in the visible source; pool/sets are released by
// the vk::Move<> member.
5056 RenderVertexUniformBuffer::~RenderVertexUniformBuffer (void)
// Log what prepare() will do for this command index.
5060 void RenderVertexUniformBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5062 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform buffer." << TestLog::EndMessage;
// Log what submit() will do for this command index.
5065 void RenderVertexUniformBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5067 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform buffer." << TestLog::EndMessage;
// Create the pipeline with a single uniform-buffer binding, then allocate
// one descriptor set per MAX_UNIFORM_BUFFER_SIZE slice of the buffer and
// point each set at its slice.
5070 void RenderVertexUniformBuffer::prepare (PrepareRenderPassContext& context)
5072 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5073 const vk::VkDevice device = context.getContext().getDevice();
5074 const vk::VkRenderPass renderPass = context.getRenderPass();
5075 const deUint32 subpass = 0;
5076 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-buffer.vert"), 0));
5077 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5078 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5080 m_bufferSize = context.getBufferSize();
5083 const vk::VkDescriptorSetLayoutBinding binding =
5086 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
5088 vk::VK_SHADER_STAGE_VERTEX_BIT,
5092 bindings.push_back(binding);
5095 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5096 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
// One descriptor set per MAX_UNIFORM_BUFFER_SIZE-sized slice (rounded up).
5099 const deUint32 descriptorCount = (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE));
5100 const vk::VkDescriptorPoolSize poolSizes =
5102 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
5105 const vk::VkDescriptorPoolCreateInfo createInfo =
5107 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5109 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5116 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5117 m_descriptorSets.resize(descriptorCount);
5120 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5122 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
5123 const vk::VkDescriptorSetAllocateInfo allocateInfo =
5125 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
// disown(): raw handles are kept in m_descriptorSets; the pool owns them.
5133 m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
// Each set covers [ndx * MAX, min(MAX, remaining)) bytes of the buffer.
5136 const vk::VkDescriptorBufferInfo bufferInfo =
5138 context.getBuffer(),
5139 (vk::VkDeviceSize)(descriptorSetNdx * (size_t)MAX_UNIFORM_BUFFER_SIZE),
5140 m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5141 ? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5142 : (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5144 const vk::VkWriteDescriptorSet write =
5146 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5148 m_descriptorSets[descriptorSetNdx],
5152 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
5158 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
// One draw per descriptor set; vertex count is half the slice's byte size
// (2 bytes per point).
5163 void RenderVertexUniformBuffer::submit (SubmitContext& context)
5165 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5166 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5168 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5170 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5172 const size_t size = (size_t)(m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5173 ? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5174 : (size_t)MAX_UNIFORM_BUFFER_SIZE);
5175 const deUint32 count = (deUint32)(size / 2);
5177 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5178 vkd.cmdDraw(commandBuffer, count, 1, 0, 0);
// Reference rendering mirrors submit(): for each slice, paint a white pixel
// per (x, y) byte pair at the slice's offset in the reference buffer.
5182 void RenderVertexUniformBuffer::verify (VerifyRenderPassContext& context, size_t)
5184 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5186 const size_t offset = descriptorSetNdx * MAX_UNIFORM_BUFFER_SIZE;
5187 const size_t size = (size_t)(m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5188 ? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5189 : (size_t)MAX_UNIFORM_BUFFER_SIZE);
5190 const size_t count = size / 2;
5192 for (size_t pos = 0; pos < count; pos++)
5194 const deUint8 x = context.getReference().get(offset + pos * 2);
5195 const deUint8 y = context.getReference().get(offset + (pos * 2) + 1);
5197 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
// RenderPassCommand that reads the test buffer through uniform texel buffers
// (R16_UINT buffer views). Slices are limited by the device's
// maxTexelBufferElements; one descriptor set + buffer view + draw per slice.
5202 class RenderVertexUniformTexelBuffer : public RenderPassCommand
5205 RenderVertexUniformTexelBuffer (void) {}
5206 ~RenderVertexUniformTexelBuffer (void);
5208 const char* getName (void) const { return "RenderVertexUniformTexelBuffer"; }
5209 void logPrepare (TestLog&, size_t) const;
5210 void logSubmit (TestLog&, size_t) const;
5211 void prepare (PrepareRenderPassContext&);
5212 void submit (SubmitContext& context);
5213 void verify (VerifyRenderPassContext&, size_t);
// Pipeline objects, per-slice descriptor sets and raw buffer views.
5216 PipelineResources m_resources;
5217 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5218 vector<vk::VkDescriptorSet> m_descriptorSets;
5219 vector<vk::VkBufferView> m_bufferViews;
// Device pointer/handle kept so the destructor can free the raw buffer views.
5221 const vk::DeviceInterface* m_vkd;
5222 vk::VkDevice m_device;
5223 vk::VkDeviceSize m_bufferSize;
5224 deUint32 m_maxUniformTexelCount;
// Destroy the manually-created buffer views (they are raw handles, not
// vk::Move<> wrappers) and null them out.
5227 RenderVertexUniformTexelBuffer::~RenderVertexUniformTexelBuffer (void)
5229 for (size_t bufferViewNdx = 0; bufferViewNdx < m_bufferViews.size(); bufferViewNdx++)
5231 if (!!m_bufferViews[bufferViewNdx])
5233 m_vkd->destroyBufferView(m_device, m_bufferViews[bufferViewNdx], DE_NULL);
5234 m_bufferViews[bufferViewNdx] = (vk::VkBufferView)0;
// Log what prepare() will do for this command index.
5239 void RenderVertexUniformTexelBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5241 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform buffer." << TestLog::EndMessage;
// Log what submit() will do for this command index.
5244 void RenderVertexUniformTexelBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5246 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform buffer." << TestLog::EndMessage;
// Create the pipeline with one uniform-texel-buffer binding, then for each
// slice of at most maxTexelBufferElements 16-bit texels allocate a descriptor
// set, create an R16_UINT buffer view over the slice, and write it to the set.
5249 void RenderVertexUniformTexelBuffer::prepare (PrepareRenderPassContext& context)
5251 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
5252 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
5253 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5254 const vk::VkDevice device = context.getContext().getDevice();
5255 const vk::VkRenderPass renderPass = context.getRenderPass();
5256 const deUint32 subpass = 0;
5257 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-texel-buffer.vert"), 0));
5258 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5259 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5263 m_bufferSize = context.getBufferSize();
// Per-view texel limit comes from the device limits.
5264 m_maxUniformTexelCount = vk::getPhysicalDeviceProperties(vki, physicalDevice).limits.maxTexelBufferElements;
5267 const vk::VkDescriptorSetLayoutBinding binding =
5270 vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5272 vk::VK_SHADER_STAGE_VERTEX_BIT,
5276 bindings.push_back(binding);
5279 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5280 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
// Each slice holds m_maxUniformTexelCount 16-bit texels = 2x that in bytes.
5283 const deUint32 descriptorCount = (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)m_maxUniformTexelCount * 2));
5284 const vk::VkDescriptorPoolSize poolSizes =
5286 vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5289 const vk::VkDescriptorPoolCreateInfo createInfo =
5291 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5293 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5300 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5301 m_descriptorSets.resize(descriptorCount, (vk::VkDescriptorSet)0);
5302 m_bufferViews.resize(descriptorCount, (vk::VkBufferView)0);
5305 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
// Texel count for this slice: full limit, or whatever remains in the buffer.
5307 const deUint32 count = (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5308 ? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5309 : m_maxUniformTexelCount * 2) / 2;
5310 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
5311 const vk::VkDescriptorSetAllocateInfo allocateInfo =
5313 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
// disown(): raw handles are kept in m_descriptorSets; the pool owns them.
5321 m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
// Buffer view over this slice, read as 16-bit unsigned texels.
5324 const vk::VkBufferViewCreateInfo createInfo =
5326 vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
5330 context.getBuffer(),
5331 vk::VK_FORMAT_R16_UINT,
5332 descriptorSetNdx * m_maxUniformTexelCount * 2,
5336 VK_CHECK(vkd.createBufferView(device, &createInfo, DE_NULL, &m_bufferViews[descriptorSetNdx]));
5340 const vk::VkWriteDescriptorSet write =
5342 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5344 m_descriptorSets[descriptorSetNdx],
5348 vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5351 &m_bufferViews[descriptorSetNdx]
5354 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
// One draw per slice; vertex count is the slice's 16-bit texel count.
5359 void RenderVertexUniformTexelBuffer::submit (SubmitContext& context)
5361 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5362 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5364 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5366 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5368 const deUint32 count = (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5369 ? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5370 : m_maxUniformTexelCount * 2) / 2;
5372 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5373 vkd.cmdDraw(commandBuffer, count, 1, 0, 0);
// Reference rendering mirrors submit(): for each slice, paint a white pixel
// per (x, y) byte pair at the slice's byte offset in the reference buffer.
5377 void RenderVertexUniformTexelBuffer::verify (VerifyRenderPassContext& context, size_t)
5379 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5381 const size_t offset = descriptorSetNdx * m_maxUniformTexelCount * 2;
5382 const deUint32 count = (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5383 ? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5384 : m_maxUniformTexelCount * 2) / 2;
5386 for (size_t pos = 0; pos < (size_t)count; pos++)
5388 const deUint8 x = context.getReference().get(offset + pos * 2);
5389 const deUint8 y = context.getReference().get(offset + (pos * 2) + 1);
5391 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
// RenderPassCommand that reads the test buffer through storage buffers.
// Slices are limited by MAX_STORAGE_BUFFER_SIZE; one descriptor set and one
// draw per slice, one point per 2-byte coordinate pair.
5396 class RenderVertexStorageBuffer : public RenderPassCommand
5399 RenderVertexStorageBuffer (void) {}
5400 ~RenderVertexStorageBuffer (void);
5402 const char* getName (void) const { return "RenderVertexStorageBuffer"; }
5403 void logPrepare (TestLog&, size_t) const;
5404 void logSubmit (TestLog&, size_t) const;
5405 void prepare (PrepareRenderPassContext&);
5406 void submit (SubmitContext& context);
5407 void verify (VerifyRenderPassContext&, size_t);
// Pipeline objects, per-slice descriptor sets, and covered buffer size.
5410 PipelineResources m_resources;
5411 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5412 vector<vk::VkDescriptorSet> m_descriptorSets;
5414 vk::VkDeviceSize m_bufferSize;
// Destructor body is empty in the visible source; pool/sets are released by
// the vk::Move<> member.
5417 RenderVertexStorageBuffer::~RenderVertexStorageBuffer (void)
// Log what prepare() will do for this command index.
5421 void RenderVertexStorageBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5423 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as storage buffer." << TestLog::EndMessage;
// Log what submit() will do for this command index.
5426 void RenderVertexStorageBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5428 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as storage buffer." << TestLog::EndMessage;
// Create the pipeline with one storage-buffer binding, then allocate one
// descriptor set per MAX_STORAGE_BUFFER_SIZE slice and bind each slice.
5431 void RenderVertexStorageBuffer::prepare (PrepareRenderPassContext& context)
5433 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5434 const vk::VkDevice device = context.getContext().getDevice();
5435 const vk::VkRenderPass renderPass = context.getRenderPass();
5436 const deUint32 subpass = 0;
5437 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-buffer.vert"), 0));
5438 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5439 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5441 m_bufferSize = context.getBufferSize();
5444 const vk::VkDescriptorSetLayoutBinding binding =
5447 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5449 vk::VK_SHADER_STAGE_VERTEX_BIT,
5453 bindings.push_back(binding);
5456 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5457 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
// One descriptor set per MAX_STORAGE_BUFFER_SIZE-sized slice (rounded up).
5460 const deUint32 descriptorCount = (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)MAX_STORAGE_BUFFER_SIZE));
5461 const vk::VkDescriptorPoolSize poolSizes =
5463 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5466 const vk::VkDescriptorPoolCreateInfo createInfo =
5468 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5470 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5477 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5478 m_descriptorSets.resize(descriptorCount);
5481 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5483 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
5484 const vk::VkDescriptorSetAllocateInfo allocateInfo =
5486 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
// disown(): raw handles are kept in m_descriptorSets; the pool owns them.
5494 m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
// Each set covers [ndx * MAX, min(MAX, remaining)) bytes of the buffer.
5497 const vk::VkDescriptorBufferInfo bufferInfo =
5499 context.getBuffer(),
5500 descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE,
5501 de::min(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE, (vk::VkDeviceSize)MAX_STORAGE_BUFFER_SIZE)
5503 const vk::VkWriteDescriptorSet write =
5505 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5507 m_descriptorSets[descriptorSetNdx],
5511 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5517 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
// One draw per slice; vertex count is half the slice's byte size.
5522 void RenderVertexStorageBuffer::submit (SubmitContext& context)
5524 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5525 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5527 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5529 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5531 const size_t size = m_bufferSize < (descriptorSetNdx + 1) * MAX_STORAGE_BUFFER_SIZE
5532 ? (size_t)(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE)
5533 : (size_t)(MAX_STORAGE_BUFFER_SIZE);
5535 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5536 vkd.cmdDraw(commandBuffer, (deUint32)(size / 2), 1, 0, 0);
// Reference rendering mirrors submit(): for each slice, paint a white pixel
// per (x, y) byte pair at the slice's offset in the reference buffer.
5540 void RenderVertexStorageBuffer::verify (VerifyRenderPassContext& context, size_t)
5542 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5544 const size_t offset = descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE;
5545 const size_t size = m_bufferSize < (descriptorSetNdx + 1) * MAX_STORAGE_BUFFER_SIZE
5546 ? (size_t)(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE)
5547 : (size_t)(MAX_STORAGE_BUFFER_SIZE);
5549 for (size_t pos = 0; pos < size / 2; pos++)
5551 const deUint8 x = context.getReference().get(offset + pos * 2);
5552 const deUint8 y = context.getReference().get(offset + (pos * 2) + 1);
5554 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
// Render-pass command that draws using the test buffer bound as a
// storage texel buffer in the vertex shader.  Descriptor sets and buffer
// views are raw handles (not vk::Move<>), so the destructor must release
// them manually.
5559 class RenderVertexStorageTexelBuffer : public RenderPassCommand
5562 RenderVertexStorageTexelBuffer (void) {}
5563 ~RenderVertexStorageTexelBuffer (void);
5565 const char* getName (void) const { return "RenderVertexStorageTexelBuffer"; }
5566 void logPrepare (TestLog&, size_t) const;
5567 void logSubmit (TestLog&, size_t) const;
5568 void prepare (PrepareRenderPassContext&);
5569 void submit (SubmitContext& context);
5570 void verify (VerifyRenderPassContext&, size_t);
5573 PipelineResources m_resources;
5574 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5575 vector<vk::VkDescriptorSet> m_descriptorSets;
// Raw buffer-view handles, one per descriptor set; freed in the destructor.
5576 vector<vk::VkBufferView> m_bufferViews;
// Cached in prepare() so the destructor can destroy the buffer views.
5578 const vk::DeviceInterface* m_vkd;
5579 vk::VkDevice m_device;
5580 vk::VkDeviceSize m_bufferSize;
5581 deUint32 m_maxStorageTexelCount;
// Destroy each manually managed buffer view and null the handle so a
// repeated destruction is a no-op.
5584 RenderVertexStorageTexelBuffer::~RenderVertexStorageTexelBuffer (void)
5586 for (size_t bufferViewNdx = 0; bufferViewNdx < m_bufferViews.size(); bufferViewNdx++)
5588 if (!!m_bufferViews[bufferViewNdx])
5590 m_vkd->destroyBufferView(m_device, m_bufferViews[bufferViewNdx], DE_NULL);
5591 m_bufferViews[bufferViewNdx] = (vk::VkBufferView)0;
// Log a one-line description of the prepare step for this command.
5596 void RenderVertexStorageTexelBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5598 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as storage buffer." << TestLog::EndMessage;
// Log a one-line description of the submit step for this command.
5601 void RenderVertexStorageTexelBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5603 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as storage buffer." << TestLog::EndMessage;
// Build the pipeline and descriptors for drawing the test buffer as a
// storage texel buffer: one descriptor set and one R32_UINT buffer view
// per maxTexelBufferElements*4-byte slice of the buffer.
// NOTE(review): lossy extraction — struct-initializer fields and brace
// lines are missing; comments cover only the visible statements.
5606 void RenderVertexStorageTexelBuffer::prepare (PrepareRenderPassContext& context)
5608 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
5609 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
5610 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5611 const vk::VkDevice device = context.getContext().getDevice();
5612 const vk::VkRenderPass renderPass = context.getRenderPass();
5613 const deUint32 subpass = 0;
5614 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-texel-buffer.vert"), 0));
5615 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5616 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5620 m_bufferSize = context.getBufferSize();
// Device limit: how many texels a single buffer view may cover.
5621 m_maxStorageTexelCount = vk::getPhysicalDeviceProperties(vki, physicalDevice).limits.maxTexelBufferElements;
5624 const vk::VkDescriptorSetLayoutBinding binding =
5627 vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
5629 vk::VK_SHADER_STAGE_VERTEX_BIT,
5633 bindings.push_back(binding);
5636 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5637 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
// One descriptor set per m_maxStorageTexelCount*4 bytes of buffer (R32_UINT texels).
5640 const deUint32 descriptorCount = (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)m_maxStorageTexelCount * 4));
5641 const vk::VkDescriptorPoolSize poolSizes =
5643 vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
5646 const vk::VkDescriptorPoolCreateInfo createInfo =
5648 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5650 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5657 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5658 m_descriptorSets.resize(descriptorCount, (vk::VkDescriptorSet)0);
5659 m_bufferViews.resize(descriptorCount, (vk::VkBufferView)0);
5662 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5664 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
5665 const vk::VkDescriptorSetAllocateInfo allocateInfo =
5667 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
// disown(): ownership moves to m_descriptorSets; pool uses FREE_DESCRIPTOR_SET_BIT.
5675 m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
5678 const vk::VkBufferViewCreateInfo createInfo =
5680 vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
5684 context.getBuffer(),
5685 vk::VK_FORMAT_R32_UINT,
// Offset/range of this slice; last view is clamped to the buffer end.
5686 descriptorSetNdx * m_maxStorageTexelCount * 4,
5687 (deUint32)de::min<vk::VkDeviceSize>(m_maxStorageTexelCount * 4, m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4)
5690 VK_CHECK(vkd.createBufferView(device, &createInfo, DE_NULL, &m_bufferViews[descriptorSetNdx]));
5694 const vk::VkWriteDescriptorSet write =
5696 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5698 m_descriptorSets[descriptorSetNdx],
5702 vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
5705 &m_bufferViews[descriptorSetNdx]
5708 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
// Record draw commands reading the buffer as a storage texel buffer:
// for each descriptor set draw (sliceBytes / 2) point-list vertices.
5713 void RenderVertexStorageTexelBuffer::submit (SubmitContext& context)
5715 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5716 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5718 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5720 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
// Bytes in this slice (clamped for the last one), halved: two bytes per vertex.
5722 const deUint32 count = (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxStorageTexelCount * 4
5723 ? m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4
5724 : m_maxStorageTexelCount * 4) / 2;
5726 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5727 vkd.cmdDraw(commandBuffer, count, 1, 0, 0);
// Reference rendering: mirror submit() — for each (x, y) byte pair in
// every slice, set reference-target pixel (x, y) to white.  The asserts
// bounds-check the reference-buffer reads against the slice layout.
5731 void RenderVertexStorageTexelBuffer::verify (VerifyRenderPassContext& context, size_t)
5733 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5735 const size_t offset = descriptorSetNdx * m_maxStorageTexelCount * 4;
// Must match the count computation in submit(); keep them in sync.
5736 const deUint32 count = (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxStorageTexelCount * 4
5737 ? m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4
5738 : m_maxStorageTexelCount * 4) / 2;
5740 DE_ASSERT(context.getReference().getSize() <= 4 * m_maxStorageTexelCount * m_descriptorSets.size());
5741 DE_ASSERT(context.getReference().getSize() > offset);
5742 DE_ASSERT(offset + count * 2 <= context.getReference().getSize());
5744 for (size_t pos = 0; pos < (size_t)count; pos++)
5746 const deUint8 x = context.getReference().get(offset + pos * 2);
5747 const deUint8 y = context.getReference().get(offset + (pos * 2) + 1);
5749 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
// Render-pass command that draws using the test image bound as a storage
// image in the vertex shader.  All Vulkan objects are RAII-wrapped
// (vk::Move<>), so no user-defined destructor logic is needed.
5754 class RenderVertexStorageImage : public RenderPassCommand
5757 RenderVertexStorageImage (void) {}
5758 ~RenderVertexStorageImage (void);
5760 const char* getName (void) const { return "RenderVertexStorageImage"; }
5761 void logPrepare (TestLog&, size_t) const;
5762 void logSubmit (TestLog&, size_t) const;
5763 void prepare (PrepareRenderPassContext&);
5764 void submit (SubmitContext& context);
5765 void verify (VerifyRenderPassContext&, size_t);
5768 PipelineResources m_resources;
5769 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5770 vk::Move<vk::VkDescriptorSet> m_descriptorSet;
5771 vk::Move<vk::VkImageView> m_imageView;
// Destructor; body lines are not visible in this extraction (members are
// vk::Move<> wrappers, which release themselves).
5774 RenderVertexStorageImage::~RenderVertexStorageImage (void)
// Log a one-line description of the prepare step for this command.
5778 void RenderVertexStorageImage::logPrepare (TestLog& log, size_t commandIndex) const
5780 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render storage image." << TestLog::EndMessage;
// Log a one-line description of the submit step for this command.
5783 void RenderVertexStorageImage::logSubmit (TestLog& log, size_t commandIndex) const
5785 log << TestLog::Message << commandIndex << ":" << getName() << " Render using storage image." << TestLog::EndMessage;
// Build the pipeline and a single descriptor set binding the test image
// (as an R8G8B8A8_UNORM 2D view, GENERAL layout) as a storage image for
// the vertex shader.  NOTE(review): lossy extraction — initializer
// fields and braces are missing from the visible struct literals.
5788 void RenderVertexStorageImage::prepare (PrepareRenderPassContext& context)
5790 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5791 const vk::VkDevice device = context.getContext().getDevice();
5792 const vk::VkRenderPass renderPass = context.getRenderPass();
5793 const deUint32 subpass = 0;
5794 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-image.vert"), 0));
5795 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5796 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5799 const vk::VkDescriptorSetLayoutBinding binding =
5802 vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
5804 vk::VK_SHADER_STAGE_VERTEX_BIT,
5808 bindings.push_back(binding);
5811 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5812 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
5815 const vk::VkDescriptorPoolSize poolSizes =
5817 vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
5820 const vk::VkDescriptorPoolCreateInfo createInfo =
5822 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5824 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5831 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5835 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
5836 const vk::VkDescriptorSetAllocateInfo allocateInfo =
5838 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5846 m_descriptorSet = vk::allocateDescriptorSet(vkd, device, &allocateInfo);
// 2D color view over the test image, used as the storage-image descriptor.
5849 const vk::VkImageViewCreateInfo createInfo =
5851 vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
5856 vk::VK_IMAGE_VIEW_TYPE_2D,
5857 vk::VK_FORMAT_R8G8B8A8_UNORM,
5858 vk::makeComponentMappingRGBA(),
5860 vk::VK_IMAGE_ASPECT_COLOR_BIT,
5868 m_imageView = vk::createImageView(vkd, device, &createInfo);
5872 const vk::VkDescriptorImageInfo imageInfo =
5876 vk::VK_IMAGE_LAYOUT_GENERAL
5878 const vk::VkWriteDescriptorSet write =
5880 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5886 vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
5892 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
// Record draw commands: one point-list draw of width*height*2 vertices
// (two vertices per image pixel) reading the image via the storage-image
// descriptor.
5897 void RenderVertexStorageImage::submit (SubmitContext& context)
5899 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5900 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5902 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5904 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &(*m_descriptorSet), 0u, DE_NULL);
5905 vkd.cmdDraw(commandBuffer, context.getImageWidth() * context.getImageHeight() * 2, 1, 0, 0);
// Reference rendering: each reference-image pixel contributes two target
// pixels — one at (r, g) and one at (b, a) of its RGBA8 value — matching
// the two vertices per pixel drawn in submit().
5908 void RenderVertexStorageImage::verify (VerifyRenderPassContext& context, size_t)
5910 for (int pos = 0; pos < (int)(context.getReferenceImage().getWidth() * context.getReferenceImage().getHeight() * 2); pos++)
5912 const tcu::IVec3 size = context.getReferenceImage().getAccess().getSize();
// pos/2 is linearized pixel index; note x uses div and y uses mod here.
5913 const tcu::UVec4 pixel = context.getReferenceImage().getAccess().getPixelUint((pos / 2) / size.x(), (pos / 2) % size.x());
5916 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), pixel.x(), pixel.y());
5918 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), pixel.z(), pixel.w());
// Render-pass command that draws using the test image bound as a
// combined image/sampler in the vertex shader.  All Vulkan objects are
// RAII-wrapped (vk::Move<>).
5922 class RenderVertexSampledImage : public RenderPassCommand
5925 RenderVertexSampledImage (void) {}
5926 ~RenderVertexSampledImage (void);
5928 const char* getName (void) const { return "RenderVertexSampledImage"; }
5929 void logPrepare (TestLog&, size_t) const;
5930 void logSubmit (TestLog&, size_t) const;
5931 void prepare (PrepareRenderPassContext&);
5932 void submit (SubmitContext& context);
5933 void verify (VerifyRenderPassContext&, size_t);
5936 PipelineResources m_resources;
5937 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5938 vk::Move<vk::VkDescriptorSet> m_descriptorSet;
5939 vk::Move<vk::VkImageView> m_imageView;
5940 vk::Move<vk::VkSampler> m_sampler;
// Destructor; body lines are not visible in this extraction (members are
// vk::Move<> wrappers, which release themselves).
5943 RenderVertexSampledImage::~RenderVertexSampledImage (void)
// Log a one-line description of the prepare step for this command.
5947 void RenderVertexSampledImage::logPrepare (TestLog& log, size_t commandIndex) const
5949 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render sampled image." << TestLog::EndMessage;
// Log a one-line description of the submit step for this command.
5952 void RenderVertexSampledImage::logSubmit (TestLog& log, size_t commandIndex) const
5954 log << TestLog::Message << commandIndex << ":" << getName() << " Render using sampled image." << TestLog::EndMessage;
// Build the pipeline, a nearest-filtered clamp-to-edge sampler, an
// R8G8B8A8_UNORM 2D view of the test image, and a single combined
// image/sampler descriptor set for the vertex shader.
// NOTE(review): lossy extraction — initializer fields and braces are
// missing from the visible struct literals.
5957 void RenderVertexSampledImage::prepare (PrepareRenderPassContext& context)
5959 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5960 const vk::VkDevice device = context.getContext().getDevice();
5961 const vk::VkRenderPass renderPass = context.getRenderPass();
5962 const deUint32 subpass = 0;
5963 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("sampled-image.vert"), 0));
5964 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5965 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5968 const vk::VkDescriptorSetLayoutBinding binding =
5971 vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
5973 vk::VK_SHADER_STAGE_VERTEX_BIT,
5977 bindings.push_back(binding);
5980 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5981 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
5984 const vk::VkDescriptorPoolSize poolSizes =
5986 vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
5989 const vk::VkDescriptorPoolCreateInfo createInfo =
5991 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5993 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
6000 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
6004 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
6005 const vk::VkDescriptorSetAllocateInfo allocateInfo =
6007 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
6015 m_descriptorSet = vk::allocateDescriptorSet(vkd, device, &allocateInfo);
6018 const vk::VkImageViewCreateInfo createInfo =
6020 vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
6025 vk::VK_IMAGE_VIEW_TYPE_2D,
6026 vk::VK_FORMAT_R8G8B8A8_UNORM,
6027 vk::makeComponentMappingRGBA(),
6029 vk::VK_IMAGE_ASPECT_COLOR_BIT,
6037 m_imageView = vk::createImageView(vkd, device, &createInfo);
// Nearest filtering + clamp-to-edge: texel fetches are exact, no blending.
6041 const vk::VkSamplerCreateInfo createInfo =
6043 vk::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
6047 vk::VK_FILTER_NEAREST,
6048 vk::VK_FILTER_NEAREST,
6050 vk::VK_SAMPLER_MIPMAP_MODE_LINEAR,
6051 vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
6052 vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
6053 vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
6058 vk::VK_COMPARE_OP_ALWAYS,
6061 vk::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
6065 m_sampler = vk::createSampler(vkd, device, &createInfo);
6069 const vk::VkDescriptorImageInfo imageInfo =
6073 vk::VK_IMAGE_LAYOUT_GENERAL
6075 const vk::VkWriteDescriptorSet write =
6077 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
6083 vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
6089 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
// Record draw commands: one point-list draw of width*height*2 vertices
// (two vertices per image pixel) sampling the image in the vertex shader.
6094 void RenderVertexSampledImage::submit (SubmitContext& context)
6096 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
6097 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
6099 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
6101 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &(*m_descriptorSet), 0u, DE_NULL);
6102 vkd.cmdDraw(commandBuffer, context.getImageWidth() * context.getImageHeight() * 2, 1, 0, 0);
// Reference rendering: identical scheme to RenderVertexStorageImage —
// each reference pixel's RGBA8 value yields target pixels at (r, g) and
// (b, a).
6105 void RenderVertexSampledImage::verify (VerifyRenderPassContext& context, size_t)
6107 for (int pos = 0; pos < (int)(context.getReferenceImage().getWidth() * context.getReferenceImage().getHeight() * 2); pos++)
6109 const tcu::IVec3 size = context.getReferenceImage().getAccess().getSize();
6110 const tcu::UVec4 pixel = context.getReferenceImage().getAccess().getPixelUint((pos / 2) / size.x(), (pos / 2) % size.x());
6113 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), pixel.x(), pixel.y());
6115 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), pixel.z(), pixel.w());
// Render-pass command that reads the test buffer as uniform buffers in
// the fragment shader, one descriptor set per MAX_UNIFORM_BUFFER_SIZE
// slice, with push constants identifying the slice.
6119 class RenderFragmentUniformBuffer : public RenderPassCommand
6122 RenderFragmentUniformBuffer (void) {}
6123 ~RenderFragmentUniformBuffer (void);
6125 const char* getName (void) const { return "RenderFragmentUniformBuffer"; }
6126 void logPrepare (TestLog&, size_t) const;
6127 void logSubmit (TestLog&, size_t) const;
6128 void prepare (PrepareRenderPassContext&);
6129 void submit (SubmitContext& context);
6130 void verify (VerifyRenderPassContext&, size_t);
6133 PipelineResources m_resources;
6134 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
6135 vector<vk::VkDescriptorSet> m_descriptorSets;
// Cached in prepare() for use by submit()/verify().
6137 vk::VkDeviceSize m_bufferSize;
6138 size_t m_targetWidth;
6139 size_t m_targetHeight;
// Destructor; body lines are not visible in this extraction.
6142 RenderFragmentUniformBuffer::~RenderFragmentUniformBuffer (void)
// Log a one-line description of the prepare step for this command.
6146 void RenderFragmentUniformBuffer::logPrepare (TestLog& log, size_t commandIndex) const
6148 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform buffer." << TestLog::EndMessage;
// Log a one-line description of the submit step for this command.
6151 void RenderFragmentUniformBuffer::logSubmit (TestLog& log, size_t commandIndex) const
6153 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform buffer." << TestLog::EndMessage;
// Build a full-screen-quad pipeline whose fragment shader reads the test
// buffer as uniform buffers; one descriptor set per
// MAX_UNIFORM_BUFFER_SIZE slice, plus a fragment-stage push-constant
// range for per-draw parameters.  NOTE(review): lossy extraction —
// initializer fields and braces are missing from the visible literals.
6156 void RenderFragmentUniformBuffer::prepare (PrepareRenderPassContext& context)
6158 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
6159 const vk::VkDevice device = context.getContext().getDevice();
6160 const vk::VkRenderPass renderPass = context.getRenderPass();
6161 const deUint32 subpass = 0;
6162 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-quad.vert"), 0));
6163 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-buffer.frag"), 0));
6164 vector<vk::VkDescriptorSetLayoutBinding> bindings;
6166 m_bufferSize = context.getBufferSize();
6167 m_targetWidth = context.getTargetWidth();
6168 m_targetHeight = context.getTargetHeight();
6171 const vk::VkDescriptorSetLayoutBinding binding =
6174 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6176 vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6180 bindings.push_back(binding);
// Push constants deliver CallParams (see submit()) to the fragment shader.
6182 const vk::VkPushConstantRange pushConstantRange =
6184 vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6189 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
6190 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 1u, &pushConstantRange, m_resources);
// One uniform-buffer descriptor per MAX_UNIFORM_BUFFER_SIZE slice.
6193 const deUint32 descriptorCount = (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE));
6194 const vk::VkDescriptorPoolSize poolSizes =
6196 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6199 const vk::VkDescriptorPoolCreateInfo createInfo =
6201 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
6203 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
6210 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
6211 m_descriptorSets.resize(descriptorCount);
6214 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6216 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
6217 const vk::VkDescriptorSetAllocateInfo allocateInfo =
6219 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
6227 m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
6230 const vk::VkDescriptorBufferInfo bufferInfo =
6232 context.getBuffer(),
// Offset/range of this slice; last slice clamped to the buffer end.
6233 (vk::VkDeviceSize)(descriptorSetNdx * (size_t)MAX_UNIFORM_BUFFER_SIZE),
6234 m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
6235 ? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
6236 : (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
6238 const vk::VkWriteDescriptorSet write =
6240 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
6242 m_descriptorSets[descriptorSetNdx],
6246 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6252 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
// Record one full-screen-quad draw (6 vertices) per descriptor set,
// passing {callId, valuesPerPixel} to the fragment shader via push
// constants.  The struct declaration lines around 6265-6274 are partly
// missing from this extraction; the visible fields are an aggregate with
// const members initialized in declaration order.
6257 void RenderFragmentUniformBuffer::submit (SubmitContext& context)
6259 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
6260 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
6262 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
6264 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6268 const deUint32 callId;
6269 const deUint32 valuesPerPixel;
6271 (deUint32)descriptorSetNdx,
// How many 32-bit buffer values each target pixel must fold together.
6272 (deUint32)divRoundUp<size_t>(m_descriptorSets.size() * (MAX_UNIFORM_BUFFER_SIZE / 4), m_targetWidth * m_targetHeight)
6275 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
6276 vkd.cmdPushConstants(commandBuffer, *m_resources.pipelineLayout, vk::VK_SHADER_STAGE_FRAGMENT_BIT, 0u, (deUint32)sizeof(callParams), &callParams);
6277 vkd.cmdDraw(commandBuffer, 6, 1, 0, 0);
// Reference rendering: for each target pixel, replay the fragment
// shader's pointer-chasing hash — starting from the pixel id, repeatedly
// reassemble a little-endian deUint32 from the reference buffer at the
// value-dependent offset, valuesPerPixel times — then write the final
// value as an RGBA8 color.  Later draws (higher callId) overwrite
// earlier ones for pixels they cover, hence iterating descriptor sets
// from firstDescriptorSetNdx upward.
6281 void RenderFragmentUniformBuffer::verify (VerifyRenderPassContext& context, size_t)
6283 const deUint32 valuesPerPixel = (deUint32)divRoundUp<size_t>(m_descriptorSets.size() * (MAX_UNIFORM_BUFFER_SIZE / 4), m_targetWidth * m_targetHeight);
6284 const size_t arraySize = MAX_UNIFORM_BUFFER_SIZE / (sizeof(deUint32) * 4);
6285 const size_t arrayIntSize = arraySize * 4;
6287 for (int y = 0; y < context.getReferenceTarget().getSize().y(); y++)
6288 for (int x = 0; x < context.getReferenceTarget().getSize().x(); x++)
// NOTE(review): 256u hard-codes the target width used for pixel ids —
// presumably matches the shader; confirm against uniform-buffer.frag.
6290 const size_t firstDescriptorSetNdx = de::min<size_t>((y * 256u + x) / (arrayIntSize / valuesPerPixel), m_descriptorSets.size() - 1);
6292 for (size_t descriptorSetNdx = firstDescriptorSetNdx; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6294 const size_t offset = descriptorSetNdx * MAX_UNIFORM_BUFFER_SIZE;
6295 const deUint32 callId = (deUint32)descriptorSetNdx;
6297 const deUint32 id = callId * ((deUint32)arrayIntSize / valuesPerPixel) + (deUint32)y * 256u + (deUint32)x;
6299 if (y * 256u + x < callId * (arrayIntSize / valuesPerPixel))
6303 deUint32 value = id;
6305 for (deUint32 i = 0; i < valuesPerPixel; i++)
6307 value = ((deUint32)context.getReference().get(offset + (value % (MAX_UNIFORM_BUFFER_SIZE / sizeof(deUint32))) * 4 + 0))
6308 | (((deUint32)context.getReference().get(offset + (value % (MAX_UNIFORM_BUFFER_SIZE / sizeof(deUint32))) * 4 + 1)) << 8u)
6309 | (((deUint32)context.getReference().get(offset + (value % (MAX_UNIFORM_BUFFER_SIZE / sizeof(deUint32))) * 4 + 2)) << 16u)
6310 | (((deUint32)context.getReference().get(offset + (value % (MAX_UNIFORM_BUFFER_SIZE / sizeof(deUint32))) * 4 + 3)) << 24u);
// Unpack the final 32-bit value into four 8-bit channels.
6313 const UVec4 vec ((value >> 0u) & 0xFFu,
6314 (value >> 8u) & 0xFFu,
6315 (value >> 16u) & 0xFFu,
6316 (value >> 24u) & 0xFFu);
6318 context.getReferenceTarget().getAccess().setPixel(vec.asFloat() / Vec4(255.0f), x, y);
// Render-pass command that reads the whole test buffer as one storage
// buffer in the fragment shader (no per-slice splitting, unlike the
// uniform-buffer variant, since storage buffers allow large ranges).
6324 class RenderFragmentStorageBuffer : public RenderPassCommand
6327 RenderFragmentStorageBuffer (void) {}
6328 ~RenderFragmentStorageBuffer (void);
6330 const char* getName (void) const { return "RenderFragmentStorageBuffer"; }
6331 void logPrepare (TestLog&, size_t) const;
6332 void logSubmit (TestLog&, size_t) const;
6333 void prepare (PrepareRenderPassContext&);
6334 void submit (SubmitContext& context);
6335 void verify (VerifyRenderPassContext&, size_t);
6338 PipelineResources m_resources;
6339 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
6340 vk::Move<vk::VkDescriptorSet> m_descriptorSet;
// Cached in prepare() for use by submit()/verify().
6342 vk::VkDeviceSize m_bufferSize;
6343 size_t m_targetWidth;
6344 size_t m_targetHeight;
// Destructor; body lines are not visible in this extraction.
6347 RenderFragmentStorageBuffer::~RenderFragmentStorageBuffer (void)
// Log a one-line description of the prepare step for this command.
6351 void RenderFragmentStorageBuffer::logPrepare (TestLog& log, size_t commandIndex) const
6353 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline to render buffer as storage buffer." << TestLog::EndMessage;
// Log a one-line description of the submit step for this command.
6356 void RenderFragmentStorageBuffer::logSubmit (TestLog& log, size_t commandIndex) const
6358 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as storage buffer." << TestLog::EndMessage;
// Build a full-screen-quad pipeline whose fragment shader reads the test
// buffer through a single storage-buffer descriptor covering the whole
// buffer, plus a fragment-stage push-constant range.
// NOTE(review): lossy extraction — initializer fields and braces are
// missing from the visible struct literals.
6361 void RenderFragmentStorageBuffer::prepare (PrepareRenderPassContext& context)
6363 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
6364 const vk::VkDevice device = context.getContext().getDevice();
6365 const vk::VkRenderPass renderPass = context.getRenderPass();
6366 const deUint32 subpass = 0;
6367 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-quad.vert"), 0));
6368 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-buffer.frag"), 0));
6369 vector<vk::VkDescriptorSetLayoutBinding> bindings;
6371 m_bufferSize = context.getBufferSize();
6372 m_targetWidth = context.getTargetWidth();
6373 m_targetHeight = context.getTargetHeight();
6376 const vk::VkDescriptorSetLayoutBinding binding =
6379 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
6381 vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6385 bindings.push_back(binding);
// Push constants deliver CallParams (see submit()) to the fragment shader.
6387 const vk::VkPushConstantRange pushConstantRange =
6389 vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6394 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
6395 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 1u, &pushConstantRange, m_resources);
// A single descriptor set suffices: the whole buffer fits one binding.
6398 const deUint32 descriptorCount = 1;
6399 const vk::VkDescriptorPoolSize poolSizes =
6401 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
6404 const vk::VkDescriptorPoolCreateInfo createInfo =
6406 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
6408 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
6415 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
6419 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
6420 const vk::VkDescriptorSetAllocateInfo allocateInfo =
6422 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
6430 m_descriptorSet = vk::allocateDescriptorSet(vkd, device, &allocateInfo);
6433 const vk::VkDescriptorBufferInfo bufferInfo =
6435 context.getBuffer(),
6439 const vk::VkWriteDescriptorSet write =
6441 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
6443 m_descriptorSet.get(),
6447 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
6453 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
// Record one full-screen-quad draw (6 vertices), pushing
// {valuesPerPixel, bufferSize} so the fragment shader knows how many
// 32-bit values each pixel folds and the buffer's byte size.  The struct
// declaration lines around 6465-6473 are partly missing from this
// extraction.
6458 void RenderFragmentStorageBuffer::submit (SubmitContext& context)
6460 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
6461 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
6463 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
6467 const deUint32 valuesPerPixel;
6468 const deUint32 bufferSize;
6470 (deUint32)divRoundUp<vk::VkDeviceSize>(m_bufferSize / 4, m_targetWidth * m_targetHeight),
6471 (deUint32)m_bufferSize
6474 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
6475 vkd.cmdPushConstants(commandBuffer, *m_resources.pipelineLayout, vk::VK_SHADER_STAGE_FRAGMENT_BIT, 0u, (deUint32)sizeof(callParams), &callParams);
6476 vkd.cmdDraw(commandBuffer, 6, 1, 0, 0);
// Reference rendering: per target pixel, replay the fragment shader's
// pointer-chasing hash over the whole reference buffer (little-endian
// 32-bit reads at value-dependent offsets, valuesPerPixel iterations)
// and store the result as an RGBA8 color.
6479 void RenderFragmentStorageBuffer::verify (VerifyRenderPassContext& context, size_t)
6481 const deUint32 valuesPerPixel = (deUint32)divRoundUp<vk::VkDeviceSize>(m_bufferSize / 4, m_targetWidth * m_targetHeight);
6483 for (int y = 0; y < context.getReferenceTarget().getSize().y(); y++)
6484 for (int x = 0; x < context.getReferenceTarget().getSize().x(); x++)
// NOTE(review): 256u hard-codes the target width used for pixel ids —
// presumably matches the shader; confirm against storage-buffer.frag.
6486 const deUint32 id = (deUint32)y * 256u + (deUint32)x;
6488 deUint32 value = id;
6490 for (deUint32 i = 0; i < valuesPerPixel; i++)
6492 value = (((deUint32)context.getReference().get((size_t)(value % (m_bufferSize / sizeof(deUint32))) * 4 + 0)) << 0u)
6493 | (((deUint32)context.getReference().get((size_t)(value % (m_bufferSize / sizeof(deUint32))) * 4 + 1)) << 8u)
6494 | (((deUint32)context.getReference().get((size_t)(value % (m_bufferSize / sizeof(deUint32))) * 4 + 2)) << 16u)
6495 | (((deUint32)context.getReference().get((size_t)(value % (m_bufferSize / sizeof(deUint32))) * 4 + 3)) << 24u);
// Unpack the final 32-bit value into four 8-bit channels.
6498 const UVec4 vec ((value >> 0u) & 0xFFu,
6499 (value >> 8u) & 0xFFu,
6500 (value >> 16u) & 0xFFu,
6501 (value >> 24u) & 0xFFu);
6503 context.getReferenceTarget().getAccess().setPixel(vec.asFloat() / Vec4(255.0f), x, y);
// Part of an Op enumeration listing the command types this test
// generates (buffer/image setup, transfers, barriers, render-pass
// draws).  NOTE(review): the enum's declaration line and some values are
// not visible in this extraction.
6521 OP_BUFFER_BINDMEMORY,
6523 OP_QUEUE_WAIT_FOR_IDLE,
6524 OP_DEVICE_WAIT_FOR_IDLE,
6526 OP_COMMAND_BUFFER_BEGIN,
6527 OP_COMMAND_BUFFER_END,
6529 // Buffer transfer operations
6533 OP_BUFFER_COPY_TO_BUFFER,
6534 OP_BUFFER_COPY_FROM_BUFFER,
6536 OP_BUFFER_COPY_TO_IMAGE,
6537 OP_BUFFER_COPY_FROM_IMAGE,
6541 OP_IMAGE_BINDMEMORY,
6543 OP_IMAGE_TRANSITION_LAYOUT,
6545 OP_IMAGE_COPY_TO_BUFFER,
6546 OP_IMAGE_COPY_FROM_BUFFER,
6548 OP_IMAGE_COPY_TO_IMAGE,
6549 OP_IMAGE_COPY_FROM_IMAGE,
6551 OP_IMAGE_BLIT_TO_IMAGE,
6552 OP_IMAGE_BLIT_FROM_IMAGE,
// Pipeline barrier variants: global memory, per-buffer, per-image.
6556 OP_PIPELINE_BARRIER_GLOBAL,
6557 OP_PIPELINE_BARRIER_BUFFER,
6558 OP_PIPELINE_BARRIER_IMAGE,
6560 // Renderpass operations
6561 OP_RENDERPASS_BEGIN,
6564 // Commands inside render pass
6565 OP_RENDER_VERTEX_BUFFER,
6566 OP_RENDER_INDEX_BUFFER,
6568 OP_RENDER_VERTEX_UNIFORM_BUFFER,
6569 OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER,
6571 OP_RENDER_VERTEX_STORAGE_BUFFER,
6572 OP_RENDER_FRAGMENT_STORAGE_BUFFER,
6574 OP_RENDER_VERTEX_STORAGE_TEXEL_BUFFER,
6576 OP_RENDER_VERTEX_STORAGE_IMAGE,
6577 OP_RENDER_VERTEX_SAMPLED_IMAGE,
6579 OP_RENDER_FRAGMENT_UNIFORM_BUFFER,
6585 STAGE_COMMAND_BUFFER,
// Mask of all VkAccessFlags bits that perform writes; used to classify
// an access as a write (see isWriteAccess below).
6590 vk::VkAccessFlags getWriteAccessFlags (void)
6592 return vk::VK_ACCESS_SHADER_WRITE_BIT
6593 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
6594 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
6595 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
6596 | vk::VK_ACCESS_HOST_WRITE_BIT
6597 | vk::VK_ACCESS_MEMORY_WRITE_BIT;
// True if the given single access bit is one of the write accesses.
6600 bool isWriteAccess (vk::VkAccessFlagBits access)
6602 return (getWriteAccessFlags() & access) != 0;
// Interior of class CacheState: tracks which writes are available and
// visible to which pipeline stages, to decide where barriers are needed.
// NOTE(review): the class declaration line is not visible in this
// extraction.
6608 CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses);
// True if an access of 'access' at 'stage' would observe all prior writes.
6610 bool isValid (vk::VkPipelineStageFlagBits stage,
6611 vk::VkAccessFlagBits access) const;
// Record that an access happened (a write dirties the tracked state).
6613 void perform (vk::VkPipelineStageFlagBits stage,
6614 vk::VkAccessFlagBits access);
6616 void submitCommandBuffer (void);
6617 void waitForIdle (void);
// Compute the src/dst stage+access masks of a barrier that would make
// everything available and visible.
6619 void getFullBarrier (vk::VkPipelineStageFlags& srcStages,
6620 vk::VkAccessFlags& srcAccesses,
6621 vk::VkPipelineStageFlags& dstStages,
6622 vk::VkAccessFlags& dstAccesses) const;
// Apply the effect of a memory barrier to the tracked state.
6624 void barrier (vk::VkPipelineStageFlags srcStages,
6625 vk::VkAccessFlags srcAccesses,
6626 vk::VkPipelineStageFlags dstStages,
6627 vk::VkAccessFlags dstAccesses);
// Apply the effect of a barrier that also performs a layout transition.
6629 void imageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
6630 vk::VkAccessFlags srcAccesses,
6631 vk::VkPipelineStageFlags dstStages,
6632 vk::VkAccessFlags dstAccesses);
6634 void checkImageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
6635 vk::VkAccessFlags srcAccesses,
6636 vk::VkPipelineStageFlags dstStages,
6637 vk::VkAccessFlags dstAccesses);
6639 // Everything is clean and there is no need for barriers
6640 bool isClean (void) const;
6642 vk::VkPipelineStageFlags getAllowedStages (void) const { return m_allowedStages; }
6643 vk::VkAccessFlags getAllowedAcceses (void) const { return m_allowedAccesses; }
6645 // Limit which stages and accesses are used by the CacheState tracker
6646 const vk::VkPipelineStageFlags m_allowedStages;
6647 const vk::VkAccessFlags m_allowedAccesses;
6649 // [dstStage][srcStage] = srcAccesses
6650 // In stage dstStage write srcAccesses from srcStage are not yet available
6651 vk::VkAccessFlags m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
6652 // Latest pipeline transition is not available in stage
6653 bool m_unavailableLayoutTransition[PIPELINESTAGE_LAST];
6654 // [dstStage] = dstAccesses
6655 // In stage dstStage ops with dstAccesses are not yet visible
6656 vk::VkAccessFlags m_invisibleOperations[PIPELINESTAGE_LAST];
6658 // [dstStage] = srcStage
6659 // Memory operation in srcStage have not completed before dstStage
6660 vk::VkPipelineStageFlags m_incompleteOperations[PIPELINESTAGE_LAST];
// Initialize the tracker to a fully clean state: nothing invisible,
// nothing incomplete, no pending layout transitions, no unavailable writes.
CacheState::CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses)
	: m_allowedStages	(allowedStages)
	, m_allowedAccesses	(allowedAccesses)
	// Iterate every single-bit stage flag up to the highest allowed stage bit
	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

		// Stage bits outside the allowed set are skipped
		if ((dstStage_ & m_allowedStages) == 0)

		// All operations are initially visible
		m_invisibleOperations[dstStage] = 0;

		// There are no incomplete read operations initially
		m_incompleteOperations[dstStage] = 0;

		// There are no incomplete layout transitions
		m_unavailableLayoutTransition[dstStage] = false;

		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & m_allowedStages) == 0)

			// There are no write operations that are not yet available
			m_unavailableWriteOperations[dstStage][srcStage] = 0;
// Returns whether an access of type 'access' at pipeline stage 'stage' would
// observe a consistent view of memory: no pending layout transition, no
// invisible operation for this access and (for writes) no incomplete
// operations in other stages.
bool CacheState::isValid (vk::VkPipelineStageFlagBits stage,
						  vk::VkAccessFlagBits access) const
	// Only stages/accesses that this tracker was configured with may be queried
	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
	DE_ASSERT((stage & (~m_allowedStages)) == 0);

	const PipelineStage dstStage = pipelineStageFlagToPipelineStage(stage);

	// Previous operations are not visible to access on stage
	if (m_unavailableLayoutTransition[dstStage] || (m_invisibleOperations[dstStage] & access) != 0)

	if (isWriteAccess(access))
		// Memory operations from other stages have not completed before
		if (m_incompleteOperations[dstStage] != 0)
// Record that an operation with 'access' was performed at 'stage': the
// operation becomes incomplete from the point of view of every tracked stage,
// and a write additionally makes all accesses invisible and itself
// unavailable everywhere until a barrier flushes it.
void CacheState::perform (vk::VkPipelineStageFlagBits stage,
						  vk::VkAccessFlagBits access)
	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
	DE_ASSERT((stage & (~m_allowedStages)) == 0);

	const PipelineStage srcStage = pipelineStageFlagToPipelineStage(stage);

	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

		if ((dstStage_ & m_allowedStages) == 0)

		// Mark stage as incomplete for all stages
		m_incompleteOperations[dstStage] |= stage;

		if (isWriteAccess(access))
			// Mark all accesses from all stages invisible
			m_invisibleOperations[dstStage] |= m_allowedAccesses;

			// Mark write access from srcStage unavailable to all stages
			m_unavailableWriteOperations[dstStage][srcStage] |= access;
// Model the implicit host memory synchronization performed at queue submit:
// a barrier sourced from the host stage covering host read/write accesses
// (both masked to the allowed sets).
void CacheState::submitCommandBuffer (void)
	// Flush all host writes and reads
	barrier(m_allowedStages & vk::VK_PIPELINE_STAGE_HOST_BIT,
			m_allowedAccesses & (vk::VK_ACCESS_HOST_READ_BIT | vk::VK_ACCESS_HOST_WRITE_BIT),
// Model vkQueueWaitIdle/vkDeviceWaitIdle using two barriers: first make all
// writes available, then make them visible to all device (non-host) stages.
void CacheState::waitForIdle (void)
	// Make all writes available
	barrier(m_allowedStages,
			m_allowedAccesses & getWriteAccessFlags(),

	// Make all writes visible on device side
	barrier(m_allowedStages,
			m_allowedStages & (~vk::VK_PIPELINE_STAGE_HOST_BIT),
// Accumulate into the output masks everything a single pipeline barrier would
// need in order to leave this tracker fully clean: completion of incomplete
// operations, visibility of invisible accesses, availability of pending
// writes and completion of any pending layout transition.
void CacheState::getFullBarrier (vk::VkPipelineStageFlags& srcStages,
								 vk::VkAccessFlags& srcAccesses,
								 vk::VkPipelineStageFlags& dstStages,
								 vk::VkAccessFlags& dstAccesses) const
	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

		if ((dstStage_ & m_allowedStages) == 0)

		// Make sure all previous operation are complete in all stages
		if (m_incompleteOperations[dstStage])
			dstStages |= dstStage_;
			srcStages |= m_incompleteOperations[dstStage];

		// Make sure all read operations are visible in dstStage
		if (m_invisibleOperations[dstStage])
			dstStages |= dstStage_;
			dstAccesses |= m_invisibleOperations[dstStage];

		// Make sure all write operations from all stages are available
		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & m_allowedStages) == 0)

			if (m_unavailableWriteOperations[dstStage][srcStage])
				dstStages |= dstStage_;
				srcStages |= dstStage_;
				srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage];

			if (m_unavailableLayoutTransition[dstStage] && !m_unavailableLayoutTransition[srcStage])
				// Add dependency between srcStage and dstStage if layout transition has not completed in dstStage,
				// but has completed in srcStage.
				dstStages |= dstStage_;
				srcStages |= dstStage_;

	// The computed masks must stay within the configured limits
	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
// Debug-build sanity check for a layout-transition barrier: verifies that all
// stages have completed relative to srcStages and that at least one pending
// write remains available to flush. Compiles to assertions only; a no-op in
// release builds (hence the DE_UNREFs).
void CacheState::checkImageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
										  vk::VkAccessFlags srcAccesses,
										  vk::VkPipelineStageFlags dstStages,
										  vk::VkAccessFlags dstAccesses)
	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);

	// Parameters are only consumed by the DE_DEBUG block below
	DE_UNREF(srcStages);
	DE_UNREF(srcAccesses);

	DE_UNREF(dstStages);
	DE_UNREF(dstAccesses);

#if defined(DE_DEBUG)
	// Check that all stages have completed before srcStages or are in srcStages.
	vk::VkPipelineStageFlags completedStages = srcStages;

	for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
		const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

		if ((srcStage_ & srcStages) == 0)

		completedStages |= (~m_incompleteOperations[srcStage]);

	DE_ASSERT((completedStages & m_allowedStages) == m_allowedStages);

	// Check that any write is available at least in one stage. Since all stages are complete even single flush is enough.
	if ((getWriteAccessFlags() & m_allowedAccesses) != 0 && (srcAccesses & getWriteAccessFlags()) == 0)
		bool anyWriteAvailable = false;

		for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
			const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

			if ((dstStage_ & m_allowedStages) == 0)

			for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
				const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

				if ((srcStage_ & m_allowedStages) == 0)

				if (m_unavailableWriteOperations[dstStage][srcStage] != (getWriteAccessFlags() & m_allowedAccesses))
					anyWriteAvailable = true;

		DE_ASSERT(anyWriteAvailable);
// Apply the effects of an image layout transition barrier: after it, all
// previous writes are available, but operations are incomplete and accesses
// invisible everywhere except as listed in dstStages/dstAccesses.
void CacheState::imageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
									 vk::VkAccessFlags srcAccesses,
									 vk::VkPipelineStageFlags dstStages,
									 vk::VkAccessFlags dstAccesses)
	// Validate the barrier in debug builds before applying it
	checkImageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);

	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

		if ((dstStage_ & m_allowedStages) == 0)

		// All stages are incomplete after the barrier except each dstStage in itself.
		m_incompleteOperations[dstStage] = m_allowedStages & (~dstStage_);

		// All memory operations are invisible unless they are listed in dstAccess
		m_invisibleOperations[dstStage] = m_allowedAccesses & (~dstAccesses);

		// Layout transition is unavailable in stage unless it was listed in dstStages
		m_unavailableLayoutTransition[dstStage]= (dstStage_ & dstStages) == 0;

		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & m_allowedStages) == 0)

			// All write operations are available after layout transition
			m_unavailableWriteOperations[dstStage][srcStage] = 0;
// Apply the effects of a normal (non-layout) pipeline barrier. The update is
// done in two passes: first, properties already established in any srcStage
// (completion, layout-transition availability, write availability) are
// propagated to every dstStage using a snapshot of the pre-barrier state;
// second, srcAccesses are made available and, where that leaves no
// unavailable writes, dstAccesses are made visible.
void CacheState::barrier (vk::VkPipelineStageFlags srcStages,
						  vk::VkAccessFlags srcAccesses,
						  vk::VkPipelineStageFlags dstStages,
						  vk::VkAccessFlags dstAccesses)
	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);

	// Snapshot pre-barrier state so pass one reads consistent values while
	// writing the updated ones.
	vk::VkPipelineStageFlags	oldIncompleteOperations[PIPELINESTAGE_LAST];
	vk::VkAccessFlags			oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
	bool						oldUnavailableLayoutTransition[PIPELINESTAGE_LAST];

	deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
	deMemcpy(oldUnavailableWriteOperations, m_unavailableWriteOperations, sizeof(oldUnavailableWriteOperations));
	deMemcpy(oldUnavailableLayoutTransition, m_unavailableLayoutTransition, sizeof(oldUnavailableLayoutTransition));

	for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
		const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

		if ((srcStage_ & srcStages) == 0)

		for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
			const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

			if ((dstStage_ & dstStages) == 0)

			// Stages that have completed before srcStage have also completed before dstStage
			m_incompleteOperations[dstStage] &= oldIncompleteOperations[srcStage];

			// Image layout transition in srcStage are now available in dstStage
			m_unavailableLayoutTransition[dstStage] &= oldUnavailableLayoutTransition[srcStage];

			for (vk::VkPipelineStageFlags sharedStage_ = 1; sharedStage_ <= m_allowedStages; sharedStage_ <<= 1)
				const PipelineStage sharedStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)sharedStage_);

				if ((sharedStage_ & m_allowedStages) == 0)

				// Writes that are available in srcStage are also available in dstStage
				m_unavailableWriteOperations[dstStage][sharedStage] &= oldUnavailableWriteOperations[srcStage][sharedStage];

	// Pass two: flush srcAccesses and, where possible, make dstAccesses visible
	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
		bool allWritesAvailable = true;

		if ((dstStage_ & dstStages) == 0)

		// Operations in srcStages have completed before any stage in dstStages
		m_incompleteOperations[dstStage] &= ~srcStages;

		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & m_allowedStages) == 0)

			// Make srcAccesses from srcStage available in dstStage
			if ((srcStage_ & srcStages) != 0)
				m_unavailableWriteOperations[dstStage][srcStage] &= ~srcAccesses;

			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
				allWritesAvailable = false;

		// If all writes are available in dstStage make dstAccesses also visible
		if (allWritesAvailable)
			m_invisibleOperations[dstStage] &= ~dstAccesses;
// True when no barrier is needed: every tracked stage has nothing invisible,
// nothing incomplete, no pending layout transition and no unavailable writes.
bool CacheState::isClean (void) const
	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);

		if ((dstStage_ & m_allowedStages) == 0)

		// Some operations are not visible to some stages
		if (m_invisibleOperations[dstStage] != 0)

		// There are operations that have not completed yet
		if (m_incompleteOperations[dstStage] != 0)

		// Layout transition has not completed yet
		if (m_unavailableLayoutTransition[dstStage])

		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);

			if ((srcStage_ & m_allowedStages) == 0)

			// Some write operations are not available yet
			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
// Returns whether an image with the given 'usage' flags may legally be in
// 'layout' (per-layout required usage bits).
bool layoutSupportedByUsage (Usage usage, vk::VkImageLayout layout)
	case vk::VK_IMAGE_LAYOUT_GENERAL:

	case vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
		return (usage & USAGE_COLOR_ATTACHMENT) != 0;

	case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
		return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;

	case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
		return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;

	case vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
		// \todo [2016-03-09 mika] Should include input attachment
		return (usage & USAGE_SAMPLED_IMAGE) != 0;

	case vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
		return (usage & USAGE_TRANSFER_SRC) != 0;

	case vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
		return (usage & USAGE_TRANSFER_DST) != 0;

	case vk::VK_IMAGE_LAYOUT_PREINITIALIZED:

		DE_FATAL("Unknown layout");
7096 size_t getNumberOfSupportedLayouts (Usage usage)
7098 const vk::VkImageLayout layouts[] =
7100 vk::VK_IMAGE_LAYOUT_GENERAL,
7101 vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
7102 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
7103 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
7104 vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
7105 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
7106 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
7108 size_t supportedLayoutCount = 0;
7110 for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
7112 const vk::VkImageLayout layout = layouts[layoutNdx];
7114 if (layoutSupportedByUsage(usage, layout))
7115 supportedLayoutCount++;
7118 return supportedLayoutCount;
// Pick a random layout that is supported by 'usage' and differs from
// 'previousLayout'. When the previous layout is UNDEFINED all supported
// layouts are candidates; otherwise the previous one is excluded from the
// random index range.
vk::VkImageLayout getRandomNextLayout (de::Random& rng,
									   vk::VkImageLayout previousLayout)
	const vk::VkImageLayout layouts[] =
	{
		vk::VK_IMAGE_LAYOUT_GENERAL,
		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
		vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
		vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
		vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
	const size_t supportedLayoutCount = getNumberOfSupportedLayouts(usage);

	DE_ASSERT(supportedLayoutCount > 0);

	size_t nextLayoutNdx = ((size_t)rng.getUint64()) % (previousLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED
														? supportedLayoutCount
														: supportedLayoutCount - 1);

	// Walk candidates, skipping unsupported layouts and the previous layout,
	// until the randomly chosen index is reached.
	for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
		const vk::VkImageLayout layout = layouts[layoutNdx];

		if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
			if (nextLayoutNdx == 0)

	DE_FATAL("Unreachable");
	return vk::VK_IMAGE_LAYOUT_UNDEFINED;
	// Aggregated simulation state for the randomized op sequence: current
	// execution stage, cache tracker and object/memory status flags.
	// NOTE(review): 'seed' presumably initializes the rng member used below --
	// the initializer is outside this excerpt; confirm against full source.
	State (Usage usage, deUint32 seed)
		: stage					(STAGE_HOST)
		, cache					(usageToStageFlags(usage), usageToAccessFlags(usage))
		, hostInvalidated		(true)
		, hostFlushed			(true)
		, memoryDefined			(false)
		, hasBoundBufferMemory	(false)
		, hasBoundImageMemory	(false)
		, imageLayout			(vk::VK_IMAGE_LAYOUT_UNDEFINED)
		, imageDefined			(false)
		, commandBufferIsEmpty	(true)
		, renderPassIsEmpty		(true)

	// Host caches have been invalidated (safe to read mapped memory)
	bool					hostInvalidated;
	// Buffer exists and has memory bound to it
	bool					hasBoundBufferMemory;
	// Image exists and has memory bound to it
	bool					hasBoundImageMemory;
	// Current layout of the image (UNDEFINED when contents are not usable)
	vk::VkImageLayout		imageLayout;
	// No commands recorded since last begin
	bool					commandBufferIsEmpty;
	// No draw commands recorded since render pass begin
	bool					renderPassIsEmpty;
// Collect into 'ops' every operation that is legal to perform next given the
// current simulated 'state', whether buffers/images are supported and the
// declared memory 'usage' flags. Branches on the current stage: host,
// command buffer, or render pass.
void getAvailableOps (const State& state, bool supportsBuffers, bool supportsImages, Usage usage, vector<Op>& ops)
	if (state.stage == STAGE_HOST)
		if (usage & (USAGE_HOST_READ | USAGE_HOST_WRITE))
			// Host memory operations
				ops.push_back(OP_UNMAP);

				// Avoid flush and finish if they are not needed
				if (!state.hostFlushed)
					ops.push_back(OP_MAP_FLUSH);

				// Invalidate only when the cache state says host reads/writes
				// would be coherent afterwards
				if (!state.hostInvalidated
					&& ((usage & USAGE_HOST_READ) == 0
						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
					&& ((usage & USAGE_HOST_WRITE) == 0
						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)))
					ops.push_back(OP_MAP_INVALIDATE);

				// Read-modify-write requires defined memory and both host accesses valid
				if (usage & USAGE_HOST_READ
					&& usage & USAGE_HOST_WRITE
					&& state.memoryDefined
					&& state.hostInvalidated
					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)
					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
					ops.push_back(OP_MAP_MODIFY);

				if (usage & USAGE_HOST_READ
					&& state.memoryDefined
					&& state.hostInvalidated
					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
					ops.push_back(OP_MAP_READ);

				if (usage & USAGE_HOST_WRITE
					&& state.hostInvalidated
					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT))
					ops.push_back(OP_MAP_WRITE);

				ops.push_back(OP_MAP);

		if (state.hasBoundBufferMemory && state.queueIdle)
			// \note Destroy only buffers after they have been bound
			ops.push_back(OP_BUFFER_DESTROY);

			if (state.hasBuffer)
				if (!state.hasBoundBufferMemory)
					ops.push_back(OP_BUFFER_BINDMEMORY);
			else if (!state.hasImage && supportsBuffers) // Avoid creating buffer if there is already image
				ops.push_back(OP_BUFFER_CREATE);

		if (state.hasBoundImageMemory && state.queueIdle)
			// \note Destroy only image after they have been bound
			ops.push_back(OP_IMAGE_DESTROY);

				if (!state.hasBoundImageMemory)
					ops.push_back(OP_IMAGE_BINDMEMORY);
			else if (!state.hasBuffer && supportsImages) // Avoid creating image if there is already buffer
				ops.push_back(OP_IMAGE_CREATE);

		// Host writes must be flushed before GPU commands and there must be
		// buffer or image for GPU commands
		if (state.hostFlushed
			&& (state.memoryDefined || supportsDeviceBufferWrites(usage) || state.imageDefined || supportsDeviceImageWrites(usage))
			&& (state.hasBoundBufferMemory || state.hasBoundImageMemory) // Avoid command buffers if there is no object to use
			&& (usageToStageFlags(usage) & (~vk::VK_PIPELINE_STAGE_HOST_BIT)) != 0) // Don't start command buffer if there are no ways to use memory from gpu
			ops.push_back(OP_COMMAND_BUFFER_BEGIN);

		if (!state.deviceIdle)
			ops.push_back(OP_DEVICE_WAIT_FOR_IDLE);

		if (!state.queueIdle)
			ops.push_back(OP_QUEUE_WAIT_FOR_IDLE);
	else if (state.stage == STAGE_COMMAND_BUFFER)
		// Barriers are only offered while the cache state is dirty
		if (!state.cache.isClean())
			ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);

			ops.push_back(OP_PIPELINE_BARRIER_IMAGE);

			if (state.hasBuffer)
				ops.push_back(OP_PIPELINE_BARRIER_BUFFER);

		// Buffer transfer commands
		if (state.hasBoundBufferMemory)
			if (usage & USAGE_TRANSFER_DST
				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
				ops.push_back(OP_BUFFER_FILL);
				ops.push_back(OP_BUFFER_UPDATE);
				ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
				ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);

			if (usage & USAGE_TRANSFER_SRC
				&& state.memoryDefined
				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
				ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
				ops.push_back(OP_BUFFER_COPY_TO_IMAGE);

		// Image layout transitions and transfer commands
		if (state.hasBoundImageMemory
			&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED
				|| getNumberOfSupportedLayouts(usage) > 1))
			ops.push_back(OP_IMAGE_TRANSITION_LAYOUT);

			if (usage & USAGE_TRANSFER_DST
				&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
					|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
				ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
				ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
				ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);

			if (usage & USAGE_TRANSFER_SRC
				&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
					|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
				&& state.imageDefined
				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
				ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
				ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
				ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);

		// A render pass can begin only if some buffer or image usage is both
		// declared and currently valid per the cache state.
		// \todo [2016-03-09 mika] Add other usages?
		if ((state.memoryDefined
			&& state.hasBoundBufferMemory
			&& (((usage & USAGE_VERTEX_BUFFER)
				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
				|| ((usage & USAGE_INDEX_BUFFER)
					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
				|| ((usage & USAGE_UNIFORM_BUFFER)
					&& (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT)
						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT)))
				|| ((usage & USAGE_UNIFORM_TEXEL_BUFFER)
					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
				|| ((usage & USAGE_STORAGE_BUFFER)
					&& (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT)
						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT)))
				|| ((usage & USAGE_STORAGE_TEXEL_BUFFER)
					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))))
			|| (state.imageDefined
				&& state.hasBoundImageMemory
				&& (((usage & USAGE_STORAGE_IMAGE)
					&& state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
					|| ((usage & USAGE_SAMPLED_IMAGE)
						&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
							|| state.imageLayout == vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
						&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT)))))
			ops.push_back(OP_RENDERPASS_BEGIN);

		// \note This depends on previous operations and has to be always the
		// last command buffer operation check
		if (ops.empty() || !state.commandBufferIsEmpty)
			ops.push_back(OP_COMMAND_BUFFER_END);
	else if (state.stage == STAGE_RENDER_PASS)
		if ((usage & USAGE_VERTEX_BUFFER) != 0
			&& state.memoryDefined
			&& state.hasBoundBufferMemory
			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
			ops.push_back(OP_RENDER_VERTEX_BUFFER);

		if ((usage & USAGE_INDEX_BUFFER) != 0
			&& state.memoryDefined
			&& state.hasBoundBufferMemory
			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
			ops.push_back(OP_RENDER_INDEX_BUFFER);

		if ((usage & USAGE_UNIFORM_BUFFER) != 0
			&& state.memoryDefined
			&& state.hasBoundBufferMemory)
			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
				ops.push_back(OP_RENDER_VERTEX_UNIFORM_BUFFER);

			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
				ops.push_back(OP_RENDER_FRAGMENT_UNIFORM_BUFFER);

		if ((usage & USAGE_UNIFORM_TEXEL_BUFFER) != 0
			&& state.memoryDefined
			&& state.hasBoundBufferMemory
			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
			ops.push_back(OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER);

		if ((usage & USAGE_STORAGE_BUFFER) != 0
			&& state.memoryDefined
			&& state.hasBoundBufferMemory)
			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
				ops.push_back(OP_RENDER_VERTEX_STORAGE_BUFFER);

			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
				ops.push_back(OP_RENDER_FRAGMENT_STORAGE_BUFFER);

		if ((usage & USAGE_STORAGE_TEXEL_BUFFER) != 0
			&& state.memoryDefined
			&& state.hasBoundBufferMemory
			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
			ops.push_back(OP_RENDER_VERTEX_STORAGE_TEXEL_BUFFER);

		if ((usage & USAGE_STORAGE_IMAGE) != 0
			&& state.imageDefined
			&& state.hasBoundImageMemory
			&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL)
			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
			ops.push_back(OP_RENDER_VERTEX_STORAGE_IMAGE);

		if ((usage & USAGE_SAMPLED_IMAGE) != 0
			&& state.imageDefined
			&& state.hasBoundImageMemory
			&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
				|| state.imageLayout == vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
			ops.push_back(OP_RENDER_VERTEX_SAMPLED_IMAGE);

		if (!state.renderPassIsEmpty)
			ops.push_back(OP_RENDERPASS_END);

		DE_FATAL("Unknown stage");
7492 void applyOp (State& state, const Memory& memory, Op op, Usage usage)
7497 DE_ASSERT(state.stage == STAGE_HOST);
7498 DE_ASSERT(!state.mapped);
7499 state.mapped = true;
7503 DE_ASSERT(state.stage == STAGE_HOST);
7504 DE_ASSERT(state.mapped);
7505 state.mapped = false;
7509 DE_ASSERT(state.stage == STAGE_HOST);
7510 DE_ASSERT(!state.hostFlushed);
7511 state.hostFlushed = true;
7514 case OP_MAP_INVALIDATE:
7515 DE_ASSERT(state.stage == STAGE_HOST);
7516 DE_ASSERT(!state.hostInvalidated);
7517 state.hostInvalidated = true;
7521 DE_ASSERT(state.stage == STAGE_HOST);
7522 DE_ASSERT(state.hostInvalidated);
7523 state.rng.getUint32();
7527 DE_ASSERT(state.stage == STAGE_HOST);
7528 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
7529 state.hostFlushed = false;
7531 state.memoryDefined = true;
7532 state.imageDefined = false;
7533 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
7534 state.rng.getUint32();
7538 DE_ASSERT(state.stage == STAGE_HOST);
7539 DE_ASSERT(state.hostInvalidated);
7541 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
7542 state.hostFlushed = false;
7544 state.rng.getUint32();
7547 case OP_BUFFER_CREATE:
7548 DE_ASSERT(state.stage == STAGE_HOST);
7549 DE_ASSERT(!state.hasBuffer);
7551 state.hasBuffer = true;
7554 case OP_BUFFER_DESTROY:
7555 DE_ASSERT(state.stage == STAGE_HOST);
7556 DE_ASSERT(state.hasBuffer);
7557 DE_ASSERT(state.hasBoundBufferMemory);
7559 state.hasBuffer = false;
7560 state.hasBoundBufferMemory = false;
7563 case OP_BUFFER_BINDMEMORY:
7564 DE_ASSERT(state.stage == STAGE_HOST);
7565 DE_ASSERT(state.hasBuffer);
7566 DE_ASSERT(!state.hasBoundBufferMemory);
7568 state.hasBoundBufferMemory = true;
7571 case OP_IMAGE_CREATE:
7572 DE_ASSERT(state.stage == STAGE_HOST);
7573 DE_ASSERT(!state.hasImage);
7574 DE_ASSERT(!state.hasBuffer);
7576 state.hasImage = true;
7579 case OP_IMAGE_DESTROY:
7580 DE_ASSERT(state.stage == STAGE_HOST);
7581 DE_ASSERT(state.hasImage);
7582 DE_ASSERT(state.hasBoundImageMemory);
7584 state.hasImage = false;
7585 state.hasBoundImageMemory = false;
7586 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
7587 state.imageDefined = false;
7590 case OP_IMAGE_BINDMEMORY:
7591 DE_ASSERT(state.stage == STAGE_HOST);
7592 DE_ASSERT(state.hasImage);
7593 DE_ASSERT(!state.hasBoundImageMemory);
7595 state.hasBoundImageMemory = true;
7598 case OP_IMAGE_TRANSITION_LAYOUT:
7600 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7601 DE_ASSERT(state.hasImage);
7602 DE_ASSERT(state.hasBoundImageMemory);
7604 // \todo [2016-03-09 mika] Support linear tiling and predefined data
7605 const vk::VkImageLayout srcLayout = state.rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
7606 const vk::VkImageLayout dstLayout = getRandomNextLayout(state.rng, usage, srcLayout);
7608 vk::VkPipelineStageFlags dirtySrcStages;
7609 vk::VkAccessFlags dirtySrcAccesses;
7610 vk::VkPipelineStageFlags dirtyDstStages;
7611 vk::VkAccessFlags dirtyDstAccesses;
7613 vk::VkPipelineStageFlags srcStages;
7614 vk::VkAccessFlags srcAccesses;
7615 vk::VkPipelineStageFlags dstStages;
7616 vk::VkAccessFlags dstAccesses;
7618 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
7620 // Try masking some random bits
7621 srcStages = dirtySrcStages;
7622 srcAccesses = dirtySrcAccesses;
7624 dstStages = state.cache.getAllowedStages() & state.rng.getUint32();
7625 dstAccesses = state.cache.getAllowedAcceses() & state.rng.getUint32();
7627 // If there are no bits in dst stage mask use all stages
7628 dstStages = dstStages ? dstStages : state.cache.getAllowedStages();
7631 srcStages = dstStages;
7633 if (srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
7634 state.imageDefined = false;
7636 state.commandBufferIsEmpty = false;
7637 state.imageLayout = dstLayout;
7638 state.memoryDefined = false;
7639 state.cache.imageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
7643 case OP_QUEUE_WAIT_FOR_IDLE:
7644 DE_ASSERT(state.stage == STAGE_HOST);
7645 DE_ASSERT(!state.queueIdle);
7647 state.queueIdle = true;
7649 state.cache.waitForIdle();
7652 case OP_DEVICE_WAIT_FOR_IDLE:
7653 DE_ASSERT(state.stage == STAGE_HOST);
7654 DE_ASSERT(!state.deviceIdle);
7656 state.queueIdle = true;
7657 state.deviceIdle = true;
7659 state.cache.waitForIdle();
7662 case OP_COMMAND_BUFFER_BEGIN:
7663 DE_ASSERT(state.stage == STAGE_HOST);
7664 state.stage = STAGE_COMMAND_BUFFER;
7665 state.commandBufferIsEmpty = true;
7666 // Makes host writes visible to command buffer
7667 state.cache.submitCommandBuffer();
7670 case OP_COMMAND_BUFFER_END:
7671 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7672 state.stage = STAGE_HOST;
7673 state.queueIdle = false;
7674 state.deviceIdle = false;
7677 case OP_BUFFER_COPY_FROM_BUFFER:
7678 case OP_BUFFER_COPY_FROM_IMAGE:
7679 case OP_BUFFER_UPDATE:
7680 case OP_BUFFER_FILL:
7681 state.rng.getUint32();
7682 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7684 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
7685 state.hostInvalidated = false;
7687 state.commandBufferIsEmpty = false;
7688 state.memoryDefined = true;
7689 state.imageDefined = false;
7690 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
7691 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
7694 case OP_BUFFER_COPY_TO_BUFFER:
7695 case OP_BUFFER_COPY_TO_IMAGE:
7696 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7698 state.commandBufferIsEmpty = false;
7699 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
7702 case OP_IMAGE_BLIT_FROM_IMAGE:
7703 state.rng.getBool();
7705 case OP_IMAGE_COPY_FROM_BUFFER:
7706 case OP_IMAGE_COPY_FROM_IMAGE:
7707 state.rng.getUint32();
7708 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7710 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
7711 state.hostInvalidated = false;
7713 state.commandBufferIsEmpty = false;
7714 state.memoryDefined = false;
7715 state.imageDefined = true;
7716 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
7719 case OP_IMAGE_BLIT_TO_IMAGE:
7720 state.rng.getBool();
7722 case OP_IMAGE_COPY_TO_BUFFER:
7723 case OP_IMAGE_COPY_TO_IMAGE:
7724 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7726 state.commandBufferIsEmpty = false;
7727 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
7730 case OP_PIPELINE_BARRIER_GLOBAL:
7731 case OP_PIPELINE_BARRIER_BUFFER:
7732 case OP_PIPELINE_BARRIER_IMAGE:
7734 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7736 vk::VkPipelineStageFlags dirtySrcStages;
7737 vk::VkAccessFlags dirtySrcAccesses;
7738 vk::VkPipelineStageFlags dirtyDstStages;
7739 vk::VkAccessFlags dirtyDstAccesses;
7741 vk::VkPipelineStageFlags srcStages;
7742 vk::VkAccessFlags srcAccesses;
7743 vk::VkPipelineStageFlags dstStages;
7744 vk::VkAccessFlags dstAccesses;
7746 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
7748 // Try masking some random bits
7749 srcStages = dirtySrcStages & state.rng.getUint32();
7750 srcAccesses = dirtySrcAccesses & state.rng.getUint32();
7752 dstStages = dirtyDstStages & state.rng.getUint32();
7753 dstAccesses = dirtyDstAccesses & state.rng.getUint32();
7755 // If there are no bits in stage mask use the original dirty stages
7756 srcStages = srcStages ? srcStages : dirtySrcStages;
7757 dstStages = dstStages ? dstStages : dirtyDstStages;
7760 srcStages = dstStages;
7762 state.commandBufferIsEmpty = false;
7763 state.cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
7767 case OP_RENDERPASS_BEGIN:
7769 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7771 state.renderPassIsEmpty = true;
7772 state.stage = STAGE_RENDER_PASS;
7776 case OP_RENDERPASS_END:
7778 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
7780 state.renderPassIsEmpty = true;
7781 state.stage = STAGE_COMMAND_BUFFER;
7785 case OP_RENDER_VERTEX_BUFFER:
7787 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
7789 state.renderPassIsEmpty = false;
7790 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
7794 case OP_RENDER_INDEX_BUFFER:
7796 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
7798 state.renderPassIsEmpty = false;
7799 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT);
7803 case OP_RENDER_VERTEX_UNIFORM_BUFFER:
7804 case OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER:
7806 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
7808 state.renderPassIsEmpty = false;
7809 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT);
7813 case OP_RENDER_FRAGMENT_UNIFORM_BUFFER:
7815 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
7817 state.renderPassIsEmpty = false;
7818 state.cache.perform(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT);
7822 case OP_RENDER_VERTEX_STORAGE_BUFFER:
7823 case OP_RENDER_VERTEX_STORAGE_TEXEL_BUFFER:
7825 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
7827 state.renderPassIsEmpty = false;
7828 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT);
7832 case OP_RENDER_FRAGMENT_STORAGE_BUFFER:
7834 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
7836 state.renderPassIsEmpty = false;
7837 state.cache.perform(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT);
7841 case OP_RENDER_VERTEX_STORAGE_IMAGE:
7842 case OP_RENDER_VERTEX_SAMPLED_IMAGE:
7844 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
7846 state.renderPassIsEmpty = false;
7847 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT);
7852 DE_FATAL("Unknown op");
// Construct a host-stage Command object for 'op'; the caller owns the
// returned pointer. Ops with randomized behavior draw values from 'rng' so
// the generator stays in sync with the state-tracking applyOp() pass.
7856 de::MovePtr<Command> createHostCommand (Op op,
7859 vk::VkSharingMode sharing)
7863 case OP_MAP: return de::MovePtr<Command>(new Map());
7864 case OP_UNMAP: return de::MovePtr<Command>(new UnMap());
7866 case OP_MAP_FLUSH: return de::MovePtr<Command>(new Flush());
7867 case OP_MAP_INVALIDATE: return de::MovePtr<Command>(new Invalidate());
// HostMemoryAccess(read, write, seed): READ, WRITE and MODIFY differ only in
// which of the two flags are set.
7869 case OP_MAP_READ: return de::MovePtr<Command>(new HostMemoryAccess(true, false, rng.getUint32()));
7870 case OP_MAP_WRITE: return de::MovePtr<Command>(new HostMemoryAccess(false, true, rng.getUint32()));
7871 case OP_MAP_MODIFY: return de::MovePtr<Command>(new HostMemoryAccess(true, true, rng.getUint32()));
7873 case OP_BUFFER_CREATE: return de::MovePtr<Command>(new CreateBuffer(usageToBufferUsageFlags(usage), sharing));
7874 case OP_BUFFER_DESTROY: return de::MovePtr<Command>(new DestroyBuffer());
7875 case OP_BUFFER_BINDMEMORY: return de::MovePtr<Command>(new BindBufferMemory());
7877 case OP_IMAGE_CREATE: return de::MovePtr<Command>(new CreateImage(usageToImageUsageFlags(usage), sharing));
7878 case OP_IMAGE_DESTROY: return de::MovePtr<Command>(new DestroyImage());
7879 case OP_IMAGE_BINDMEMORY: return de::MovePtr<Command>(new BindImageMemory());
7881 case OP_QUEUE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new QueueWaitIdle());
7882 case OP_DEVICE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new DeviceWaitIdle());
// Defensive default: unknown ops abort in debug builds and yield a null command.
7885 DE_FATAL("Unknown op");
7886 return de::MovePtr<Command>(DE_NULL);
// Construct a command-buffer-stage CmdCommand for 'op'; the caller owns the
// returned pointer. For layout transitions and pipeline barriers the
// stage/access masks are derived from the reference cache's "full barrier"
// and then randomly thinned — producing barriers that are still legal but
// not maximal, which is exactly what these tests want to exercise.
7890 de::MovePtr<CmdCommand> createCmdCommand (de::Random& rng,
7897 case OP_BUFFER_FILL: return de::MovePtr<CmdCommand>(new FillBuffer(rng.getUint32()));
7898 case OP_BUFFER_UPDATE: return de::MovePtr<CmdCommand>(new UpdateBuffer(rng.getUint32()));
7899 case OP_BUFFER_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyToBuffer());
7900 case OP_BUFFER_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyFromBuffer(rng.getUint32()));
7902 case OP_BUFFER_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyToImage());
7903 case OP_BUFFER_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyFromImage(rng.getUint32()));
7905 case OP_IMAGE_TRANSITION_LAYOUT:
7907 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
7908 DE_ASSERT(state.hasImage);
7909 DE_ASSERT(state.hasBoundImageMemory);
// Occasionally (10%) transition from UNDEFINED, discarding prior contents.
7911 const vk::VkImageLayout srcLayout = rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
7912 const vk::VkImageLayout dstLayout = getRandomNextLayout(rng, usage, srcLayout);
7914 vk::VkPipelineStageFlags dirtySrcStages;
7915 vk::VkAccessFlags dirtySrcAccesses;
7916 vk::VkPipelineStageFlags dirtyDstStages;
7917 vk::VkAccessFlags dirtyDstAccesses;
7919 vk::VkPipelineStageFlags srcStages;
7920 vk::VkAccessFlags srcAccesses;
7921 vk::VkPipelineStageFlags dstStages;
7922 vk::VkAccessFlags dstAccesses;
7924 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
7926 // Try masking some random bits
7927 srcStages = dirtySrcStages;
7928 srcAccesses = dirtySrcAccesses;
7930 dstStages = state.cache.getAllowedStages() & rng.getUint32();
7931 dstAccesses = state.cache.getAllowedAcceses() & rng.getUint32();
7933 // If there are no bits in dst stage mask use all stages
7934 dstStages = dstStages ? dstStages : state.cache.getAllowedStages();
// Source stages mirror the destination stages; this must match the
// corresponding logic in applyOp() so the reference cache stays in sync.
7937 srcStages = dstStages;
7939 return de::MovePtr<CmdCommand>(new ImageTransition(srcStages, srcAccesses, dstStages, dstAccesses, srcLayout, dstLayout));
// Copies/blits record the current layout so the command can use the right
// image layout at execution time.
7942 case OP_IMAGE_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyToBuffer(state.imageLayout));
7943 case OP_IMAGE_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyFromBuffer(rng.getUint32(), state.imageLayout));
7944 case OP_IMAGE_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyToImage(state.imageLayout));
7945 case OP_IMAGE_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyFromImage(rng.getUint32(), state.imageLayout));
7946 case OP_IMAGE_BLIT_TO_IMAGE:
7948 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
7949 return de::MovePtr<CmdCommand>(new ImageBlitToImage(scale, state.imageLayout));
7952 case OP_IMAGE_BLIT_FROM_IMAGE:
7954 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
7955 return de::MovePtr<CmdCommand>(new ImageBlitFromImage(rng.getUint32(), scale, state.imageLayout));
7958 case OP_PIPELINE_BARRIER_GLOBAL:
7959 case OP_PIPELINE_BARRIER_BUFFER:
7960 case OP_PIPELINE_BARRIER_IMAGE:
7962 vk::VkPipelineStageFlags dirtySrcStages;
7963 vk::VkAccessFlags dirtySrcAccesses;
7964 vk::VkPipelineStageFlags dirtyDstStages;
7965 vk::VkAccessFlags dirtyDstAccesses;
7967 vk::VkPipelineStageFlags srcStages;
7968 vk::VkAccessFlags srcAccesses;
7969 vk::VkPipelineStageFlags dstStages;
7970 vk::VkAccessFlags dstAccesses;
7972 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
7974 // Try masking some random bits
7975 srcStages = dirtySrcStages & rng.getUint32();
7976 srcAccesses = dirtySrcAccesses & rng.getUint32();
7978 dstStages = dirtyDstStages & rng.getUint32();
7979 dstAccesses = dirtyDstAccesses & rng.getUint32();
7981 // If there are no bits in stage mask use the original dirty stages
7982 srcStages = srcStages ? srcStages : dirtySrcStages;
7983 dstStages = dstStages ? dstStages : dirtyDstStages;
// Keep src and dst stage masks identical (must mirror applyOp()).
7986 srcStages = dstStages;
7988 PipelineBarrier::Type type;
7990 if (op == OP_PIPELINE_BARRIER_IMAGE)
7991 type = PipelineBarrier::TYPE_IMAGE;
7992 else if (op == OP_PIPELINE_BARRIER_BUFFER)
7993 type = PipelineBarrier::TYPE_BUFFER;
7994 else if (op == OP_PIPELINE_BARRIER_GLOBAL)
7995 type = PipelineBarrier::TYPE_GLOBAL;
// Unreachable for the three ops handled above; kept as a debug trap.
7998 type = PipelineBarrier::TYPE_LAST;
7999 DE_FATAL("Unknown op");
// Only image barriers carry a layout (old == new here: no transition).
8002 if (type == PipelineBarrier::TYPE_IMAGE)
8003 return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::just(state.imageLayout)));
8005 return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::nothing<vk::VkImageLayout>()));
8009 DE_FATAL("Unknown op");
8010 return de::MovePtr<CmdCommand>(DE_NULL);
// Construct a render-pass-stage command for 'op'; the caller owns the
// returned pointer. The de::Random& parameter is deliberately unnamed: none
// of the render-pass ops are randomized, but the signature matches the other
// create* factories.
8014 de::MovePtr<RenderPassCommand> createRenderPassCommand (de::Random&,
8020 case OP_RENDER_VERTEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexBuffer());
8021 case OP_RENDER_INDEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderIndexBuffer());
8023 case OP_RENDER_VERTEX_UNIFORM_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexUniformBuffer());
8024 case OP_RENDER_FRAGMENT_UNIFORM_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderFragmentUniformBuffer());
8026 case OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexUniformTexelBuffer());
8028 case OP_RENDER_VERTEX_STORAGE_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexStorageBuffer());
8029 case OP_RENDER_FRAGMENT_STORAGE_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderFragmentStorageBuffer());
8031 case OP_RENDER_VERTEX_STORAGE_TEXEL_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexStorageTexelBuffer());
8032 case OP_RENDER_VERTEX_STORAGE_IMAGE: return de::MovePtr<RenderPassCommand>(new RenderVertexStorageImage());
8033 case OP_RENDER_VERTEX_SAMPLED_IMAGE: return de::MovePtr<RenderPassCommand>(new RenderVertexSampledImage());
8036 DE_FATAL("Unknown op");
8037 return de::MovePtr<RenderPassCommand>(DE_NULL);
// Randomly build the contents of one render pass, stopping when
// OP_RENDERPASS_END is drawn or 'opCount' is reached, and wrap the result in
// a SubmitRenderPass command. 'nextOpRng' is used only for op *selection* so
// that the op sequence is independent of the state-tracking RNG.
8041 de::MovePtr<CmdCommand> createRenderPassCommands (const Memory& memory,
8042 de::Random& nextOpRng,
8048 vector<RenderPassCommand*> commands;
8052 for (; opNdx < opCount; opNdx++)
8056 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
8058 DE_ASSERT(!ops.empty());
8061 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
8063 if (op == OP_RENDERPASS_END)
// Snapshot the RNG: the command must consume exactly the same random values
// as applyOp() so the two generators remain in lock-step (asserted below).
8069 de::Random rng (state.rng);
8071 commands.push_back(createRenderPassCommand(rng, state, op).release());
8072 applyOp(state, memory, op, usage);
8074 DE_ASSERT(state.rng == rng);
8079 applyOp(state, memory, OP_RENDERPASS_END, usage);
// SubmitRenderPass takes ownership of the raw pointers in 'commands'.
8080 return de::MovePtr<CmdCommand>(new SubmitRenderPass(commands));
// Failure path: free the partially built command list before propagating.
8084 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
8085 delete commands[commandNdx];
// Randomly build the contents of one command buffer, stopping when
// OP_COMMAND_BUFFER_END is drawn or 'opCount' is reached, and wrap the
// result in a SubmitCommandBuffer command. Nested render passes are
// delegated to createRenderPassCommands().
8091 de::MovePtr<Command> createCmdCommands (const Memory& memory,
8092 de::Random& nextOpRng,
8098 vector<CmdCommand*> commands;
8102 for (; opNdx < opCount; opNdx++)
8106 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
8108 DE_ASSERT(!ops.empty());
8111 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
8113 if (op == OP_COMMAND_BUFFER_END)
8119 // \note Command needs to known the state before the operation
8120 if (op == OP_RENDERPASS_BEGIN)
8122 applyOp(state, memory, op, usage);
8123 commands.push_back(createRenderPassCommands(memory, nextOpRng, state, usage, opNdx, opCount).release())
8127 de::Random rng (state.rng);
8129 commands.push_back(createCmdCommand(rng, state, op, usage).release());
8130 applyOp(state, memory, op, usage);
// Verify the command consumed exactly the random values applyOp() did.
8132 DE_ASSERT(state.rng == rng);
8139 applyOp(state, memory, OP_COMMAND_BUFFER_END, usage);
// SubmitCommandBuffer takes ownership of the raw pointers in 'commands'.
8140 return de::MovePtr<Command>(new SubmitCommandBuffer(commands));
// Failure path: free the partially built command list before propagating.
8144 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
8145 delete commands[commandNdx];
8151 void createCommands (vector<Command*>& commands,
8153 const Memory& memory,
8155 vk::VkSharingMode sharingMode,
8158 State state (usage, seed);
8159 // Used to select next operation only
8160 de::Random nextOpRng (seed ^ 12930809);
8162 commands.reserve(opCount);
8164 for (size_t opNdx = 0; opNdx < opCount; opNdx++)
8168 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
8170 DE_ASSERT(!ops.empty());
8173 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
8175 if (op == OP_COMMAND_BUFFER_BEGIN)
8177 applyOp(state, memory, op, usage);
8178 commands.push_back(createCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
8182 de::Random rng (state.rng);
8184 commands.push_back(createHostCommand(op, rng, usage, sharingMode).release());
8185 applyOp(state, memory, op, usage);
8187 // Make sure that random generator is in sync
8188 DE_ASSERT(state.rng == rng);
8193 // Clean up resources
8194 if (state.hasBuffer && state.hasImage)
8196 if (!state.queueIdle)
8197 commands.push_back(new QueueWaitIdle());
8199 if (state.hasBuffer)
8200 commands.push_back(new DestroyBuffer());
8203 commands.push_back(new DestroyImage());
// Test instance driving the randomized pipeline-barrier test. Work is split
// into small "stage" member functions (StageFunc) so iterate() can run one
// stage per call: create commands -> prepare -> execute -> verify, looping
// over iterations and then over all device memory types.
8207 class MemoryTestInstance : public TestInstance
// Pointer-to-member stage function; returns true while more work remains.
8211 typedef bool(MemoryTestInstance::*StageFunc)(void);
8213 MemoryTestInstance (::vkt::Context& context, const TestConfig& config);
8214 ~MemoryTestInstance (void);
8216 tcu::TestStatus iterate (void);
8219 const TestConfig m_config;
8220 const size_t m_iterationCount;
8221 const size_t m_opCount;
8222 const vk::VkPhysicalDeviceMemoryProperties m_memoryProperties;
// Index of the memory type currently being tested.
8223 deUint32 m_memoryTypeNdx;
8226 tcu::ResultCollector m_resultCollector;
// Commands for the current iteration; owned by this instance.
8228 vector<Command*> m_commands;
8229 MovePtr<Memory> m_memory;
8230 MovePtr<Context> m_renderContext;
8231 MovePtr<PrepareContext> m_prepareContext;
// Advance to next iteration / memory type; return false when exhausted.
8233 bool nextIteration (void);
8234 bool nextMemoryType (void);
// The individual pipeline stages invoked through m_stage.
8236 bool createCommandsAndAllocateMemory (void);
8237 bool prepare (void);
8238 bool execute (void);
8240 void resetResources (void);
// Release all per-iteration resources. The device must be idle before the
// commands (and the memory they reference) are freed, hence the wait first.
8243 void MemoryTestInstance::resetResources (void)
8245 const vk::DeviceInterface& vkd = m_context.getDeviceInterface();
8246 const vk::VkDevice device = m_context.getDevice();
8248 VK_CHECK(vkd.deviceWaitIdle(device));
8250 for (size_t commandNdx = 0; commandNdx < m_commands.size(); commandNdx++)
8252 delete m_commands[commandNdx];
8253 m_commands[commandNdx] = DE_NULL;
8257 m_prepareContext.clear();
// Advance to the next iteration for the current memory type; returns true if
// there is more work. When all iterations are done, falls through to the
// next memory type.
8261 bool MemoryTestInstance::nextIteration (void)
8265 if (m_iteration < m_iterationCount)
// Restart the stage pipeline from command creation.
8268 m_stage = &MemoryTestInstance::createCommandsAndAllocateMemory;
8272 return nextMemoryType();
// Advance to the next device memory type; returns true if one remains,
// false when every memory type has been tested (ends the test).
8275 bool MemoryTestInstance::nextMemoryType (void)
// All per-iteration commands must already have been released.
8279 DE_ASSERT(m_commands.empty());
8283 if (m_memoryTypeNdx < m_memoryProperties.memoryTypeCount)
8286 m_stage = &MemoryTestInstance::createCommandsAndAllocateMemory;
// Construct the test instance: log the test configuration and the device's
// memory heaps/types, then build the rendering Context on the universal
// queue. The first stage is command creation for memory type 0.
8297 MemoryTestInstance::MemoryTestInstance (::vkt::Context& context, const TestConfig& config)
8298 : TestInstance (context)
8300 , m_iterationCount (5)
8302 , m_memoryProperties (vk::getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
8303 , m_memoryTypeNdx (0)
8305 , m_stage (&MemoryTestInstance::createCommandsAndAllocateMemory)
8306 , m_resultCollector (context.getTestContext().getLog())
8308 , m_memory (DE_NULL)
8310 TestLog& log = context.getTestContext().getLog();
8312 const tcu::ScopedLogSection section (log, "TestCaseInfo", "Test Case Info");
8314 log << TestLog::Message << "Buffer size: " << config.size << TestLog::EndMessage;
8315 log << TestLog::Message << "Sharing: " << config.sharing << TestLog::EndMessage;
8316 log << TestLog::Message << "Access: " << config.usage << TestLog::EndMessage;
8320 const tcu::ScopedLogSection section (log, "MemoryProperties", "Memory Properties");
8322 for (deUint32 heapNdx = 0; heapNdx < m_memoryProperties.memoryHeapCount; heapNdx++)
8324 const tcu::ScopedLogSection heapSection (log, "Heap" + de::toString(heapNdx), "Heap " + de::toString(heapNdx));
8326 log << TestLog::Message << "Size: " << m_memoryProperties.memoryHeaps[heapNdx].size << TestLog::EndMessage;
8327 log << TestLog::Message << "Flags: " << m_memoryProperties.memoryHeaps[heapNdx].flags << TestLog::EndMessage;
8330 for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < m_memoryProperties.memoryTypeCount; memoryTypeNdx++)
8332 const tcu::ScopedLogSection memoryTypeSection (log, "MemoryType" + de::toString(memoryTypeNdx), "Memory type " + de::toString(memoryTypeNdx))
8334 log << TestLog::Message << "Properties: " << m_memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags << TestLog::EndMessage;
8335 log << TestLog::Message << "Heap: " << m_memoryProperties.memoryTypes[memoryTypeNdx].heapIndex << TestLog::EndMessage;
8340 const vk::InstanceInterface& vki = context.getInstanceInterface();
8341 const vk::VkPhysicalDevice physicalDevice = context.getPhysicalDevice();
8342 const vk::DeviceInterface& vkd = context.getDeviceInterface();
8343 const vk::VkDevice device = context.getDevice();
8344 const vk::VkQueue queue = context.getUniversalQueue();
8345 const deUint32 queueFamilyIndex = context.getUniversalQueueFamilyIndex();
8346 vector<pair<deUint32, vk::VkQueue> > queues;
8348 queues.push_back(std::make_pair(queueFamilyIndex, queue));
// All rendering in this test runs on the universal queue only.
8350 m_renderContext = MovePtr<Context>(new Context(vki, vkd, physicalDevice, device, queue, queueFamilyIndex, queues, context.getBinaryCollection()));
// Destructor. NOTE(review): body not visible in this listing — presumably
// calls resetResources() to free outstanding commands; confirm.
8354 MemoryTestInstance::~MemoryTestInstance (void)
// Stage 1: probe what the current memory type supports, allocate memory and
// generate the randomized command list for this iteration. Returns true to
// continue (even when skipping an unsupported memory type); on failure the
// error is recorded in m_resultCollector and the next memory type is tried.
8359 bool MemoryTestInstance::createCommandsAndAllocateMemory (void)
8361 const vk::VkDevice device = m_context.getDevice();
8362 TestLog& log = m_context.getTestContext().getLog();
8363 const vk::InstanceInterface& vki = m_context.getInstanceInterface();
8364 const vk::VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
8365 const vk::DeviceInterface& vkd = m_context.getDeviceInterface();
8366 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
8367 const tcu::ScopedLogSection section (log, "MemoryType" + de::toString(m_memoryTypeNdx) + "CreateCommands" + de::toString(m_iteration),
8368 "Memory type " + de::toString(m_memoryTypeNdx) + " create commands iteration " + de::toString(m_iteration));
8369 const vector<deUint32>& queues = m_renderContext->getQueueFamilies();
8371 DE_ASSERT(m_commands.empty());
// Host access requires a host-visible memory type; skip others.
8373 if (m_config.usage & (USAGE_HOST_READ | USAGE_HOST_WRITE)
8374 && !(memoryProperties.memoryTypes[m_memoryTypeNdx].propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
8376 log << TestLog::Message << "Memory type not supported" << TestLog::EndMessage;
8378 return nextMemoryType();
// Determine the largest buffer / RGBA8 image this memory type can back.
8384 const vk::VkBufferUsageFlags bufferUsage = usageToBufferUsageFlags(m_config.usage);
8385 const vk::VkImageUsageFlags imageUsage = usageToImageUsageFlags(m_config.usage);
8386 const vk::VkDeviceSize maxBufferSize = bufferUsage != 0
8387 ? roundBufferSizeToWxHx4(findMaxBufferSize(vkd, device, bufferUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx))
8389 const IVec2 maxImageSize = imageUsage != 0
8390 ? findMaxRGBA8ImageSize(vkd, device, imageUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx)
8393 log << TestLog::Message << "Max buffer size: " << maxBufferSize << TestLog::EndMessage;
8394 log << TestLog::Message << "Max RGBA8 image size: " << maxImageSize << TestLog::EndMessage;
8396 // Skip tests if there are no supported operations
8397 if (maxBufferSize == 0
8398 && maxImageSize[0] == 0
8399 && (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)) == 0)
8401 log << TestLog::Message << "Skipping memory type. None of the usages are supported." << TestLog::EndMessage;
8403 return nextMemoryType();
// Deterministic per-(iteration, memory type) seed for reproducibility.
8407 const deUint32 seed = 2830980989u ^ deUint32Hash((deUint32)(m_iteration) * m_memoryProperties.memoryTypeCount + m_memoryTypeNdx);
8409 m_memory = MovePtr<Memory>(new Memory(vki, vkd, physicalDevice, device, m_config.size, m_memoryTypeNdx, maxBufferSize, maxImageSize[0], maxImageSize[1]));
8411 log << TestLog::Message << "Create commands" << TestLog::EndMessage;
8412 createCommands(m_commands, seed, *m_memory, m_config.usage, m_config.sharing, m_opCount);
8414 m_stage = &MemoryTestInstance::prepare;
8418 catch (const tcu::TestError& e)
8420 m_resultCollector.fail("Failed, got exception: " + string(e.getMessage()));
8421 return nextMemoryType();
// Stage 2: let every generated command allocate its Vulkan resources via
// prepare(). A failing command records a failure and skips to the next
// memory type; otherwise the execute stage runs next.
8426 bool MemoryTestInstance::prepare (void)
8428 TestLog& log = m_context.getTestContext().getLog();
8429 const tcu::ScopedLogSection section (log, "MemoryType" + de::toString(m_memoryTypeNdx) + "Prepare" + de::toString(m_iteration),
8430 "Memory type " + de::toString(m_memoryTypeNdx) + " prepare iteration" + de::toString(m_iteration));
8432 m_prepareContext = MovePtr<PrepareContext>(new PrepareContext(*m_renderContext, *m_memory));
8434 DE_ASSERT(!m_commands.empty());
8436 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
8438 Command& command = *m_commands[cmdNdx];
8442 command.prepare(*m_prepareContext);
8444 catch (const tcu::TestError& e)
8446 m_resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to prepare, got exception: " + string(e.getMessage()));
8447 return nextMemoryType();
8451 m_stage = &MemoryTestInstance::execute;
// Stage 3: run every command against the device, then wait for idle so the
// verify stage can safely read results. A failing command records a failure
// and skips to the next iteration.
8455 bool MemoryTestInstance::execute (void)
8457 TestLog& log = m_context.getTestContext().getLog();
8458 const tcu::ScopedLogSection section (log, "MemoryType" + de::toString(m_memoryTypeNdx) + "Execute" + de::toString(m_iteration),
8459 "Memory type " + de::toString(m_memoryTypeNdx) + " execute iteration " + de::toString(m_iteration));
8460 ExecuteContext executeContext (*m_renderContext);
8461 const vk::VkDevice device = m_context.getDevice();
8462 const vk::DeviceInterface& vkd = m_context.getDeviceInterface();
8464 DE_ASSERT(!m_commands.empty());
8466 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
8468 Command& command = *m_commands[cmdNdx];
8472 command.execute(executeContext);
8474 catch (const tcu::TestError& e)
8476 m_resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to execute, got exception: " + string(e.getMessage()));
8477 return nextIteration();
// Ensure all submitted work has completed before verification reads results.
8481 VK_CHECK(vkd.deviceWaitIdle(device));
8483 m_stage = &MemoryTestInstance::verify;
// Stage 4: let every command check its results against the reference model.
// Failures are collected (not thrown); either way the next iteration runs.
8487 bool MemoryTestInstance::verify (void)
8489 DE_ASSERT(!m_commands.empty());
8491 TestLog& log = m_context.getTestContext().getLog();
8492 const tcu::ScopedLogSection section (log, "MemoryType" + de::toString(m_memoryTypeNdx) + "Verify" + de::toString(m_iteration),
8493 "Memory type " + de::toString(m_memoryTypeNdx) + " verify iteration " + de::toString(m_iteration));
8494 VerifyContext verifyContext (log, m_resultCollector, *m_renderContext, m_config.size);
8496 log << TestLog::Message << "Begin verify" << TestLog::EndMessage;
8498 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
8500 Command& command = *m_commands[cmdNdx];
8504 command.verify(verifyContext, cmdNdx);
8506 catch (const tcu::TestError& e)
8508 m_resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to verify, got exception: " + string(e.getMessage()));
8509 return nextIteration();
8513 return nextIteration();
// Framework entry point: run the current stage via the m_stage member
// function pointer. While a stage returns true, report 'incomplete' so the
// framework calls iterate() again; otherwise emit the collected verdict.
8516 tcu::TestStatus MemoryTestInstance::iterate (void)
8518 if ((this->*m_stage)())
8519 return tcu::TestStatus::incomplete();
8521 return tcu::TestStatus(m_resultCollector.getResult(), m_resultCollector.getMessage());
8526 void init (vk::SourceCollections& sources, TestConfig config) const
8528 // Vertex buffer rendering
8529 if (config.usage & USAGE_VERTEX_BUFFER)
8531 const char* const vertexShader =
8533 "layout(location = 0) in highp vec2 a_position;\n"
8534 "void main (void) {\n"
8535 "\tgl_PointSize = 1.0;\n"
8536 "\tgl_Position = vec4(1.998 * a_position - vec2(0.999), 0.0, 1.0);\n"
8539 sources.glslSources.add("vertex-buffer.vert")
8540 << glu::VertexSource(vertexShader);
8543 // Index buffer rendering
8544 if (config.usage & USAGE_INDEX_BUFFER)
8546 const char* const vertexShader =
8549 "void main (void) {\n"
8550 "\tgl_PointSize = 1.0;\n"
8551 "\thighp vec2 pos = vec2(gl_VertexIndex % 256, gl_VertexIndex / 256) / vec2(255.0);\n"
8552 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
8555 sources.glslSources.add("index-buffer.vert")
8556 << glu::VertexSource(vertexShader);
8559 if (config.usage & USAGE_UNIFORM_BUFFER)
8562 std::ostringstream vertexShader;
8567 "layout(set=0, binding=0) uniform Block\n"
8569 "\thighp uvec4 values[" << de::toString<size_t>(MAX_UNIFORM_BUFFER_SIZE / (sizeof(deUint32) * 4)) << "];\n"
8571 "void main (void) {\n"
8572 "\tgl_PointSize = 1.0;\n"
8573 "\thighp uvec4 vecVal = block.values[gl_VertexIndex / 8];\n"
8574 "\thighp uint val;\n"
8575 "\tif (((gl_VertexIndex / 2) % 4 == 0))\n"
8576 "\t\tval = vecVal.x;\n"
8577 "\telse if (((gl_VertexIndex / 2) % 4 == 1))\n"
8578 "\t\tval = vecVal.y;\n"
8579 "\telse if (((gl_VertexIndex / 2) % 4 == 2))\n"
8580 "\t\tval = vecVal.z;\n"
8581 "\telse if (((gl_VertexIndex / 2) % 4 == 3))\n"
8582 "\t\tval = vecVal.w;\n"
8583 "\tif ((gl_VertexIndex % 2) == 0)\n"
8584 "\t\tval = val & 0xFFFFu;\n"
8586 "\t\tval = val >> 16u;\n"
8587 "\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
8588 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
8591 sources.glslSources.add("uniform-buffer.vert")
8592 << glu::VertexSource(vertexShader.str());
8596 const size_t arraySize = MAX_UNIFORM_BUFFER_SIZE / (sizeof(deUint32) * 4);
8597 const size_t arrayIntSize = arraySize * 4;
8598 std::ostringstream fragmentShader;
8603 "layout(location = 0) out highp vec4 o_color;\n"
8604 "layout(set=0, binding=0) uniform Block\n"
8606 "\thighp uvec4 values[" << arraySize << "];\n"
8608 "layout(push_constant) uniform PushC\n"
8611 "\tuint valuesPerPixel;\n"
8613 "void main (void) {\n"
8614 "\thighp uint id = pushC.callId * (" << arrayIntSize << "u / pushC.valuesPerPixel) + uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x);\n"
8615 "\tif (uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x) < pushC.callId * (" << arrayIntSize << "u / pushC.valuesPerPixel))\n"
8617 "\thighp uint value = id;\n"
8618 "\tfor (uint i = 0u; i < pushC.valuesPerPixel; i++)\n"
8620 "\t\thighp uvec4 vecVal = block.values[(value / 4u) % " << arraySize << "u];\n"
8621 "\t\tif ((value % 4u) == 0u)\n"
8622 "\t\t\tvalue = vecVal.x;\n"
8623 "\t\telse if ((value % 4u) == 1u)\n"
8624 "\t\t\tvalue = vecVal.y;\n"
8625 "\t\telse if ((value % 4u) == 2u)\n"
8626 "\t\t\tvalue = vecVal.z;\n"
8627 "\t\telse if ((value % 4u) == 3u)\n"
8628 "\t\t\tvalue = vecVal.w;\n"
8630 "\tuvec4 valueOut = uvec4(value & 0xFFu, (value >> 8u) & 0xFFu, (value >> 16u) & 0xFFu, (value >> 24u) & 0xFFu);\n"
8631 "\to_color = vec4(valueOut) / vec4(255.0);\n"
8634 sources.glslSources.add("uniform-buffer.frag")
8635 << glu::FragmentSource(fragmentShader.str());
8639 if (config.usage & USAGE_STORAGE_BUFFER)
8642 // Vertex storage buffer rendering
8643 const char* const vertexShader =
8646 "layout(set=0, binding=0) buffer Block\n"
8648 "\thighp uvec4 values[];\n"
8650 "void main (void) {\n"
8651 "\tgl_PointSize = 1.0;\n"
8652 "\thighp uvec4 vecVal = block.values[gl_VertexIndex / 8];\n"
8653 "\thighp uint val;\n"
8654 "\tif (((gl_VertexIndex / 2) % 4 == 0))\n"
8655 "\t\tval = vecVal.x;\n"
8656 "\telse if (((gl_VertexIndex / 2) % 4 == 1))\n"
8657 "\t\tval = vecVal.y;\n"
8658 "\telse if (((gl_VertexIndex / 2) % 4 == 2))\n"
8659 "\t\tval = vecVal.z;\n"
8660 "\telse if (((gl_VertexIndex / 2) % 4 == 3))\n"
8661 "\t\tval = vecVal.w;\n"
8662 "\tif ((gl_VertexIndex % 2) == 0)\n"
8663 "\t\tval = val & 0xFFFFu;\n"
8665 "\t\tval = val >> 16u;\n"
8666 "\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
8667 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
8670 sources.glslSources.add("storage-buffer.vert")
8671 << glu::VertexSource(vertexShader);
8675 std::ostringstream fragmentShader;
8680 "layout(location = 0) out highp vec4 o_color;\n"
8681 "layout(set=0, binding=0) buffer Block\n"
8683 "\thighp uvec4 values[];\n"
8685 "layout(push_constant) uniform PushC\n"
8687 "\tuint valuesPerPixel;\n"
8688 "\tuint bufferSize;\n"
8690 "void main (void) {\n"
8691 "\thighp uint arrayIntSize = pushC.bufferSize / 4u;\n"
8692 "\thighp uint id = uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x);\n"
8693 "\thighp uint value = id;\n"
8694 "\tfor (uint i = 0u; i < pushC.valuesPerPixel; i++)\n"
8696 "\t\thighp uvec4 vecVal = block.values[(value / 4u) % (arrayIntSize / 4u)];\n"
8697 "\t\tif ((value % 4u) == 0u)\n"
8698 "\t\t\tvalue = vecVal.x;\n"
8699 "\t\telse if ((value % 4u) == 1u)\n"
8700 "\t\t\tvalue = vecVal.y;\n"
8701 "\t\telse if ((value % 4u) == 2u)\n"
8702 "\t\t\tvalue = vecVal.z;\n"
8703 "\t\telse if ((value % 4u) == 3u)\n"
8704 "\t\t\tvalue = vecVal.w;\n"
8706 "\tuvec4 valueOut = uvec4(value & 0xFFu, (value >> 8u) & 0xFFu, (value >> 16u) & 0xFFu, (value >> 24u) & 0xFFu);\n"
8707 "\to_color = vec4(valueOut) / vec4(255.0);\n"
8710 sources.glslSources.add("storage-buffer.frag")
8711 << glu::FragmentSource(fragmentShader.str());
8715 if (config.usage & USAGE_UNIFORM_TEXEL_BUFFER)
8717 // Vertex uniform texel buffer rendering
8718 const char* const vertexShader =
8720 "#extension GL_EXT_texture_buffer : require\n"
8722 "layout(set=0, binding=0) uniform highp usamplerBuffer u_sampler;\n"
8723 "void main (void) {\n"
8724 "\tgl_PointSize = 1.0;\n"
8725 "\thighp uint val = texelFetch(u_sampler, gl_VertexIndex).x;\n"
8726 "\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
8727 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
8730 sources.glslSources.add("uniform-texel-buffer.vert")
8731 << glu::VertexSource(vertexShader);
8734 if (config.usage & USAGE_STORAGE_TEXEL_BUFFER)
8736 // Vertex storage texel buffer rendering
8737 const char* const vertexShader =
8739 "#extension GL_EXT_texture_buffer : require\n"
8741 "layout(set=0, binding=0, r32ui) uniform readonly highp uimageBuffer u_sampler;\n"
8742 "out gl_PerVertex {\n"
8743 "\tvec4 gl_Position;\n"
8744 "\tfloat gl_PointSize;\n"
8746 "void main (void) {\n"
8747 "\tgl_PointSize = 1.0;\n"
8748 "\thighp uint val = imageLoad(u_sampler, gl_VertexIndex / 2).x;\n"
8749 "\tif (gl_VertexIndex % 2 == 0)\n"
8750 "\t\tval = val & 0xFFFFu;\n"
8752 "\t\tval = val >> 16;\n"
8753 "\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
8754 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
8757 sources.glslSources.add("storage-texel-buffer.vert")
8758 << glu::VertexSource(vertexShader);
8761 if (config.usage & USAGE_STORAGE_IMAGE)
8763 // Vertex storage image
8764 const char* const vertexShader =
8767 "layout(set=0, binding=0, rgba8) uniform image2D u_image;\n"
8768 "out gl_PerVertex {\n"
8769 "\tvec4 gl_Position;\n"
8770 "\tfloat gl_PointSize;\n"
8772 "void main (void) {\n"
8773 "\tgl_PointSize = 1.0;\n"
8774 "\thighp vec4 val = imageLoad(u_image, ivec2((gl_VertexIndex / 2) / imageSize(u_image).x, (gl_VertexIndex / 2) % imageSize(u_image).x));\n"
8775 "\thighp vec2 pos;\n"
8776 "\tif (gl_VertexIndex % 2 == 0)\n"
8777 "\t\tpos = val.xy;\n"
8779 "\t\tpos = val.zw;\n"
8780 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
8783 sources.glslSources.add("storage-image.vert")
8784 << glu::VertexSource(vertexShader);
8787 if (config.usage & USAGE_SAMPLED_IMAGE)
8789 // Vertex storage image
8790 const char* const vertexShader =
8793 "layout(set=0, binding=0) uniform sampler2D u_sampler;\n"
8794 "out gl_PerVertex {\n"
8795 "\tvec4 gl_Position;\n"
8796 "\tfloat gl_PointSize;\n"
8798 "void main (void) {\n"
8799 "\tgl_PointSize = 1.0;\n"
8800 "\thighp vec4 val = texelFetch(u_sampler, ivec2((gl_VertexIndex / 2) / textureSize(u_sampler, 0).x, (gl_VertexIndex / 2) % textureSize(u_sampler, 0).x), 0);\n"
8801 "\thighp vec2 pos;\n"
8802 "\tif (gl_VertexIndex % 2 == 0)\n"
8803 "\t\tpos = val.xy;\n"
8805 "\t\tpos = val.zw;\n"
8806 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
8809 sources.glslSources.add("sampled-image.vert")
8810 << glu::VertexSource(vertexShader);
8814 const char* const vertexShader =
8816 "out gl_PerVertex {\n"
8817 "\tvec4 gl_Position;\n"
8820 "void main (void) {\n"
8821 "\tgl_Position = vec4(((gl_VertexIndex + 2) / 3) % 2 == 0 ? -1.0 : 1.0,\n"
8822 "\t ((gl_VertexIndex + 1) / 3) % 2 == 0 ? -1.0 : 1.0, 0.0, 1.0);\n"
8825 sources.glslSources.add("render-quad.vert")
8826 << glu::VertexSource(vertexShader);
8830 const char* const fragmentShader =
8832 "layout(location = 0) out highp vec4 o_color;\n"
8833 "void main (void) {\n"
8834 "\to_color = vec4(1.0);\n"
8837 sources.glslSources.add("render-white.frag")
8838 << glu::FragmentSource(fragmentShader);
8845 tcu::TestCaseGroup* createPipelineBarrierTests (tcu::TestContext& testCtx)
8847 de::MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "pipeline_barrier", "Pipeline barrier tests."));
8848 const vk::VkDeviceSize sizes[] =
8855 const Usage usages[] =
8861 USAGE_VERTEX_BUFFER,
8863 USAGE_UNIFORM_BUFFER,
8864 USAGE_UNIFORM_TEXEL_BUFFER,
8865 USAGE_STORAGE_BUFFER,
8866 USAGE_STORAGE_TEXEL_BUFFER,
8867 USAGE_STORAGE_IMAGE,
8870 const Usage readUsages[] =
8874 USAGE_VERTEX_BUFFER,
8876 USAGE_UNIFORM_BUFFER,
8877 USAGE_UNIFORM_TEXEL_BUFFER,
8878 USAGE_STORAGE_BUFFER,
8879 USAGE_STORAGE_TEXEL_BUFFER,
8880 USAGE_STORAGE_IMAGE,
8884 const Usage writeUsages[] =
8890 for (size_t writeUsageNdx = 0; writeUsageNdx < DE_LENGTH_OF_ARRAY(writeUsages); writeUsageNdx++)
8892 const Usage writeUsage = writeUsages[writeUsageNdx];
8894 for (size_t readUsageNdx = 0; readUsageNdx < DE_LENGTH_OF_ARRAY(readUsages); readUsageNdx++)
8896 const Usage readUsage = readUsages[readUsageNdx];
8897 const Usage usage = writeUsage | readUsage;
8898 const string usageGroupName (usageToName(usage));
8899 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
8901 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
8903 const vk::VkDeviceSize size = sizes[sizeNdx];
8904 const string testName (de::toString((deUint64)(size)));
8905 const TestConfig config =
8909 vk::VK_SHARING_MODE_EXCLUSIVE
8912 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
8915 group->addChild(usageGroup.get());
8916 usageGroup.release();
8921 Usage all = (Usage)0;
8923 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usages); usageNdx++)
8924 all = all | usages[usageNdx];
8927 const string usageGroupName ("all");
8928 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
8930 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
8932 const vk::VkDeviceSize size = sizes[sizeNdx];
8933 const string testName (de::toString((deUint64)(size)));
8934 const TestConfig config =
8938 vk::VK_SHARING_MODE_EXCLUSIVE
8941 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
8944 group->addChild(usageGroup.get());
8945 usageGroup.release();
8949 const string usageGroupName ("all_device");
8950 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
8952 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
8954 const vk::VkDeviceSize size = sizes[sizeNdx];
8955 const string testName (de::toString((deUint64)(size)));
8956 const TestConfig config =
8958 (Usage)(all & (~(USAGE_HOST_READ|USAGE_HOST_WRITE))),
8960 vk::VK_SHARING_MODE_EXCLUSIVE
8963 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
8966 group->addChild(usageGroup.get());
8967 usageGroup.release();
8971 return group.release();