1 /*-------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2015 Google Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
21 * \brief Pipeline barrier tests
22 *//*--------------------------------------------------------------------*/
24 #include "vktMemoryPipelineBarrierTests.hpp"
26 #include "vktTestCaseUtil.hpp"
29 #include "vkPlatform.hpp"
30 #include "vkRefUtil.hpp"
31 #include "vkQueryUtil.hpp"
32 #include "vkMemUtil.hpp"
33 #include "vkTypeUtil.hpp"
34 #include "vkPrograms.hpp"
36 #include "tcuMaybe.hpp"
37 #include "tcuTextureUtil.hpp"
38 #include "tcuTestLog.hpp"
39 #include "tcuResultCollector.hpp"
40 #include "tcuTexture.hpp"
41 #include "tcuImageCompare.hpp"
43 #include "deUniquePtr.hpp"
44 #include "deStringUtil.hpp"
45 #include "deRandom.hpp"
68 using tcu::ConstPixelBufferAccess;
69 using tcu::PixelBufferAccess;
70 using tcu::TextureFormat;
71 using tcu::TextureLevel;
81 MAX_UNIFORM_BUFFER_SIZE = 1024,
82 MAX_STORAGE_BUFFER_SIZE = (1<<28)
85 // \todo [mika] Add to utilities
87 T divRoundUp (const T& a, const T& b)
89 return (a / b) + (a % b == 0 ? 0 : 1);
94 ALL_PIPELINE_STAGES = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
95 | vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
96 | vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT
97 | vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
98 | vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
99 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
100 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
101 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
102 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
103 | vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
104 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
105 | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
106 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
107 | vk::VK_PIPELINE_STAGE_TRANSFER_BIT
108 | vk::VK_PIPELINE_STAGE_HOST_BIT
113 ALL_ACCESSES = vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT
114 | vk::VK_ACCESS_INDEX_READ_BIT
115 | vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
116 | vk::VK_ACCESS_UNIFORM_READ_BIT
117 | vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
118 | vk::VK_ACCESS_SHADER_READ_BIT
119 | vk::VK_ACCESS_SHADER_WRITE_BIT
120 | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
121 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
122 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
123 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
124 | vk::VK_ACCESS_TRANSFER_READ_BIT
125 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
126 | vk::VK_ACCESS_HOST_READ_BIT
127 | vk::VK_ACCESS_HOST_WRITE_BIT
128 | vk::VK_ACCESS_MEMORY_READ_BIT
129 | vk::VK_ACCESS_MEMORY_WRITE_BIT
134 // Mapped host read and write
135 USAGE_HOST_READ = (0x1u<<0),
136 USAGE_HOST_WRITE = (0x1u<<1),
138 // Copy and other transfer operations
139 USAGE_TRANSFER_SRC = (0x1u<<2),
140 USAGE_TRANSFER_DST = (0x1u<<3),
142 // Buffer usage flags
143 USAGE_INDEX_BUFFER = (0x1u<<4),
144 USAGE_VERTEX_BUFFER = (0x1u<<5),
146 USAGE_UNIFORM_BUFFER = (0x1u<<6),
147 USAGE_STORAGE_BUFFER = (0x1u<<7),
149 USAGE_UNIFORM_TEXEL_BUFFER = (0x1u<<8),
150 USAGE_STORAGE_TEXEL_BUFFER = (0x1u<<9),
152 // \todo [2016-03-09 mika] This is probably almost impossible to do
153 USAGE_INDIRECT_BUFFER = (0x1u<<10),
155 // Texture usage flags
156 USAGE_TEXTURE_SAMPLED = (0x1u<<11),
157 USAGE_TEXTURE_STORAGE = (0x1u<<12),
158 USAGE_COLOR_ATTACHMENT = (0x1u<<13),
159 USAGE_INPUT_ATTACHMENT = (0x1u<<14),
160 USAGE_DEPTH_STENCIL_ATTACHMENT = (0x1u<<15),
163 bool supportsDeviceBufferWrites (Usage usage)
165 if (usage & USAGE_TRANSFER_DST)
168 if (usage & USAGE_STORAGE_BUFFER)
171 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
177 bool supportsDeviceImageWrites (Usage usage)
179 if (usage & USAGE_TRANSFER_DST)
182 if (usage & USAGE_TEXTURE_STORAGE)
185 if (usage & USAGE_COLOR_ATTACHMENT)
191 // Sequential access enums
194 ACCESS_INDIRECT_COMMAND_READ_BIT = 0,
195 ACCESS_INDEX_READ_BIT,
196 ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
197 ACCESS_UNIFORM_READ_BIT,
198 ACCESS_INPUT_ATTACHMENT_READ_BIT,
199 ACCESS_SHADER_READ_BIT,
200 ACCESS_SHADER_WRITE_BIT,
201 ACCESS_COLOR_ATTACHMENT_READ_BIT,
202 ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
203 ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
204 ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
205 ACCESS_TRANSFER_READ_BIT,
206 ACCESS_TRANSFER_WRITE_BIT,
207 ACCESS_HOST_READ_BIT,
208 ACCESS_HOST_WRITE_BIT,
209 ACCESS_MEMORY_READ_BIT,
210 ACCESS_MEMORY_WRITE_BIT,
215 // Sequential stage enums
218 PIPELINESTAGE_TOP_OF_PIPE_BIT = 0,
219 PIPELINESTAGE_BOTTOM_OF_PIPE_BIT,
220 PIPELINESTAGE_DRAW_INDIRECT_BIT,
221 PIPELINESTAGE_VERTEX_INPUT_BIT,
222 PIPELINESTAGE_VERTEX_SHADER_BIT,
223 PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT,
224 PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT,
225 PIPELINESTAGE_GEOMETRY_SHADER_BIT,
226 PIPELINESTAGE_FRAGMENT_SHADER_BIT,
227 PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT,
228 PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT,
229 PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
230 PIPELINESTAGE_COMPUTE_SHADER_BIT,
231 PIPELINESTAGE_TRANSFER_BIT,
232 PIPELINESTAGE_HOST_BIT,
237 PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flags)
241 case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT: return PIPELINESTAGE_TOP_OF_PIPE_BIT;
242 case vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT: return PIPELINESTAGE_BOTTOM_OF_PIPE_BIT;
243 case vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT: return PIPELINESTAGE_DRAW_INDIRECT_BIT;
244 case vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT: return PIPELINESTAGE_VERTEX_INPUT_BIT;
245 case vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT: return PIPELINESTAGE_VERTEX_SHADER_BIT;
246 case vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT;
247 case vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT;
248 case vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT: return PIPELINESTAGE_GEOMETRY_SHADER_BIT;
249 case vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT: return PIPELINESTAGE_FRAGMENT_SHADER_BIT;
250 case vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT;
251 case vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT;
252 case vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT: return PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
253 case vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT: return PIPELINESTAGE_COMPUTE_SHADER_BIT;
254 case vk::VK_PIPELINE_STAGE_TRANSFER_BIT: return PIPELINESTAGE_TRANSFER_BIT;
255 case vk::VK_PIPELINE_STAGE_HOST_BIT: return PIPELINESTAGE_HOST_BIT;
258 DE_FATAL("Unknown pipeline stage flags");
259 return PIPELINESTAGE_LAST;
263 Usage operator| (Usage a, Usage b)
265 return (Usage)((deUint32)a | (deUint32)b);
268 Usage operator& (Usage a, Usage b)
270 return (Usage)((deUint32)a & (deUint32)b);
273 string usageToName (Usage usage)
278 const char* const name;
281 { USAGE_HOST_READ, "host_read" },
282 { USAGE_HOST_WRITE, "host_write" },
284 { USAGE_TRANSFER_SRC, "transfer_src" },
285 { USAGE_TRANSFER_DST, "transfer_dst" },
287 { USAGE_INDEX_BUFFER, "index_buffer" },
288 { USAGE_VERTEX_BUFFER, "vertex_buffer" },
289 { USAGE_UNIFORM_BUFFER, "uniform_buffer" },
290 { USAGE_STORAGE_BUFFER, "storage_buffer" },
291 { USAGE_UNIFORM_TEXEL_BUFFER, "uniform_texel_buffer" },
292 { USAGE_STORAGE_TEXEL_BUFFER, "storage_texel_buffer" },
293 { USAGE_INDIRECT_BUFFER, "indirect_buffer" },
294 { USAGE_TEXTURE_SAMPLED, "sampled_texture" },
295 { USAGE_TEXTURE_STORAGE, "texture_storage" },
296 { USAGE_COLOR_ATTACHMENT, "color_attachment" },
297 { USAGE_INPUT_ATTACHMENT, "input_attachment" },
298 { USAGE_DEPTH_STENCIL_ATTACHMENT, "depth_stencil_attachment" },
301 std::ostringstream stream;
304 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usageNames); usageNdx++)
306 if (usage & usageNames[usageNdx].usage)
313 stream << usageNames[usageNdx].name;
320 vk::VkBufferUsageFlags usageToBufferUsageFlags (Usage usage)
322 vk::VkBufferUsageFlags flags = 0;
324 if (usage & USAGE_TRANSFER_SRC)
325 flags |= vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
327 if (usage & USAGE_TRANSFER_DST)
328 flags |= vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT;
330 if (usage & USAGE_INDEX_BUFFER)
331 flags |= vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
333 if (usage & USAGE_VERTEX_BUFFER)
334 flags |= vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
336 if (usage & USAGE_INDIRECT_BUFFER)
337 flags |= vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
339 if (usage & USAGE_UNIFORM_BUFFER)
340 flags |= vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
342 if (usage & USAGE_STORAGE_BUFFER)
343 flags |= vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
345 if (usage & USAGE_UNIFORM_TEXEL_BUFFER)
346 flags |= vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
348 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
349 flags |= vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
354 vk::VkImageUsageFlags usageToImageUsageFlags (Usage usage)
356 vk::VkImageUsageFlags flags = 0;
358 if (usage & USAGE_TRANSFER_SRC)
359 flags |= vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
361 if (usage & USAGE_TRANSFER_DST)
362 flags |= vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT;
364 if (usage & USAGE_TEXTURE_SAMPLED)
365 flags |= vk::VK_IMAGE_USAGE_SAMPLED_BIT;
367 if (usage & USAGE_TEXTURE_STORAGE)
368 flags |= vk::VK_IMAGE_USAGE_STORAGE_BIT;
370 if (usage & USAGE_COLOR_ATTACHMENT)
371 flags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
373 if (usage & USAGE_INPUT_ATTACHMENT)
374 flags |= vk::VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
376 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
377 flags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
382 vk::VkPipelineStageFlags usageToStageFlags (Usage usage)
384 vk::VkPipelineStageFlags flags = 0;
386 if (usage & (USAGE_HOST_READ|USAGE_HOST_WRITE))
387 flags |= vk::VK_PIPELINE_STAGE_HOST_BIT;
389 if (usage & (USAGE_TRANSFER_SRC|USAGE_TRANSFER_DST))
390 flags |= vk::VK_PIPELINE_STAGE_TRANSFER_BIT;
392 if (usage & (USAGE_VERTEX_BUFFER|USAGE_INDEX_BUFFER))
393 flags |= vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
395 if (usage & USAGE_INDIRECT_BUFFER)
396 flags |= vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
399 (USAGE_UNIFORM_BUFFER
400 | USAGE_STORAGE_BUFFER
401 | USAGE_UNIFORM_TEXEL_BUFFER
402 | USAGE_STORAGE_TEXEL_BUFFER
403 | USAGE_TEXTURE_SAMPLED
404 | USAGE_TEXTURE_STORAGE))
406 flags |= (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
407 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
408 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
409 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
410 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
411 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
414 if (usage & USAGE_INPUT_ATTACHMENT)
415 flags |= vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
417 if (usage & USAGE_COLOR_ATTACHMENT)
418 flags |= vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
420 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
422 flags |= vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
423 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
429 vk::VkAccessFlags usageToAccessFlags (Usage usage)
431 vk::VkAccessFlags flags = 0;
433 if (usage & USAGE_HOST_READ)
434 flags |= vk::VK_ACCESS_HOST_READ_BIT;
436 if (usage & USAGE_HOST_WRITE)
437 flags |= vk::VK_ACCESS_HOST_WRITE_BIT;
439 if (usage & USAGE_TRANSFER_SRC)
440 flags |= vk::VK_ACCESS_TRANSFER_READ_BIT;
442 if (usage & USAGE_TRANSFER_DST)
443 flags |= vk::VK_ACCESS_TRANSFER_WRITE_BIT;
445 if (usage & USAGE_INDEX_BUFFER)
446 flags |= vk::VK_ACCESS_INDEX_READ_BIT;
448 if (usage & USAGE_VERTEX_BUFFER)
449 flags |= vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
451 if (usage & (USAGE_UNIFORM_BUFFER | USAGE_UNIFORM_TEXEL_BUFFER))
452 flags |= vk::VK_ACCESS_UNIFORM_READ_BIT;
454 if (usage & (USAGE_STORAGE_BUFFER
455 | USAGE_STORAGE_TEXEL_BUFFER
456 | USAGE_TEXTURE_SAMPLED
457 | USAGE_TEXTURE_STORAGE))
458 flags |= vk::VK_ACCESS_SHADER_READ_BIT | vk::VK_ACCESS_SHADER_WRITE_BIT;
460 if (usage & USAGE_INDIRECT_BUFFER)
461 flags |= vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
463 if (usage & USAGE_COLOR_ATTACHMENT)
464 flags |= vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
466 if (usage & USAGE_INPUT_ATTACHMENT)
467 flags |= vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
469 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
470 flags |= vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
471 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
479 vk::VkDeviceSize size;
480 vk::VkSharingMode sharing;
483 vk::Move<vk::VkCommandBuffer> createCommandBuffer (const vk::DeviceInterface& vkd,
485 vk::VkCommandPool pool,
486 vk::VkCommandBufferLevel level)
488 const vk::VkCommandBufferAllocateInfo bufferInfo =
490 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
498 return vk::allocateCommandBuffer(vkd, device, &bufferInfo);
501 vk::Move<vk::VkCommandBuffer> createBeginCommandBuffer (const vk::DeviceInterface& vkd,
503 vk::VkCommandPool pool,
504 vk::VkCommandBufferLevel level)
506 const vk::VkCommandBufferInheritanceInfo inheritInfo =
508 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
517 const vk::VkCommandBufferBeginInfo beginInfo =
519 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
522 (level == vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY ? &inheritInfo : (const vk::VkCommandBufferInheritanceInfo*)DE_NULL),
525 vk::Move<vk::VkCommandBuffer> commandBuffer (createCommandBuffer(vkd, device, pool, level));
527 vkd.beginCommandBuffer(*commandBuffer, &beginInfo);
529 return commandBuffer;
532 vk::Move<vk::VkCommandPool> createCommandPool (const vk::DeviceInterface& vkd,
534 deUint32 queueFamilyIndex)
536 const vk::VkCommandPoolCreateInfo poolInfo =
538 vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
541 vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
545 return vk::createCommandPool(vkd, device, &poolInfo);
548 vk::Move<vk::VkBuffer> createBuffer (const vk::DeviceInterface& vkd,
550 vk::VkDeviceSize size,
551 vk::VkBufferUsageFlags usage,
552 vk::VkSharingMode sharingMode,
553 const vector<deUint32>& queueFamilies)
555 const vk::VkBufferCreateInfo createInfo =
557 vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
564 (deUint32)queueFamilies.size(),
568 return vk::createBuffer(vkd, device, &createInfo);
571 vk::Move<vk::VkDeviceMemory> allocMemory (const vk::DeviceInterface& vkd,
573 vk::VkDeviceSize size,
574 deUint32 memoryTypeIndex)
576 const vk::VkMemoryAllocateInfo alloc =
578 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
585 return vk::allocateMemory(vkd, device, &alloc);
588 vk::Move<vk::VkDeviceMemory> bindBufferMemory (const vk::InstanceInterface& vki,
589 const vk::DeviceInterface& vkd,
590 vk::VkPhysicalDevice physicalDevice,
593 vk::VkMemoryPropertyFlags properties)
595 const vk::VkMemoryRequirements memoryRequirements = vk::getBufferMemoryRequirements(vkd, device, buffer);
596 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
597 deUint32 memoryTypeIndex;
599 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
601 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
602 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
606 const vk::VkMemoryAllocateInfo allocationInfo =
608 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
610 memoryRequirements.size,
613 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
615 VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0));
619 catch (const vk::Error& error)
621 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
622 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
624 // Try next memory type/heap if out of memory
628 // Throw all other errors forward
635 TCU_FAIL("Failed to allocate memory for buffer");
638 vk::Move<vk::VkDeviceMemory> bindImageMemory (const vk::InstanceInterface& vki,
639 const vk::DeviceInterface& vkd,
640 vk::VkPhysicalDevice physicalDevice,
643 vk::VkMemoryPropertyFlags properties)
645 const vk::VkMemoryRequirements memoryRequirements = vk::getImageMemoryRequirements(vkd, device, image);
646 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
647 deUint32 memoryTypeIndex;
649 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
651 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
652 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
656 const vk::VkMemoryAllocateInfo allocationInfo =
658 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
660 memoryRequirements.size,
663 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
665 VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0));
669 catch (const vk::Error& error)
671 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
672 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
674 // Try next memory type/heap if out of memory
678 // Throw all other errors forward
685 TCU_FAIL("Failed to allocate memory for image");
688 void queueRun (const vk::DeviceInterface& vkd,
690 vk::VkCommandBuffer commandBuffer)
692 const vk::VkSubmitInfo submitInfo =
694 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
699 (const vk::VkPipelineStageFlags*)DE_NULL,
708 VK_CHECK(vkd.queueSubmit(queue, 1, &submitInfo, 0));
709 VK_CHECK(vkd.queueWaitIdle(queue));
712 void* mapMemory (const vk::DeviceInterface& vkd,
714 vk::VkDeviceMemory memory,
715 vk::VkDeviceSize size)
719 VK_CHECK(vkd.mapMemory(device, memory, 0, size, 0, &ptr));
724 class ReferenceMemory
727 ReferenceMemory (size_t size);
729 void set (size_t pos, deUint8 val);
730 deUint8 get (size_t pos) const;
731 bool isDefined (size_t pos) const;
733 void setDefined (size_t offset, size_t size, const void* data);
734 void setUndefined (size_t offset, size_t size);
735 void setData (size_t offset, size_t size, const void* data);
737 size_t getSize (void) const { return m_data.size(); }
740 vector<deUint8> m_data;
741 vector<deUint64> m_defined;
744 ReferenceMemory::ReferenceMemory (size_t size)
746 , m_defined (size / 64 + (size % 64 == 0 ? 0 : 1), 0ull)
750 void ReferenceMemory::set (size_t pos, deUint8 val)
752 DE_ASSERT(pos < m_data.size());
755 m_defined[pos / 64] |= 0x1ull << (pos % 64);
758 void ReferenceMemory::setData (size_t offset, size_t size, const void* data_)
760 const deUint8* data = (const deUint8*)data_;
762 DE_ASSERT(offset < m_data.size());
763 DE_ASSERT(offset + size <= m_data.size());
765 // \todo [2016-03-09 mika] Optimize
766 for (size_t pos = 0; pos < size; pos++)
768 m_data[offset + pos] = data[pos];
769 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
773 void ReferenceMemory::setUndefined (size_t offset, size_t size)
775 // \todo [2016-03-09 mika] Optimize
776 for (size_t pos = 0; pos < size; pos++)
777 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
780 deUint8 ReferenceMemory::get (size_t pos) const
782 DE_ASSERT(pos < m_data.size());
783 DE_ASSERT(isDefined(pos));
787 bool ReferenceMemory::isDefined (size_t pos) const
789 DE_ASSERT(pos < m_data.size());
791 return (m_defined[pos / 64] & (0x1ull << (pos % 64))) != 0;
797 Memory (const vk::InstanceInterface& vki,
798 const vk::DeviceInterface& vkd,
799 vk::VkPhysicalDevice physicalDevice,
801 vk::VkDeviceSize size,
802 deUint32 memoryTypeIndex,
803 vk::VkDeviceSize maxBufferSize,
804 deInt32 maxImageWidth,
805 deInt32 maxImageHeight);
807 vk::VkDeviceSize getSize (void) const { return m_size; }
808 vk::VkDeviceSize getMaxBufferSize (void) const { return m_maxBufferSize; }
809 bool getSupportBuffers (void) const { return m_maxBufferSize > 0; }
811 deInt32 getMaxImageWidth (void) const { return m_maxImageWidth; }
812 deInt32 getMaxImageHeight (void) const { return m_maxImageHeight; }
813 bool getSupportImages (void) const { return m_maxImageWidth > 0; }
815 const vk::VkMemoryType& getMemoryType (void) const { return m_memoryType; }
816 deUint32 getMemoryTypeIndex (void) const { return m_memoryTypeIndex; }
817 vk::VkDeviceMemory getMemory (void) const { return *m_memory; }
820 const vk::VkDeviceSize m_size;
821 const deUint32 m_memoryTypeIndex;
822 const vk::VkMemoryType m_memoryType;
823 const vk::Unique<vk::VkDeviceMemory> m_memory;
824 const vk::VkDeviceSize m_maxBufferSize;
825 const deInt32 m_maxImageWidth;
826 const deInt32 m_maxImageHeight;
829 vk::VkMemoryType getMemoryTypeInfo (const vk::InstanceInterface& vki,
830 vk::VkPhysicalDevice device,
831 deUint32 memoryTypeIndex)
833 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, device);
835 DE_ASSERT(memoryTypeIndex < memoryProperties.memoryTypeCount);
837 return memoryProperties.memoryTypes[memoryTypeIndex];
840 vk::VkDeviceSize findMaxBufferSize (const vk::DeviceInterface& vkd,
843 vk::VkBufferUsageFlags usage,
844 vk::VkSharingMode sharingMode,
845 const vector<deUint32>& queueFamilies,
847 vk::VkDeviceSize memorySize,
848 deUint32 memoryTypeIndex)
850 vk::VkDeviceSize lastSuccess = 0;
851 vk::VkDeviceSize currentSize = memorySize / 2;
854 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, memorySize, usage, sharingMode, queueFamilies));
855 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
857 if (requirements.size == memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
861 for (vk::VkDeviceSize stepSize = memorySize / 4; currentSize > 0; stepSize /= 2)
863 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, currentSize, usage, sharingMode, queueFamilies));
864 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
866 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
868 lastSuccess = currentSize;
869 currentSize += stepSize;
872 currentSize -= stepSize;
881 // Round size down maximum W * H * 4, where W and H < 4096
882 vk::VkDeviceSize roundBufferSizeToWxHx4 (vk::VkDeviceSize size)
884 const vk::VkDeviceSize maxTextureSize = 4096;
885 vk::VkDeviceSize maxTexelCount = size / 4;
886 vk::VkDeviceSize bestW = de::max(maxTexelCount, maxTextureSize);
887 vk::VkDeviceSize bestH = maxTexelCount / bestW;
889 // \todo [2016-03-09 mika] Could probably be faster?
890 for (vk::VkDeviceSize w = 1; w * w < maxTexelCount && w < maxTextureSize && bestW * bestH * 4 < size; w++)
892 const vk::VkDeviceSize h = maxTexelCount / w;
894 if (bestW * bestH < w * h)
901 return bestW * bestH * 4;
904 // Find RGBA8 image size that has exactly "size" of number of bytes.
905 // "size" must be W * H * 4 where W and H < 4096
906 IVec2 findImageSizeWxHx4 (vk::VkDeviceSize size)
908 const vk::VkDeviceSize maxTextureSize = 4096;
909 vk::VkDeviceSize texelCount = size / 4;
911 DE_ASSERT((size % 4) == 0);
913 // \todo [2016-03-09 mika] Could probably be faster?
914 for (vk::VkDeviceSize w = 1; w < maxTextureSize && w < texelCount; w++)
916 const vk::VkDeviceSize h = texelCount / w;
918 if ((texelCount % w) == 0 && h < maxTextureSize)
919 return IVec2((int)w, (int)h);
922 DE_FATAL("Invalid size");
923 return IVec2(-1, -1);
926 IVec2 findMaxRGBA8ImageSize (const vk::DeviceInterface& vkd,
929 vk::VkImageUsageFlags usage,
930 vk::VkSharingMode sharingMode,
931 const vector<deUint32>& queueFamilies,
933 vk::VkDeviceSize memorySize,
934 deUint32 memoryTypeIndex)
936 IVec2 lastSuccess (0);
940 const deUint32 texelCount = (deUint32)(memorySize / 4);
941 const deUint32 width = (deUint32)deFloatSqrt((float)texelCount);
942 const deUint32 height = texelCount / width;
944 currentSize[0] = deMaxu32(width, height);
945 currentSize[1] = deMinu32(width, height);
948 for (deInt32 stepSize = currentSize[0] / 2; currentSize[0] > 0; stepSize /= 2)
950 const vk::VkImageCreateInfo createInfo =
952 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
956 vk::VK_IMAGE_TYPE_2D,
957 vk::VK_FORMAT_R8G8B8A8_UNORM,
959 (deUint32)currentSize[0],
960 (deUint32)currentSize[1],
964 vk::VK_SAMPLE_COUNT_1_BIT,
965 vk::VK_IMAGE_TILING_OPTIMAL,
968 (deUint32)queueFamilies.size(),
970 vk::VK_IMAGE_LAYOUT_UNDEFINED
972 const vk::Unique<vk::VkImage> image (vk::createImage(vkd, device, &createInfo));
973 const vk::VkMemoryRequirements requirements (vk::getImageMemoryRequirements(vkd, device, *image));
975 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
977 lastSuccess = currentSize;
978 currentSize[0] += stepSize;
979 currentSize[1] += stepSize;
983 currentSize[0] -= stepSize;
984 currentSize[1] -= stepSize;
994 Memory::Memory (const vk::InstanceInterface& vki,
995 const vk::DeviceInterface& vkd,
996 vk::VkPhysicalDevice physicalDevice,
998 vk::VkDeviceSize size,
999 deUint32 memoryTypeIndex,
1000 vk::VkDeviceSize maxBufferSize,
1001 deInt32 maxImageWidth,
1002 deInt32 maxImageHeight)
1004 , m_memoryTypeIndex (memoryTypeIndex)
1005 , m_memoryType (getMemoryTypeInfo(vki, physicalDevice, memoryTypeIndex))
1006 , m_memory (allocMemory(vkd, device, size, memoryTypeIndex))
1007 , m_maxBufferSize (maxBufferSize)
1008 , m_maxImageWidth (maxImageWidth)
1009 , m_maxImageHeight (maxImageHeight)
1016 Context (const vk::InstanceInterface& vki,
1017 const vk::DeviceInterface& vkd,
1018 vk::VkPhysicalDevice physicalDevice,
1019 vk::VkDevice device,
1021 deUint32 queueFamilyIndex,
1022 const vector<pair<deUint32, vk::VkQueue> >& queues,
1023 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection)
1026 , m_physicalDevice (physicalDevice)
1029 , m_queueFamilyIndex (queueFamilyIndex)
1031 , m_commandPool (createCommandPool(vkd, device, queueFamilyIndex))
1032 , m_binaryCollection (binaryCollection)
1034 for (size_t queueNdx = 0; queueNdx < m_queues.size(); queueNdx++)
1035 m_queueFamilies.push_back(m_queues[queueNdx].first);
1038 const vk::InstanceInterface& getInstanceInterface (void) const { return m_vki; }
1039 vk::VkPhysicalDevice getPhysicalDevice (void) const { return m_physicalDevice; }
1040 vk::VkDevice getDevice (void) const { return m_device; }
1041 const vk::DeviceInterface& getDeviceInterface (void) const { return m_vkd; }
1042 vk::VkQueue getQueue (void) const { return m_queue; }
1043 deUint32 getQueueFamily (void) const { return m_queueFamilyIndex; }
1044 const vector<pair<deUint32, vk::VkQueue> >& getQueues (void) const { return m_queues; }
1045 const vector<deUint32> getQueueFamilies (void) const { return m_queueFamilies; }
1046 vk::VkCommandPool getCommandPool (void) const { return *m_commandPool; }
1047 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_binaryCollection; }
1050 const vk::InstanceInterface& m_vki;
1051 const vk::DeviceInterface& m_vkd;
1052 const vk::VkPhysicalDevice m_physicalDevice;
1053 const vk::VkDevice m_device;
1054 const vk::VkQueue m_queue;
1055 const deUint32 m_queueFamilyIndex;
1056 const vector<pair<deUint32, vk::VkQueue> >& m_queues;
1057 const vk::Unique<vk::VkCommandPool> m_commandPool;
1058 const vk::ProgramCollection<vk::ProgramBinary>& m_binaryCollection;
1059 vector<deUint32> m_queueFamilies;
1062 class PrepareContext
1065 PrepareContext (const Context& context,
1066 const Memory& memory)
1067 : m_context (context)
1072 const Memory& getMemory (void) const { return m_memory; }
1073 const Context& getContext (void) const { return m_context; }
1074 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_context.getBinaryCollection(); }
1076 void setBuffer (vk::Move<vk::VkBuffer> buffer,
1077 vk::VkDeviceSize size)
1079 DE_ASSERT(!m_currentImage);
1080 DE_ASSERT(!m_currentBuffer);
1082 m_currentBuffer = buffer;
1083 m_currentBufferSize = size;
1086 vk::VkBuffer getBuffer (void) const { return *m_currentBuffer; }
1087 vk::VkDeviceSize getBufferSize (void) const
1089 DE_ASSERT(m_currentBuffer);
1090 return m_currentBufferSize;
1093 void releaseBuffer (void) { m_currentBuffer.disown(); }
1095 void setImage (vk::Move<vk::VkImage> image,
1096 vk::VkImageLayout layout,
1097 vk::VkDeviceSize memorySize,
1101 DE_ASSERT(!m_currentImage);
1102 DE_ASSERT(!m_currentBuffer);
1104 m_currentImage = image;
1105 m_currentImageMemorySize = memorySize;
1106 m_currentImageLayout = layout;
1107 m_currentImageWidth = width;
1108 m_currentImageHeight = height;
1111 void setImageLayout (vk::VkImageLayout layout)
1113 DE_ASSERT(m_currentImage);
1114 m_currentImageLayout = layout;
1117 vk::VkImage getImage (void) const { return *m_currentImage; }
1118 deInt32 getImageWidth (void) const
1120 DE_ASSERT(m_currentImage);
1121 return m_currentImageWidth;
1123 deInt32 getImageHeight (void) const
1125 DE_ASSERT(m_currentImage);
1126 return m_currentImageHeight;
1128 vk::VkDeviceSize getImageMemorySize (void) const
1130 DE_ASSERT(m_currentImage);
1131 return m_currentImageMemorySize;
1134 void releaseImage (void) { m_currentImage.disown(); }
1136 vk::VkImageLayout getImageLayout (void) const
1138 DE_ASSERT(m_currentImage);
1139 return m_currentImageLayout;
1143 const Context& m_context;
1144 const Memory& m_memory;
1146 vk::Move<vk::VkBuffer> m_currentBuffer;
1147 vk::VkDeviceSize m_currentBufferSize;
1149 vk::Move<vk::VkImage> m_currentImage;
1150 vk::VkDeviceSize m_currentImageMemorySize;
1151 vk::VkImageLayout m_currentImageLayout;
1152 deInt32 m_currentImageWidth;
1153 deInt32 m_currentImageHeight;
1156 class ExecuteContext
1159 ExecuteContext (const Context& context)
1160 : m_context (context)
1164 const Context& getContext (void) const { return m_context; }
1165 void setMapping (void* ptr) { m_mapping = ptr; }
1166 void* getMapping (void) const { return m_mapping; }
1169 const Context& m_context;
1176 VerifyContext (TestLog& log,
1177 tcu::ResultCollector& resultCollector,
1178 const Context& context,
1179 vk::VkDeviceSize size)
1181 , m_resultCollector (resultCollector)
1182 , m_context (context)
1183 , m_reference ((size_t)size)
1187 const Context& getContext (void) const { return m_context; }
1188 TestLog& getLog (void) const { return m_log; }
1189 tcu::ResultCollector& getResultCollector (void) const { return m_resultCollector; }
1191 ReferenceMemory& getReference (void) { return m_reference; }
1192 TextureLevel& getReferenceImage (void) { return m_referenceImage;}
1196 tcu::ResultCollector& m_resultCollector;
1197 const Context& m_context;
1198 ReferenceMemory m_reference;
1199 TextureLevel m_referenceImage;
1205 // Constructor should allocate all non-vulkan resources.
1206 virtual ~Command (void) {}
1208 // Get name of the command
1209 virtual const char* getName (void) const = 0;
1211 // Log prepare operations
1212 virtual void logPrepare (TestLog&, size_t) const {}
1213 // Log executed operations
1214 virtual void logExecute (TestLog&, size_t) const {}
1216 // Prepare should allocate all vulkan resources and resources that require
1217 // that buffer or memory has been already allocated. This should build all
1218 // command buffers etc.
1219 virtual void prepare (PrepareContext&) {}
1221 // Execute command. Write or read mapped memory, submit commands to queue
1223 virtual void execute (ExecuteContext&) {}
1225 // Verify that results are correct.
1226 virtual void verify (VerifyContext&, size_t) {}
1229 // Allow only inheritance
1234 Command (const Command&);
1235 Command& operator& (const Command&);
1238 class Map : public Command
1243 const char* getName (void) const { return "Map"; }
1246 void logExecute (TestLog& log, size_t commandIndex) const
1248 log << TestLog::Message << commandIndex << ":" << getName() << " Map memory" << TestLog::EndMessage;
1251 void prepare (PrepareContext& context)
1253 m_memory = context.getMemory().getMemory();
1254 m_size = context.getMemory().getSize();
1257 void execute (ExecuteContext& context)
1259 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1260 const vk::VkDevice device = context.getContext().getDevice();
1262 context.setMapping(mapMemory(vkd, device, m_memory, m_size));
1266 vk::VkDeviceMemory m_memory;
1267 vk::VkDeviceSize m_size;
1270 class UnMap : public Command
1275 const char* getName (void) const { return "UnMap"; }
1277 void logExecute (TestLog& log, size_t commandIndex) const
1279 log << TestLog::Message << commandIndex << ": Unmap memory" << TestLog::EndMessage;
1282 void prepare (PrepareContext& context)
1284 m_memory = context.getMemory().getMemory();
1287 void execute (ExecuteContext& context)
1289 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1290 const vk::VkDevice device = context.getContext().getDevice();
1292 vkd.unmapMemory(device, m_memory);
1293 context.setMapping(DE_NULL);
1297 vk::VkDeviceMemory m_memory;
1300 class Invalidate : public Command
1303 Invalidate (void) {}
1304 ~Invalidate (void) {}
1305 const char* getName (void) const { return "Invalidate"; }
1307 void logExecute (TestLog& log, size_t commandIndex) const
1309 log << TestLog::Message << commandIndex << ": Invalidate mapped memory" << TestLog::EndMessage;
1312 void prepare (PrepareContext& context)
1314 m_memory = context.getMemory().getMemory();
1315 m_size = context.getMemory().getSize();
1318 void execute (ExecuteContext& context)
1320 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1321 const vk::VkDevice device = context.getContext().getDevice();
1323 vk::invalidateMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1327 vk::VkDeviceMemory m_memory;
1328 vk::VkDeviceSize m_size;
1331 class Flush : public Command
1336 const char* getName (void) const { return "Flush"; }
1338 void logExecute (TestLog& log, size_t commandIndex) const
1340 log << TestLog::Message << commandIndex << ": Flush mapped memory" << TestLog::EndMessage;
1343 void prepare (PrepareContext& context)
1345 m_memory = context.getMemory().getMemory();
1346 m_size = context.getMemory().getSize();
1349 void execute (ExecuteContext& context)
1351 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1352 const vk::VkDevice device = context.getContext().getDevice();
1354 vk::flushMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1358 vk::VkDeviceMemory m_memory;
1359 vk::VkDeviceSize m_size;
1362 // Host memory reads and writes
1363 class HostMemoryAccess : public Command
1366 HostMemoryAccess (bool read, bool write, deUint32 seed);
1367 ~HostMemoryAccess (void) {}
1368 const char* getName (void) const { return "HostMemoryAccess"; }
1370 void logExecute (TestLog& log, size_t commandIndex) const;
1371 void prepare (PrepareContext& context);
1372 void execute (ExecuteContext& context);
1374 void verify (VerifyContext& context, size_t commandIndex);
1379 const deUint32 m_seed;
1382 vector<deUint8> m_readData;
1385 HostMemoryAccess::HostMemoryAccess (bool read, bool write, deUint32 seed)
1392 void HostMemoryAccess::logExecute (TestLog& log, size_t commandIndex) const
1394 log << TestLog::Message << commandIndex << ": Host memory access:" << (m_read ? " read" : "") << (m_write ? " write" : "") << ", seed: " << m_seed << TestLog::EndMessage;
1397 void HostMemoryAccess::prepare (PrepareContext& context)
1399 m_size = (size_t)context.getMemory().getSize();
1402 m_readData.resize(m_size, 0);
1405 void HostMemoryAccess::execute (ExecuteContext& context)
1407 de::Random rng (m_seed);
1408 deUint8* const ptr = (deUint8*)context.getMapping();
1410 if (m_read && m_write)
1412 for (size_t pos = 0; pos < m_size; pos++)
1414 const deUint8 mask = rng.getUint8();
1415 const deUint8 value = ptr[pos];
1417 m_readData[pos] = value;
1418 ptr[pos] = value ^ mask;
1423 for (size_t pos = 0; pos < m_size; pos++)
1425 const deUint8 value = ptr[pos];
1427 m_readData[pos] = value;
1432 for (size_t pos = 0; pos < m_size; pos++)
1434 const deUint8 value = rng.getUint8();
1440 DE_FATAL("Host memory access without read or write.");
1443 void HostMemoryAccess::verify (VerifyContext& context, size_t commandIndex)
1445 tcu::ResultCollector& resultCollector = context.getResultCollector();
1446 ReferenceMemory& reference = context.getReference();
1447 de::Random rng (m_seed);
1449 if (m_read && m_write)
1451 for (size_t pos = 0; pos < m_size; pos++)
1453 const deUint8 mask = rng.getUint8();
1454 const deUint8 value = m_readData[pos];
1456 if (reference.isDefined(pos))
1458 if (value != reference.get(pos))
1460 resultCollector.fail(
1461 de::toString(commandIndex) + ":" + getName()
1462 + " Result differs from reference, Expected: "
1463 + de::toString(tcu::toHex<8>(reference.get(pos)))
1465 + de::toString(tcu::toHex<8>(value))
1467 + de::toString(pos));
1471 reference.set(pos, reference.get(pos) ^ mask);
1477 for (size_t pos = 0; pos < m_size; pos++)
1479 const deUint8 value = m_readData[pos];
1481 if (reference.isDefined(pos))
1483 if (value != reference.get(pos))
1485 resultCollector.fail(
1486 de::toString(commandIndex) + ":" + getName()
1487 + " Result differs from reference, Expected: "
1488 + de::toString(tcu::toHex<8>(reference.get(pos)))
1490 + de::toString(tcu::toHex<8>(value))
1492 + de::toString(pos));
1500 for (size_t pos = 0; pos < m_size; pos++)
1502 const deUint8 value = rng.getUint8();
1504 reference.set(pos, value);
1508 DE_FATAL("Host memory access without read or write.");
1511 class CreateBuffer : public Command
1514 CreateBuffer (vk::VkBufferUsageFlags usage,
1515 vk::VkSharingMode sharing);
1516 ~CreateBuffer (void) {}
1517 const char* getName (void) const { return "CreateBuffer"; }
1519 void logPrepare (TestLog& log, size_t commandIndex) const;
1520 void prepare (PrepareContext& context);
1523 const vk::VkBufferUsageFlags m_usage;
1524 const vk::VkSharingMode m_sharing;
1527 CreateBuffer::CreateBuffer (vk::VkBufferUsageFlags usage,
1528 vk::VkSharingMode sharing)
1530 , m_sharing (sharing)
1534 void CreateBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1536 log << TestLog::Message << commandIndex << ":" << getName() << " Create buffer, Sharing mode: " << m_sharing << ", Usage: " << vk::getBufferUsageFlagsStr(m_usage) << TestLog::EndMessage;
1539 void CreateBuffer::prepare (PrepareContext& context)
1541 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1542 const vk::VkDevice device = context.getContext().getDevice();
1543 const vk::VkDeviceSize bufferSize = context.getMemory().getMaxBufferSize();
1544 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
1546 context.setBuffer(createBuffer(vkd, device, bufferSize, m_usage, m_sharing, queueFamilies), bufferSize);
1549 class DestroyBuffer : public Command
1552 DestroyBuffer (void);
1553 ~DestroyBuffer (void) {}
1554 const char* getName (void) const { return "DestroyBuffer"; }
1556 void logExecute (TestLog& log, size_t commandIndex) const;
1557 void prepare (PrepareContext& context);
1558 void execute (ExecuteContext& context);
1561 vk::Move<vk::VkBuffer> m_buffer;
1564 DestroyBuffer::DestroyBuffer (void)
1568 void DestroyBuffer::prepare (PrepareContext& context)
1570 m_buffer = vk::Move<vk::VkBuffer>(vk::check(context.getBuffer()), vk::Deleter<vk::VkBuffer>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1571 context.releaseBuffer();
1574 void DestroyBuffer::logExecute (TestLog& log, size_t commandIndex) const
1576 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy buffer" << TestLog::EndMessage;
1579 void DestroyBuffer::execute (ExecuteContext& context)
1581 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1582 const vk::VkDevice device = context.getContext().getDevice();
1584 vkd.destroyBuffer(device, m_buffer.disown(), DE_NULL);
1587 class BindBufferMemory : public Command
1590 BindBufferMemory (void) {}
1591 ~BindBufferMemory (void) {}
1592 const char* getName (void) const { return "BindBufferMemory"; }
1594 void logPrepare (TestLog& log, size_t commandIndex) const;
1595 void prepare (PrepareContext& context);
1598 void BindBufferMemory::logPrepare (TestLog& log, size_t commandIndex) const
1600 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to buffer" << TestLog::EndMessage;
1603 void BindBufferMemory::prepare (PrepareContext& context)
1605 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1606 const vk::VkDevice device = context.getContext().getDevice();
1608 VK_CHECK(vkd.bindBufferMemory(device, context.getBuffer(), context.getMemory().getMemory(), 0));
1611 class CreateImage : public Command
1614 CreateImage (vk::VkImageUsageFlags usage,
1615 vk::VkSharingMode sharing);
1616 ~CreateImage (void) {}
1617 const char* getName (void) const { return "CreateImage"; }
1619 void logPrepare (TestLog& log, size_t commandIndex) const;
1620 void prepare (PrepareContext& context);
1621 void verify (VerifyContext& context, size_t commandIndex);
1624 const vk::VkImageUsageFlags m_usage;
1625 const vk::VkSharingMode m_sharing;
1626 deInt32 m_imageWidth;
1627 deInt32 m_imageHeight;
1630 CreateImage::CreateImage (vk::VkImageUsageFlags usage,
1631 vk::VkSharingMode sharing)
1633 , m_sharing (sharing)
1637 void CreateImage::logPrepare (TestLog& log, size_t commandIndex) const
1639 log << TestLog::Message << commandIndex << ":" << getName() << " Create image, sharing: " << m_sharing << ", usage: " << vk::getImageUsageFlagsStr(m_usage) << TestLog::EndMessage;
1642 void CreateImage::prepare (PrepareContext& context)
1644 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1645 const vk::VkDevice device = context.getContext().getDevice();
1646 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
1648 m_imageWidth = context.getMemory().getMaxImageWidth();
1649 m_imageHeight = context.getMemory().getMaxImageHeight();
1652 const vk::VkImageCreateInfo createInfo =
1654 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
1658 vk::VK_IMAGE_TYPE_2D,
1659 vk::VK_FORMAT_R8G8B8A8_UNORM,
1661 (deUint32)m_imageWidth,
1662 (deUint32)m_imageHeight,
1666 vk::VK_SAMPLE_COUNT_1_BIT,
1667 vk::VK_IMAGE_TILING_OPTIMAL,
1670 (deUint32)queueFamilies.size(),
1672 vk::VK_IMAGE_LAYOUT_UNDEFINED
1674 vk::Move<vk::VkImage> image (createImage(vkd, device, &createInfo));
1675 const vk::VkMemoryRequirements requirements = vk::getImageMemoryRequirements(vkd, device, *image);
1677 context.setImage(image, vk::VK_IMAGE_LAYOUT_UNDEFINED, requirements.size, m_imageWidth, m_imageHeight);
1681 void CreateImage::verify (VerifyContext& context, size_t)
1683 context.getReferenceImage() = TextureLevel(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight);
1686 class DestroyImage : public Command
1689 DestroyImage (void);
1690 ~DestroyImage (void) {}
1691 const char* getName (void) const { return "DestroyImage"; }
1693 void logExecute (TestLog& log, size_t commandIndex) const;
1694 void prepare (PrepareContext& context);
1695 void execute (ExecuteContext& context);
1698 vk::Move<vk::VkImage> m_image;
1701 DestroyImage::DestroyImage (void)
1705 void DestroyImage::prepare (PrepareContext& context)
1707 m_image = vk::Move<vk::VkImage>(vk::check(context.getImage()), vk::Deleter<vk::VkImage>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1708 context.releaseImage();
1712 void DestroyImage::logExecute (TestLog& log, size_t commandIndex) const
1714 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy image" << TestLog::EndMessage;
1717 void DestroyImage::execute (ExecuteContext& context)
1719 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1720 const vk::VkDevice device = context.getContext().getDevice();
1722 vkd.destroyImage(device, m_image.disown(), DE_NULL);
1725 class BindImageMemory : public Command
1728 BindImageMemory (void) {}
1729 ~BindImageMemory (void) {}
1730 const char* getName (void) const { return "BindImageMemory"; }
1732 void logPrepare (TestLog& log, size_t commandIndex) const;
1733 void prepare (PrepareContext& context);
1736 void BindImageMemory::logPrepare (TestLog& log, size_t commandIndex) const
1738 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to image" << TestLog::EndMessage;
1741 void BindImageMemory::prepare (PrepareContext& context)
1743 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1744 const vk::VkDevice device = context.getContext().getDevice();
1746 VK_CHECK(vkd.bindImageMemory(device, context.getImage(), context.getMemory().getMemory(), 0));
1749 class QueueWaitIdle : public Command
1752 QueueWaitIdle (void) {}
1753 ~QueueWaitIdle (void) {}
1754 const char* getName (void) const { return "QueuetWaitIdle"; }
1756 void logExecute (TestLog& log, size_t commandIndex) const;
1757 void execute (ExecuteContext& context);
1760 void QueueWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1762 log << TestLog::Message << commandIndex << ":" << getName() << " Queue wait idle" << TestLog::EndMessage;
1765 void QueueWaitIdle::execute (ExecuteContext& context)
1767 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1768 const vk::VkQueue queue = context.getContext().getQueue();
1770 VK_CHECK(vkd.queueWaitIdle(queue));
1773 class DeviceWaitIdle : public Command
1776 DeviceWaitIdle (void) {}
1777 ~DeviceWaitIdle (void) {}
1778 const char* getName (void) const { return "DeviceWaitIdle"; }
1780 void logExecute (TestLog& log, size_t commandIndex) const;
1781 void execute (ExecuteContext& context);
1784 void DeviceWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1786 log << TestLog::Message << commandIndex << ":" << getName() << " Device wait idle" << TestLog::EndMessage;
1789 void DeviceWaitIdle::execute (ExecuteContext& context)
1791 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1792 const vk::VkDevice device = context.getContext().getDevice();
1794 VK_CHECK(vkd.deviceWaitIdle(device));
1800 SubmitContext (const PrepareContext& context,
1801 const vk::VkCommandBuffer commandBuffer)
1802 : m_context (context)
1803 , m_commandBuffer (commandBuffer)
1807 const Memory& getMemory (void) const { return m_context.getMemory(); }
1808 const Context& getContext (void) const { return m_context.getContext(); }
1809 vk::VkCommandBuffer getCommandBuffer (void) const { return m_commandBuffer; }
1811 vk::VkBuffer getBuffer (void) const { return m_context.getBuffer(); }
1812 vk::VkDeviceSize getBufferSize (void) const { return m_context.getBufferSize(); }
1814 vk::VkImage getImage (void) const { return m_context.getImage(); }
1815 deInt32 getImageWidth (void) const { return m_context.getImageWidth(); }
1816 deInt32 getImageHeight (void) const { return m_context.getImageHeight(); }
1819 const PrepareContext& m_context;
1820 const vk::VkCommandBuffer m_commandBuffer;
1826 virtual ~CmdCommand (void) {}
1827 virtual const char* getName (void) const = 0;
1829 // Log things that are done during prepare
1830 virtual void logPrepare (TestLog&, size_t) const {}
1831 // Log submitted calls etc.
1832 virtual void logSubmit (TestLog&, size_t) const {}
1834 // Allocate vulkan resources and prepare for submit.
1835 virtual void prepare (PrepareContext&) {}
1837 // Submit commands to command buffer.
1838 virtual void submit (SubmitContext&) {}
1841 virtual void verify (VerifyContext&, size_t) {}
1844 class SubmitCommandBuffer : public Command
1847 SubmitCommandBuffer (const vector<CmdCommand*>& commands);
1848 ~SubmitCommandBuffer (void);
1850 const char* getName (void) const { return "SubmitCommandBuffer"; }
1851 void logExecute (TestLog& log, size_t commandIndex) const;
1852 void logPrepare (TestLog& log, size_t commandIndex) const;
1854 // Allocate command buffer and submit commands to command buffer
1855 void prepare (PrepareContext& context);
1856 void execute (ExecuteContext& context);
1858 // Verify that results are correct.
1859 void verify (VerifyContext& context, size_t commandIndex);
1862 vector<CmdCommand*> m_commands;
1863 vk::Move<vk::VkCommandBuffer> m_commandBuffer;
1866 SubmitCommandBuffer::SubmitCommandBuffer (const vector<CmdCommand*>& commands)
1867 : m_commands (commands)
1871 SubmitCommandBuffer::~SubmitCommandBuffer (void)
1873 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1874 delete m_commands[cmdNdx];
1877 void SubmitCommandBuffer::prepare (PrepareContext& context)
1879 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1880 const vk::VkDevice device = context.getContext().getDevice();
1881 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
1883 m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);
1885 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1887 CmdCommand& command = *m_commands[cmdNdx];
1889 command.prepare(context);
1893 SubmitContext submitContext (context, *m_commandBuffer);
1895 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1897 CmdCommand& command = *m_commands[cmdNdx];
1899 command.submit(submitContext);
1902 VK_CHECK(vkd.endCommandBuffer(*m_commandBuffer));
1906 void SubmitCommandBuffer::execute (ExecuteContext& context)
1908 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1909 const vk::VkCommandBuffer cmd = *m_commandBuffer;
1910 const vk::VkQueue queue = context.getContext().getQueue();
1911 const vk::VkSubmitInfo submit =
1913 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
1918 (const vk::VkPipelineStageFlags*)DE_NULL,
1927 vkd.queueSubmit(queue, 1, &submit, 0);
1930 void SubmitCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
1932 const string sectionName (de::toString(commandIndex) + ":" + getName());
1933 const tcu::ScopedLogSection section (context.getLog(), sectionName, sectionName);
1935 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1936 m_commands[cmdNdx]->verify(context, cmdNdx);
1939 void SubmitCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1941 const string sectionName (de::toString(commandIndex) + ":" + getName());
1942 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1944 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1945 m_commands[cmdNdx]->logPrepare(log, cmdNdx);
1948 void SubmitCommandBuffer::logExecute (TestLog& log, size_t commandIndex) const
1950 const string sectionName (de::toString(commandIndex) + ":" + getName());
1951 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1953 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1954 m_commands[cmdNdx]->logSubmit(log, cmdNdx);
1957 class PipelineBarrier : public CmdCommand
1967 PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1968 const vk::VkAccessFlags srcAccesses,
1969 const vk::VkPipelineStageFlags dstStages,
1970 const vk::VkAccessFlags dstAccesses,
1972 const tcu::Maybe<vk::VkImageLayout> imageLayout);
1973 ~PipelineBarrier (void) {}
1974 const char* getName (void) const { return "PipelineBarrier"; }
1976 void logSubmit (TestLog& log, size_t commandIndex) const;
1977 void submit (SubmitContext& context);
1980 const vk::VkPipelineStageFlags m_srcStages;
1981 const vk::VkAccessFlags m_srcAccesses;
1982 const vk::VkPipelineStageFlags m_dstStages;
1983 const vk::VkAccessFlags m_dstAccesses;
1985 const tcu::Maybe<vk::VkImageLayout> m_imageLayout;
1988 PipelineBarrier::PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1989 const vk::VkAccessFlags srcAccesses,
1990 const vk::VkPipelineStageFlags dstStages,
1991 const vk::VkAccessFlags dstAccesses,
1993 const tcu::Maybe<vk::VkImageLayout> imageLayout)
1994 : m_srcStages (srcStages)
1995 , m_srcAccesses (srcAccesses)
1996 , m_dstStages (dstStages)
1997 , m_dstAccesses (dstAccesses)
1999 , m_imageLayout (imageLayout)
2003 void PipelineBarrier::logSubmit (TestLog& log, size_t commandIndex) const
2005 log << TestLog::Message << commandIndex << ":" << getName()
2006 << " " << (m_type == TYPE_GLOBAL ? "Global pipeline barrier"
2007 : m_type == TYPE_BUFFER ? "Buffer pipeline barrier"
2008 : "Image pipeline barrier")
2009 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
2010 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses) << TestLog::EndMessage;
2013 void PipelineBarrier::submit (SubmitContext& context)
2015 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2016 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2018 // \todo [2016-01-08 pyry] This could be cleaned up thanks to latest API changes
2023 const vk::VkMemoryBarrier barrier =
2025 vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,
2032 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 1, &barrier, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2038 const vk::VkBufferMemoryBarrier barrier =
2040 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2046 VK_QUEUE_FAMILY_IGNORED,
2047 VK_QUEUE_FAMILY_IGNORED,
2049 context.getBuffer(),
2054 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2060 const vk::VkImageMemoryBarrier barrier =
2062 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2071 VK_QUEUE_FAMILY_IGNORED,
2072 VK_QUEUE_FAMILY_IGNORED,
2076 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2082 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2087 DE_FATAL("Unknown pipeline barrier type");
2091 class ImageTransition : public CmdCommand
2094 ImageTransition (vk::VkPipelineStageFlags srcStages,
2095 vk::VkAccessFlags srcAccesses,
2097 vk::VkPipelineStageFlags dstStages,
2098 vk::VkAccessFlags dstAccesses,
2100 vk::VkImageLayout srcLayout,
2101 vk::VkImageLayout dstLayout);
2103 ~ImageTransition (void) {}
2104 const char* getName (void) const { return "ImageTransition"; }
2106 void prepare (PrepareContext& context);
2107 void logSubmit (TestLog& log, size_t commandIndex) const;
2108 void submit (SubmitContext& context);
2109 void verify (VerifyContext& context, size_t);
2112 const vk::VkPipelineStageFlags m_srcStages;
2113 const vk::VkAccessFlags m_srcAccesses;
2114 const vk::VkPipelineStageFlags m_dstStages;
2115 const vk::VkAccessFlags m_dstAccesses;
2116 const vk::VkImageLayout m_srcLayout;
2117 const vk::VkImageLayout m_dstLayout;
2119 vk::VkDeviceSize m_imageMemorySize;
2122 ImageTransition::ImageTransition (vk::VkPipelineStageFlags srcStages,
2123 vk::VkAccessFlags srcAccesses,
2125 vk::VkPipelineStageFlags dstStages,
2126 vk::VkAccessFlags dstAccesses,
2128 vk::VkImageLayout srcLayout,
2129 vk::VkImageLayout dstLayout)
2130 : m_srcStages (srcStages)
2131 , m_srcAccesses (srcAccesses)
2132 , m_dstStages (dstStages)
2133 , m_dstAccesses (dstAccesses)
2134 , m_srcLayout (srcLayout)
2135 , m_dstLayout (dstLayout)
2139 void ImageTransition::logSubmit (TestLog& log, size_t commandIndex) const
2141 log << TestLog::Message << commandIndex << ":" << getName()
2142 << " Image transition pipeline barrier"
2143 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
2144 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses)
2145 << ", srcLayout: " << m_srcLayout << ", dstLayout: " << m_dstLayout << TestLog::EndMessage;
2148 void ImageTransition::prepare (PrepareContext& context)
2150 DE_ASSERT(context.getImageLayout() == vk::VK_IMAGE_LAYOUT_UNDEFINED || m_srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED || context.getImageLayout() == m_srcLayout);
2152 context.setImageLayout(m_dstLayout);
2153 m_imageMemorySize = context.getImageMemorySize();
2156 void ImageTransition::submit (SubmitContext& context)
2158 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2159 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2160 const vk::VkImageMemoryBarrier barrier =
2162 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2171 VK_QUEUE_FAMILY_IGNORED,
2172 VK_QUEUE_FAMILY_IGNORED,
2176 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2182 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2185 void ImageTransition::verify (VerifyContext& context, size_t)
2187 context.getReference().setUndefined(0, (size_t)m_imageMemorySize);
2190 class FillBuffer : public CmdCommand
2193 FillBuffer (deUint32 value) : m_value(value) {}
2194 ~FillBuffer (void) {}
2195 const char* getName (void) const { return "FillBuffer"; }
2197 void logSubmit (TestLog& log, size_t commandIndex) const;
2198 void submit (SubmitContext& context);
2199 void verify (VerifyContext& context, size_t commandIndex);
2202 const deUint32 m_value;
2203 vk::VkDeviceSize m_bufferSize;
2206 void FillBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2208 log << TestLog::Message << commandIndex << ":" << getName() << " Fill value: " << m_value << TestLog::EndMessage;
2211 void FillBuffer::submit (SubmitContext& context)
2213 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2214 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2215 const vk::VkBuffer buffer = context.getBuffer();
2216 const vk::VkDeviceSize sizeMask = ~(0x3ull); // \note Round down to multiple of 4
2218 m_bufferSize = sizeMask & context.getBufferSize();
2219 vkd.cmdFillBuffer(cmd, buffer, 0, m_bufferSize, m_value);
2222 void FillBuffer::verify (VerifyContext& context, size_t)
2224 ReferenceMemory& reference = context.getReference();
2226 for (size_t ndx = 0; ndx < m_bufferSize; ndx++)
2228 #if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
2229 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(ndx % 4)))));
2231 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(3 - (ndx % 4))))));
2236 class UpdateBuffer : public CmdCommand
2239 UpdateBuffer (deUint32 seed) : m_seed(seed) {}
2240 ~UpdateBuffer (void) {}
2241 const char* getName (void) const { return "UpdateBuffer"; }
2243 void logSubmit (TestLog& log, size_t commandIndex) const;
2244 void submit (SubmitContext& context);
2245 void verify (VerifyContext& context, size_t commandIndex);
2248 const deUint32 m_seed;
2249 vk::VkDeviceSize m_bufferSize;
2252 void UpdateBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2254 log << TestLog::Message << commandIndex << ":" << getName() << " Update buffer, seed: " << m_seed << TestLog::EndMessage;
2257 void UpdateBuffer::submit (SubmitContext& context)
2259 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2260 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2261 const vk::VkBuffer buffer = context.getBuffer();
2262 const size_t blockSize = 65536;
2263 std::vector<deUint8> data (blockSize, 0);
2264 de::Random rng (m_seed);
2266 m_bufferSize = context.getBufferSize();
2268 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2270 for (size_t ndx = 0; ndx < data.size(); ndx++)
2271 data[ndx] = rng.getUint8();
2273 if (m_bufferSize - updated > blockSize)
2274 vkd.cmdUpdateBuffer(cmd, buffer, updated, blockSize, (const deUint32*)(&data[0]));
2276 vkd.cmdUpdateBuffer(cmd, buffer, updated, m_bufferSize - updated, (const deUint32*)(&data[0]));
2280 void UpdateBuffer::verify (VerifyContext& context, size_t)
2282 ReferenceMemory& reference = context.getReference();
2283 const size_t blockSize = 65536;
2284 vector<deUint8> data (blockSize, 0);
2285 de::Random rng (m_seed);
2287 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2289 for (size_t ndx = 0; ndx < data.size(); ndx++)
2290 data[ndx] = rng.getUint8();
2292 if (m_bufferSize - updated > blockSize)
2293 reference.setData(updated, blockSize, &data[0]);
2295 reference.setData(updated, (size_t)(m_bufferSize - updated), &data[0]);
2299 class BufferCopyToBuffer : public CmdCommand
2302 BufferCopyToBuffer (void) {}
2303 ~BufferCopyToBuffer (void) {}
2304 const char* getName (void) const { return "BufferCopyToBuffer"; }
2306 void logPrepare (TestLog& log, size_t commandIndex) const;
2307 void prepare (PrepareContext& context);
2308 void logSubmit (TestLog& log, size_t commandIndex) const;
2309 void submit (SubmitContext& context);
2310 void verify (VerifyContext& context, size_t commandIndex);
2313 vk::VkDeviceSize m_bufferSize;
2314 vk::Move<vk::VkBuffer> m_dstBuffer;
2315 vk::Move<vk::VkDeviceMemory> m_memory;
2318 void BufferCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2320 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for buffer to buffer copy." << TestLog::EndMessage;
2323 void BufferCopyToBuffer::prepare (PrepareContext& context)
2325 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2326 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2327 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2328 const vk::VkDevice device = context.getContext().getDevice();
2329 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2331 m_bufferSize = context.getBufferSize();
2333 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2334 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2337 void BufferCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2339 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to another buffer" << TestLog::EndMessage;
2342 void BufferCopyToBuffer::submit (SubmitContext& context)
2344 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2345 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2346 const vk::VkBufferCopy range =
2352 vkd.cmdCopyBuffer(commandBuffer, context.getBuffer(), *m_dstBuffer, 1, &range);
2355 void BufferCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2357 tcu::ResultCollector& resultCollector (context.getResultCollector());
2358 ReferenceMemory& reference (context.getReference());
2359 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2360 const vk::VkDevice device = context.getContext().getDevice();
2361 const vk::VkQueue queue = context.getContext().getQueue();
2362 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2363 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2364 const vk::VkBufferMemoryBarrier barrier =
2366 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2369 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2370 vk::VK_ACCESS_HOST_READ_BIT,
2372 VK_QUEUE_FAMILY_IGNORED,
2373 VK_QUEUE_FAMILY_IGNORED,
2379 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2381 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2382 queueRun(vkd, queue, *commandBuffer);
2385 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2388 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2391 const deUint8* const data = (const deUint8*)ptr;
2393 for (size_t pos = 0; pos < (size_t)m_bufferSize; pos++)
2395 if (reference.isDefined(pos))
2397 if (data[pos] != reference.get(pos))
2399 resultCollector.fail(
2400 de::toString(commandIndex) + ":" + getName()
2401 + " Result differs from reference, Expected: "
2402 + de::toString(tcu::toHex<8>(reference.get(pos)))
2404 + de::toString(tcu::toHex<8>(data[pos]))
2406 + de::toString(pos));
2413 vkd.unmapMemory(device, *m_memory);
2416 context.getLog() << TestLog::Message << commandIndex << ": Buffer copy to buffer verification failed" << TestLog::EndMessage;
2420 class BufferCopyFromBuffer : public CmdCommand
2423 BufferCopyFromBuffer (deUint32 seed) : m_seed(seed) {}
2424 ~BufferCopyFromBuffer (void) {}
2425 const char* getName (void) const { return "BufferCopyFromBuffer"; }
2427 void logPrepare (TestLog& log, size_t commandIndex) const;
2428 void prepare (PrepareContext& context);
2429 void logSubmit (TestLog& log, size_t commandIndex) const;
2430 void submit (SubmitContext& context);
2431 void verify (VerifyContext& context, size_t commandIndex);
2434 const deUint32 m_seed;
2435 vk::VkDeviceSize m_bufferSize;
2436 vk::Move<vk::VkBuffer> m_srcBuffer;
2437 vk::Move<vk::VkDeviceMemory> m_memory;
2440 void BufferCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2442 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to buffer copy. Seed: " << m_seed << TestLog::EndMessage;
2445 void BufferCopyFromBuffer::prepare (PrepareContext& context)
2447 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2448 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2449 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2450 const vk::VkDevice device = context.getContext().getDevice();
2451 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2453 m_bufferSize = context.getBufferSize();
2454 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2455 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2458 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2459 de::Random rng (m_seed);
2462 deUint8* const data = (deUint8*)ptr;
2464 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2465 data[ndx] = rng.getUint8();
2468 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2469 vkd.unmapMemory(device, *m_memory);
2473 void BufferCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2475 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from another buffer" << TestLog::EndMessage;
2478 void BufferCopyFromBuffer::submit (SubmitContext& context)
2480 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2481 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2482 const vk::VkBufferCopy range =
2488 vkd.cmdCopyBuffer(commandBuffer, *m_srcBuffer, context.getBuffer(), 1, &range);
2491 void BufferCopyFromBuffer::verify (VerifyContext& context, size_t)
2493 ReferenceMemory& reference (context.getReference());
2494 de::Random rng (m_seed);
2496 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2497 reference.set(ndx, rng.getUint8());
2500 class BufferCopyToImage : public CmdCommand
2503 BufferCopyToImage (void) {}
2504 ~BufferCopyToImage (void) {}
2505 const char* getName (void) const { return "BufferCopyToImage"; }
2507 void logPrepare (TestLog& log, size_t commandIndex) const;
2508 void prepare (PrepareContext& context);
2509 void logSubmit (TestLog& log, size_t commandIndex) const;
2510 void submit (SubmitContext& context);
2511 void verify (VerifyContext& context, size_t commandIndex);
2514 deInt32 m_imageWidth;
2515 deInt32 m_imageHeight;
2516 vk::Move<vk::VkImage> m_dstImage;
2517 vk::Move<vk::VkDeviceMemory> m_memory;
2520 void BufferCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
2522 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for buffer to image copy." << TestLog::EndMessage;
2525 void BufferCopyToImage::prepare (PrepareContext& context)
2527 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2528 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2529 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2530 const vk::VkDevice device = context.getContext().getDevice();
2531 const vk::VkQueue queue = context.getContext().getQueue();
2532 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2533 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2534 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2536 m_imageWidth = imageSize[0];
2537 m_imageHeight = imageSize[1];
2540 const vk::VkImageCreateInfo createInfo =
2542 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2546 vk::VK_IMAGE_TYPE_2D,
2547 vk::VK_FORMAT_R8G8B8A8_UNORM,
2549 (deUint32)m_imageWidth,
2550 (deUint32)m_imageHeight,
2553 1, 1, // mipLevels, arrayLayers
2554 vk::VK_SAMPLE_COUNT_1_BIT,
2556 vk::VK_IMAGE_TILING_OPTIMAL,
2557 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2558 vk::VK_SHARING_MODE_EXCLUSIVE,
2560 (deUint32)queueFamilies.size(),
2562 vk::VK_IMAGE_LAYOUT_UNDEFINED
2565 m_dstImage = vk::createImage(vkd, device, &createInfo);
2568 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
2571 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2572 const vk::VkImageMemoryBarrier barrier =
2574 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2578 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2580 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2581 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2583 VK_QUEUE_FAMILY_IGNORED,
2584 VK_QUEUE_FAMILY_IGNORED,
2588 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2590 1, // Mip level count
2596 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2598 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2599 queueRun(vkd, queue, *commandBuffer);
2603 void BufferCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
2605 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to image" << TestLog::EndMessage;
2608 void BufferCopyToImage::submit (SubmitContext& context)
2610 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2611 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2612 const vk::VkBufferImageCopy region =
2617 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2624 (deUint32)m_imageWidth,
2625 (deUint32)m_imageHeight,
2630 vkd.cmdCopyBufferToImage(commandBuffer, context.getBuffer(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
2633 void BufferCopyToImage::verify (VerifyContext& context, size_t commandIndex)
2635 tcu::ResultCollector& resultCollector (context.getResultCollector());
2636 ReferenceMemory& reference (context.getReference());
2637 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2638 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2639 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2640 const vk::VkDevice device = context.getContext().getDevice();
2641 const vk::VkQueue queue = context.getContext().getQueue();
2642 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2643 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2644 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2645 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2646 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2648 const vk::VkImageMemoryBarrier imageBarrier =
2650 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2653 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2654 vk::VK_ACCESS_TRANSFER_READ_BIT,
2656 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2657 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2659 VK_QUEUE_FAMILY_IGNORED,
2660 VK_QUEUE_FAMILY_IGNORED,
2664 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2666 1, // Mip level count
2671 const vk::VkBufferMemoryBarrier bufferBarrier =
2673 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2676 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2677 vk::VK_ACCESS_HOST_READ_BIT,
2679 VK_QUEUE_FAMILY_IGNORED,
2680 VK_QUEUE_FAMILY_IGNORED,
2686 const vk::VkBufferImageCopy region =
2691 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2698 (deUint32)m_imageWidth,
2699 (deUint32)m_imageHeight,
2704 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
2705 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
2706 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2709 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2710 queueRun(vkd, queue, *commandBuffer);
2713 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2715 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2718 const deUint8* const data = (const deUint8*)ptr;
2720 for (size_t pos = 0; pos < (size_t)( 4 * m_imageWidth * m_imageHeight); pos++)
2722 if (reference.isDefined(pos))
2724 if (data[pos] != reference.get(pos))
2726 resultCollector.fail(
2727 de::toString(commandIndex) + ":" + getName()
2728 + " Result differs from reference, Expected: "
2729 + de::toString(tcu::toHex<8>(reference.get(pos)))
2731 + de::toString(tcu::toHex<8>(data[pos]))
2733 + de::toString(pos));
2740 vkd.unmapMemory(device, *memory);
2744 class BufferCopyFromImage : public CmdCommand
2747 BufferCopyFromImage (deUint32 seed) : m_seed(seed) {}
2748 ~BufferCopyFromImage (void) {}
2749 const char* getName (void) const { return "BufferCopyFromImage"; }
2751 void logPrepare (TestLog& log, size_t commandIndex) const;
2752 void prepare (PrepareContext& context);
2753 void logSubmit (TestLog& log, size_t commandIndex) const;
2754 void submit (SubmitContext& context);
2755 void verify (VerifyContext& context, size_t commandIndex);
2758 const deUint32 m_seed;
2759 deInt32 m_imageWidth;
2760 deInt32 m_imageHeight;
2761 vk::Move<vk::VkImage> m_srcImage;
2762 vk::Move<vk::VkDeviceMemory> m_memory;
2765 void BufferCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
2767 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to buffer copy." << TestLog::EndMessage;
2770 void BufferCopyFromImage::prepare (PrepareContext& context)
2772 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2773 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2774 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2775 const vk::VkDevice device = context.getContext().getDevice();
2776 const vk::VkQueue queue = context.getContext().getQueue();
2777 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2778 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2779 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2781 m_imageWidth = imageSize[0];
2782 m_imageHeight = imageSize[1];
2785 const vk::VkImageCreateInfo createInfo =
2787 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2791 vk::VK_IMAGE_TYPE_2D,
2792 vk::VK_FORMAT_R8G8B8A8_UNORM,
2794 (deUint32)m_imageWidth,
2795 (deUint32)m_imageHeight,
2798 1, 1, // mipLevels, arrayLayers
2799 vk::VK_SAMPLE_COUNT_1_BIT,
2801 vk::VK_IMAGE_TILING_OPTIMAL,
2802 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2803 vk::VK_SHARING_MODE_EXCLUSIVE,
2805 (deUint32)queueFamilies.size(),
2807 vk::VK_IMAGE_LAYOUT_UNDEFINED
2810 m_srcImage = vk::createImage(vkd, device, &createInfo);
2813 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
2816 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2817 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2818 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2819 const vk::VkImageMemoryBarrier preImageBarrier =
2821 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2825 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2827 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2828 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2830 VK_QUEUE_FAMILY_IGNORED,
2831 VK_QUEUE_FAMILY_IGNORED,
2835 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2837 1, // Mip level count
2842 const vk::VkImageMemoryBarrier postImageBarrier =
2844 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2847 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2850 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2851 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2853 VK_QUEUE_FAMILY_IGNORED,
2854 VK_QUEUE_FAMILY_IGNORED,
2858 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2860 1, // Mip level count
2865 const vk::VkBufferImageCopy region =
2870 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2877 (deUint32)m_imageWidth,
2878 (deUint32)m_imageHeight,
2884 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2885 de::Random rng (m_seed);
2888 deUint8* const data = (deUint8*)ptr;
2890 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2891 data[ndx] = rng.getUint8();
2894 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2895 vkd.unmapMemory(device, *memory);
2898 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
2899 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
2900 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
2902 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2903 queueRun(vkd, queue, *commandBuffer);
2907 void BufferCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
2909 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from image" << TestLog::EndMessage;
2912 void BufferCopyFromImage::submit (SubmitContext& context)
2914 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2915 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2916 const vk::VkBufferImageCopy region =
2921 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2928 (deUint32)m_imageWidth,
2929 (deUint32)m_imageHeight,
2934 vkd.cmdCopyImageToBuffer(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getBuffer(), 1, ®ion);
2937 void BufferCopyFromImage::verify (VerifyContext& context, size_t)
2939 ReferenceMemory& reference (context.getReference());
2940 de::Random rng (m_seed);
2942 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2943 reference.set(ndx, rng.getUint8());
2946 class ImageCopyToBuffer : public CmdCommand
2949 ImageCopyToBuffer (vk::VkImageLayout imageLayout) : m_imageLayout (imageLayout) {}
2950 ~ImageCopyToBuffer (void) {}
2951 const char* getName (void) const { return "BufferCopyToImage"; }
2953 void logPrepare (TestLog& log, size_t commandIndex) const;
2954 void prepare (PrepareContext& context);
2955 void logSubmit (TestLog& log, size_t commandIndex) const;
2956 void submit (SubmitContext& context);
2957 void verify (VerifyContext& context, size_t commandIndex);
2960 vk::VkImageLayout m_imageLayout;
2961 vk::VkDeviceSize m_bufferSize;
2962 vk::Move<vk::VkBuffer> m_dstBuffer;
2963 vk::Move<vk::VkDeviceMemory> m_memory;
2964 vk::VkDeviceSize m_imageMemorySize;
2965 deInt32 m_imageWidth;
2966 deInt32 m_imageHeight;
2969 void ImageCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2971 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for image to buffer copy." << TestLog::EndMessage;
2974 void ImageCopyToBuffer::prepare (PrepareContext& context)
2976 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2977 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2978 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2979 const vk::VkDevice device = context.getContext().getDevice();
2980 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2982 m_imageWidth = context.getImageWidth();
2983 m_imageHeight = context.getImageHeight();
2984 m_bufferSize = 4 * m_imageWidth * m_imageHeight;
2985 m_imageMemorySize = context.getImageMemorySize();
2986 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2987 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2990 void ImageCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2992 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to buffer" << TestLog::EndMessage;
2995 void ImageCopyToBuffer::submit (SubmitContext& context)
2997 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2998 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2999 const vk::VkBufferImageCopy region =
3004 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3011 (deUint32)m_imageWidth,
3012 (deUint32)m_imageHeight,
3017 vkd.cmdCopyImageToBuffer(commandBuffer, context.getImage(), m_imageLayout, *m_dstBuffer, 1, ®ion);
3020 void ImageCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
3022 tcu::ResultCollector& resultCollector (context.getResultCollector());
3023 ReferenceMemory& reference (context.getReference());
3024 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3025 const vk::VkDevice device = context.getContext().getDevice();
3026 const vk::VkQueue queue = context.getContext().getQueue();
3027 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3028 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3029 const vk::VkBufferMemoryBarrier barrier =
3031 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3034 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3035 vk::VK_ACCESS_HOST_READ_BIT,
3037 VK_QUEUE_FAMILY_IGNORED,
3038 VK_QUEUE_FAMILY_IGNORED,
3044 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3046 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3047 queueRun(vkd, queue, *commandBuffer);
3049 reference.setUndefined(0, (size_t)m_imageMemorySize);
3051 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3052 const ConstPixelBufferAccess referenceImage (context.getReferenceImage().getAccess());
3053 const ConstPixelBufferAccess resultImage (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, ptr);
3055 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3057 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), referenceImage, resultImage, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3058 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3060 vkd.unmapMemory(device, *m_memory);
3064 class ImageCopyFromBuffer : public CmdCommand
3067 ImageCopyFromBuffer (deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
3068 ~ImageCopyFromBuffer (void) {}
3069 const char* getName (void) const { return "ImageCopyFromBuffer"; }
3071 void logPrepare (TestLog& log, size_t commandIndex) const;
3072 void prepare (PrepareContext& context);
3073 void logSubmit (TestLog& log, size_t commandIndex) const;
3074 void submit (SubmitContext& context);
3075 void verify (VerifyContext& context, size_t commandIndex);
3078 const deUint32 m_seed;
3079 const vk::VkImageLayout m_imageLayout;
3080 deInt32 m_imageWidth;
3081 deInt32 m_imageHeight;
3082 vk::VkDeviceSize m_imageMemorySize;
3083 vk::VkDeviceSize m_bufferSize;
3084 vk::Move<vk::VkBuffer> m_srcBuffer;
3085 vk::Move<vk::VkDeviceMemory> m_memory;
3088 void ImageCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
3090 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to image copy. Seed: " << m_seed << TestLog::EndMessage;
3093 void ImageCopyFromBuffer::prepare (PrepareContext& context)
3095 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3096 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3097 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3098 const vk::VkDevice device = context.getContext().getDevice();
3099 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3101 m_imageWidth = context.getImageHeight();
3102 m_imageHeight = context.getImageWidth();
3103 m_imageMemorySize = context.getImageMemorySize();
3104 m_bufferSize = m_imageWidth * m_imageHeight * 4;
3105 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
3106 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
3109 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3110 de::Random rng (m_seed);
3113 deUint8* const data = (deUint8*)ptr;
3115 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
3116 data[ndx] = rng.getUint8();
3119 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3120 vkd.unmapMemory(device, *m_memory);
3124 void ImageCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
3126 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from buffer" << TestLog::EndMessage;
3129 void ImageCopyFromBuffer::submit (SubmitContext& context)
3131 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3132 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3133 const vk::VkBufferImageCopy region =
3138 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3145 (deUint32)m_imageWidth,
3146 (deUint32)m_imageHeight,
3151 vkd.cmdCopyBufferToImage(commandBuffer, *m_srcBuffer, context.getImage(), m_imageLayout, 1, ®ion);
3154 void ImageCopyFromBuffer::verify (VerifyContext& context, size_t)
3156 ReferenceMemory& reference (context.getReference());
3157 de::Random rng (m_seed);
3159 reference.setUndefined(0, (size_t)m_imageMemorySize);
3162 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3164 for (deInt32 y = 0; y < m_imageHeight; y++)
3165 for (deInt32 x = 0; x < m_imageWidth; x++)
3167 const deUint8 r8 = rng.getUint8();
3168 const deUint8 g8 = rng.getUint8();
3169 const deUint8 b8 = rng.getUint8();
3170 const deUint8 a8 = rng.getUint8();
3172 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3177 class ImageCopyFromImage : public CmdCommand
3180 ImageCopyFromImage (deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
3181 ~ImageCopyFromImage (void) {}
3182 const char* getName (void) const { return "ImageCopyFromImage"; }
3184 void logPrepare (TestLog& log, size_t commandIndex) const;
3185 void prepare (PrepareContext& context);
3186 void logSubmit (TestLog& log, size_t commandIndex) const;
3187 void submit (SubmitContext& context);
3188 void verify (VerifyContext& context, size_t commandIndex);
3191 const deUint32 m_seed;
3192 const vk::VkImageLayout m_imageLayout;
3193 deInt32 m_imageWidth;
3194 deInt32 m_imageHeight;
3195 vk::VkDeviceSize m_imageMemorySize;
3196 vk::Move<vk::VkImage> m_srcImage;
3197 vk::Move<vk::VkDeviceMemory> m_memory;
3200 void ImageCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3202 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image copy." << TestLog::EndMessage;
3205 void ImageCopyFromImage::prepare (PrepareContext& context)
3207 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3208 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3209 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3210 const vk::VkDevice device = context.getContext().getDevice();
3211 const vk::VkQueue queue = context.getContext().getQueue();
3212 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3213 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3215 m_imageWidth = context.getImageWidth();
3216 m_imageHeight = context.getImageHeight();
3217 m_imageMemorySize = context.getImageMemorySize();
3220 const vk::VkImageCreateInfo createInfo =
3222 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3226 vk::VK_IMAGE_TYPE_2D,
3227 vk::VK_FORMAT_R8G8B8A8_UNORM,
3229 (deUint32)m_imageWidth,
3230 (deUint32)m_imageHeight,
3233 1, 1, // mipLevels, arrayLayers
3234 vk::VK_SAMPLE_COUNT_1_BIT,
3236 vk::VK_IMAGE_TILING_OPTIMAL,
3237 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3238 vk::VK_SHARING_MODE_EXCLUSIVE,
3240 (deUint32)queueFamilies.size(),
3242 vk::VK_IMAGE_LAYOUT_UNDEFINED
3245 m_srcImage = vk::createImage(vkd, device, &createInfo);
3248 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
3251 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3252 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3253 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3254 const vk::VkImageMemoryBarrier preImageBarrier =
3256 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3260 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3262 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3263 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3265 VK_QUEUE_FAMILY_IGNORED,
3266 VK_QUEUE_FAMILY_IGNORED,
3270 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3272 1, // Mip level count
3277 const vk::VkImageMemoryBarrier postImageBarrier =
3279 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3282 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3285 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3286 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3288 VK_QUEUE_FAMILY_IGNORED,
3289 VK_QUEUE_FAMILY_IGNORED,
3293 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3295 1, // Mip level count
3300 const vk::VkBufferImageCopy region =
3305 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3312 (deUint32)m_imageWidth,
3313 (deUint32)m_imageHeight,
3319 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3320 de::Random rng (m_seed);
3323 deUint8* const data = (deUint8*)ptr;
3325 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
3326 data[ndx] = rng.getUint8();
3329 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3330 vkd.unmapMemory(device, *memory);
3333 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
3334 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
3335 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3337 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3338 queueRun(vkd, queue, *commandBuffer);
3342 void ImageCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3344 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from another image" << TestLog::EndMessage;
3347 void ImageCopyFromImage::submit (SubmitContext& context)
3349 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3350 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3351 const vk::VkImageCopy region =
3354 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3362 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3369 (deUint32)m_imageWidth,
3370 (deUint32)m_imageHeight,
3375 vkd.cmdCopyImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, ®ion);
3378 void ImageCopyFromImage::verify (VerifyContext& context, size_t)
3380 ReferenceMemory& reference (context.getReference());
3381 de::Random rng (m_seed);
3383 reference.setUndefined(0, (size_t)m_imageMemorySize);
3386 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3388 for (deInt32 y = 0; y < m_imageHeight; y++)
3389 for (deInt32 x = 0; x < m_imageWidth; x++)
3391 const deUint8 r8 = rng.getUint8();
3392 const deUint8 g8 = rng.getUint8();
3393 const deUint8 b8 = rng.getUint8();
3394 const deUint8 a8 = rng.getUint8();
3396 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3401 class ImageCopyToImage : public CmdCommand
3404 ImageCopyToImage (vk::VkImageLayout imageLayout) : m_imageLayout(imageLayout) {}
3405 ~ImageCopyToImage (void) {}
3406 const char* getName (void) const { return "ImageCopyToImage"; }
3408 void logPrepare (TestLog& log, size_t commandIndex) const;
3409 void prepare (PrepareContext& context);
3410 void logSubmit (TestLog& log, size_t commandIndex) const;
3411 void submit (SubmitContext& context);
3412 void verify (VerifyContext& context, size_t commandIndex);
3415 const vk::VkImageLayout m_imageLayout;
3416 deInt32 m_imageWidth;
3417 deInt32 m_imageHeight;
3418 vk::VkDeviceSize m_imageMemorySize;
3419 vk::Move<vk::VkImage> m_dstImage;
3420 vk::Move<vk::VkDeviceMemory> m_memory;
3423 void ImageCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
3425 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image copy." << TestLog::EndMessage;
3428 void ImageCopyToImage::prepare (PrepareContext& context)
3430 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3431 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3432 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3433 const vk::VkDevice device = context.getContext().getDevice();
3434 const vk::VkQueue queue = context.getContext().getQueue();
3435 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3436 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3438 m_imageWidth = context.getImageWidth();
3439 m_imageHeight = context.getImageHeight();
3440 m_imageMemorySize = context.getImageMemorySize();
3443 const vk::VkImageCreateInfo createInfo =
3445 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3449 vk::VK_IMAGE_TYPE_2D,
3450 vk::VK_FORMAT_R8G8B8A8_UNORM,
3452 (deUint32)m_imageWidth,
3453 (deUint32)m_imageHeight,
3456 1, 1, // mipLevels, arrayLayers
3457 vk::VK_SAMPLE_COUNT_1_BIT,
3459 vk::VK_IMAGE_TILING_OPTIMAL,
3460 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3461 vk::VK_SHARING_MODE_EXCLUSIVE,
3463 (deUint32)queueFamilies.size(),
3465 vk::VK_IMAGE_LAYOUT_UNDEFINED
3468 m_dstImage = vk::createImage(vkd, device, &createInfo);
3471 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
3474 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3475 const vk::VkImageMemoryBarrier barrier =
3477 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3481 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3483 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3484 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3486 VK_QUEUE_FAMILY_IGNORED,
3487 VK_QUEUE_FAMILY_IGNORED,
3491 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3493 1, // Mip level count
3499 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
3501 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3502 queueRun(vkd, queue, *commandBuffer);
3506 void ImageCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
3508 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to another image" << TestLog::EndMessage;
3511 void ImageCopyToImage::submit (SubmitContext& context)
3513 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3514 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3515 const vk::VkImageCopy region =
3518 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3526 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3533 (deUint32)m_imageWidth,
3534 (deUint32)m_imageHeight,
3539 vkd.cmdCopyImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
3542 void ImageCopyToImage::verify (VerifyContext& context, size_t commandIndex)
3544 tcu::ResultCollector& resultCollector (context.getResultCollector());
3545 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3546 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3547 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3548 const vk::VkDevice device = context.getContext().getDevice();
3549 const vk::VkQueue queue = context.getContext().getQueue();
3550 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3551 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3552 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3553 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3554 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3556 const vk::VkImageMemoryBarrier imageBarrier =
3558 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3561 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3562 vk::VK_ACCESS_TRANSFER_READ_BIT,
3564 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3565 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3567 VK_QUEUE_FAMILY_IGNORED,
3568 VK_QUEUE_FAMILY_IGNORED,
3572 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3574 1, // Mip level count
3579 const vk::VkBufferMemoryBarrier bufferBarrier =
3581 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3584 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3585 vk::VK_ACCESS_HOST_READ_BIT,
3587 VK_QUEUE_FAMILY_IGNORED,
3588 VK_QUEUE_FAMILY_IGNORED,
3593 const vk::VkBufferImageCopy region =
3598 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3605 (deUint32)m_imageWidth,
3606 (deUint32)m_imageHeight,
3611 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
3612 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
3613 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3616 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3617 queueRun(vkd, queue, *commandBuffer);
3620 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3622 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3625 const deUint8* const data = (const deUint8*)ptr;
3626 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, data);
3627 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3629 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3630 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3633 vkd.unmapMemory(device, *memory);
3643 class ImageBlitFromImage : public CmdCommand
3646 ImageBlitFromImage (deUint32 seed, BlitScale scale, vk::VkImageLayout imageLayout) : m_seed(seed), m_scale(scale), m_imageLayout(imageLayout) {}
3647 ~ImageBlitFromImage (void) {}
3648 const char* getName (void) const { return "ImageBlitFromImage"; }
3650 void logPrepare (TestLog& log, size_t commandIndex) const;
3651 void prepare (PrepareContext& context);
3652 void logSubmit (TestLog& log, size_t commandIndex) const;
3653 void submit (SubmitContext& context);
3654 void verify (VerifyContext& context, size_t commandIndex);
3657 const deUint32 m_seed;
3658 const BlitScale m_scale;
3659 const vk::VkImageLayout m_imageLayout;
3660 deInt32 m_imageWidth;
3661 deInt32 m_imageHeight;
3662 vk::VkDeviceSize m_imageMemorySize;
3663 deInt32 m_srcImageWidth;
3664 deInt32 m_srcImageHeight;
3665 vk::Move<vk::VkImage> m_srcImage;
3666 vk::Move<vk::VkDeviceMemory> m_memory;
3669 void ImageBlitFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3671 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image blit." << TestLog::EndMessage;
3674 void ImageBlitFromImage::prepare (PrepareContext& context)
3676 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3677 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3678 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3679 const vk::VkDevice device = context.getContext().getDevice();
3680 const vk::VkQueue queue = context.getContext().getQueue();
3681 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3682 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3684 m_imageWidth = context.getImageWidth();
3685 m_imageHeight = context.getImageHeight();
3686 m_imageMemorySize = context.getImageMemorySize();
3688 if (m_scale == BLIT_SCALE_10)
3690 m_srcImageWidth = m_imageWidth;
3691 m_srcImageHeight = m_imageHeight;
3693 else if (m_scale == BLIT_SCALE_20)
3695 m_srcImageWidth = m_imageWidth / 2;
3696 m_srcImageHeight = m_imageHeight / 2;
3699 DE_FATAL("Unsupported scale");
3702 const vk::VkImageCreateInfo createInfo =
3704 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3708 vk::VK_IMAGE_TYPE_2D,
3709 vk::VK_FORMAT_R8G8B8A8_UNORM,
3711 (deUint32)m_srcImageWidth,
3712 (deUint32)m_srcImageHeight,
3715 1, 1, // mipLevels, arrayLayers
3716 vk::VK_SAMPLE_COUNT_1_BIT,
3718 vk::VK_IMAGE_TILING_OPTIMAL,
3719 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3720 vk::VK_SHARING_MODE_EXCLUSIVE,
3722 (deUint32)queueFamilies.size(),
3724 vk::VK_IMAGE_LAYOUT_UNDEFINED
3727 m_srcImage = vk::createImage(vkd, device, &createInfo);
3730 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
3733 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_srcImageWidth * m_srcImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3734 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3735 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3736 const vk::VkImageMemoryBarrier preImageBarrier =
3738 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3742 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3744 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3745 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3747 VK_QUEUE_FAMILY_IGNORED,
3748 VK_QUEUE_FAMILY_IGNORED,
3752 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3754 1, // Mip level count
3759 const vk::VkImageMemoryBarrier postImageBarrier =
3761 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3764 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3767 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3768 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3770 VK_QUEUE_FAMILY_IGNORED,
3771 VK_QUEUE_FAMILY_IGNORED,
3775 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3777 1, // Mip level count
3782 const vk::VkBufferImageCopy region =
3787 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3794 (deUint32)m_srcImageWidth,
3795 (deUint32)m_srcImageHeight,
3801 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_srcImageWidth * m_srcImageHeight);
3802 de::Random rng (m_seed);
3805 deUint8* const data = (deUint8*)ptr;
3807 for (size_t ndx = 0; ndx < (size_t)(4 * m_srcImageWidth * m_srcImageHeight); ndx++)
3808 data[ndx] = rng.getUint8();
3811 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_srcImageWidth * m_srcImageHeight);
3812 vkd.unmapMemory(device, *memory);
3815 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
3816 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
3817 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3819 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3820 queueRun(vkd, queue, *commandBuffer);
3824 void ImageBlitFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3826 log << TestLog::Message << commandIndex << ":" << getName() << " Blit from another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "") << TestLog::EndMessage;
3829 void ImageBlitFromImage::submit (SubmitContext& context)
3831 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3832 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3833 const vk::VkImageBlit region =
3837 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3853 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3867 vkd.cmdBlitImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, ®ion, vk::VK_FILTER_NEAREST);
3870 void ImageBlitFromImage::verify (VerifyContext& context, size_t)
3872 ReferenceMemory& reference (context.getReference());
3873 de::Random rng (m_seed);
3875 reference.setUndefined(0, (size_t)m_imageMemorySize);
3878 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3880 if (m_scale == BLIT_SCALE_10)
3882 for (deInt32 y = 0; y < m_imageHeight; y++)
3883 for (deInt32 x = 0; x < m_imageWidth; x++)
3885 const deUint8 r8 = rng.getUint8();
3886 const deUint8 g8 = rng.getUint8();
3887 const deUint8 b8 = rng.getUint8();
3888 const deUint8 a8 = rng.getUint8();
3890 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3893 else if (m_scale == BLIT_SCALE_20)
3895 tcu::TextureLevel source (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_srcImageWidth, m_srcImageHeight);
3896 const float xscale = ((float)m_srcImageWidth) / (float)m_imageWidth;
3897 const float yscale = ((float)m_srcImageHeight) / (float)m_imageHeight;
3899 for (deInt32 y = 0; y < m_srcImageHeight; y++)
3900 for (deInt32 x = 0; x < m_srcImageWidth; x++)
3902 const deUint8 r8 = rng.getUint8();
3903 const deUint8 g8 = rng.getUint8();
3904 const deUint8 b8 = rng.getUint8();
3905 const deUint8 a8 = rng.getUint8();
3907 source.getAccess().setPixel(UVec4(r8, g8, b8, a8), x, y);
3910 for (deInt32 y = 0; y < m_imageHeight; y++)
3911 for (deInt32 x = 0; x < m_imageWidth; x++)
3912 refAccess.setPixel(source.getAccess().getPixelUint(int(float(x) * xscale), int(float(y) * yscale)), x, y);
3915 DE_FATAL("Unsupported scale");
3919 class ImageBlitToImage : public CmdCommand
3922 ImageBlitToImage (BlitScale scale, vk::VkImageLayout imageLayout) : m_scale(scale), m_imageLayout(imageLayout) {}
3923 ~ImageBlitToImage (void) {}
3924 const char* getName (void) const { return "ImageBlitToImage"; }
3926 void logPrepare (TestLog& log, size_t commandIndex) const;
3927 void prepare (PrepareContext& context);
3928 void logSubmit (TestLog& log, size_t commandIndex) const;
3929 void submit (SubmitContext& context);
3930 void verify (VerifyContext& context, size_t commandIndex);
3933 const BlitScale m_scale;
3934 const vk::VkImageLayout m_imageLayout;
3935 deInt32 m_imageWidth;
3936 deInt32 m_imageHeight;
3937 vk::VkDeviceSize m_imageMemorySize;
3938 deInt32 m_dstImageWidth;
3939 deInt32 m_dstImageHeight;
3940 vk::Move<vk::VkImage> m_dstImage;
3941 vk::Move<vk::VkDeviceMemory> m_memory;
3944 void ImageBlitToImage::logPrepare (TestLog& log, size_t commandIndex) const
3946 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image blit." << TestLog::EndMessage;
3949 void ImageBlitToImage::prepare (PrepareContext& context)
3951 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3952 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3953 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3954 const vk::VkDevice device = context.getContext().getDevice();
3955 const vk::VkQueue queue = context.getContext().getQueue();
3956 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3957 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3959 m_imageWidth = context.getImageWidth();
3960 m_imageHeight = context.getImageHeight();
3961 m_imageMemorySize = context.getImageMemorySize();
3963 if (m_scale == BLIT_SCALE_10)
3965 m_dstImageWidth = context.getImageWidth();
3966 m_dstImageHeight = context.getImageHeight();
3968 else if (m_scale == BLIT_SCALE_20)
3970 m_dstImageWidth = context.getImageWidth() * 2;
3971 m_dstImageHeight = context.getImageHeight() * 2;
3974 DE_FATAL("Unsupportd blit scale");
3977 const vk::VkImageCreateInfo createInfo =
3979 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3983 vk::VK_IMAGE_TYPE_2D,
3984 vk::VK_FORMAT_R8G8B8A8_UNORM,
3986 (deUint32)m_dstImageWidth,
3987 (deUint32)m_dstImageHeight,
3990 1, 1, // mipLevels, arrayLayers
3991 vk::VK_SAMPLE_COUNT_1_BIT,
3993 vk::VK_IMAGE_TILING_OPTIMAL,
3994 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3995 vk::VK_SHARING_MODE_EXCLUSIVE,
3997 (deUint32)queueFamilies.size(),
3999 vk::VK_IMAGE_LAYOUT_UNDEFINED
4002 m_dstImage = vk::createImage(vkd, device, &createInfo);
4005 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
4008 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4009 const vk::VkImageMemoryBarrier barrier =
4011 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4015 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4017 vk::VK_IMAGE_LAYOUT_UNDEFINED,
4018 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4020 VK_QUEUE_FAMILY_IGNORED,
4021 VK_QUEUE_FAMILY_IGNORED,
4025 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4027 1, // Mip level count
4033 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
4035 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4036 queueRun(vkd, queue, *commandBuffer);
4040 void ImageBlitToImage::logSubmit (TestLog& log, size_t commandIndex) const
4042 log << TestLog::Message << commandIndex << ":" << getName() << " Blit image to another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "") << TestLog::EndMessage;
4045 void ImageBlitToImage::submit (SubmitContext& context)
4047 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4048 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4049 const vk::VkImageBlit region =
4053 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4069 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4083 vkd.cmdBlitImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion, vk::VK_FILTER_NEAREST);
4086 void ImageBlitToImage::verify (VerifyContext& context, size_t commandIndex)
4088 tcu::ResultCollector& resultCollector (context.getResultCollector());
4089 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4090 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4091 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4092 const vk::VkDevice device = context.getContext().getDevice();
4093 const vk::VkQueue queue = context.getContext().getQueue();
4094 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
4095 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4096 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
4097 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_dstImageWidth * m_dstImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4098 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
4100 const vk::VkImageMemoryBarrier imageBarrier =
4102 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4105 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4106 vk::VK_ACCESS_TRANSFER_READ_BIT,
4108 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4109 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4111 VK_QUEUE_FAMILY_IGNORED,
4112 VK_QUEUE_FAMILY_IGNORED,
4116 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4118 1, // Mip level count
4123 const vk::VkBufferMemoryBarrier bufferBarrier =
4125 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4128 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4129 vk::VK_ACCESS_HOST_READ_BIT,
4131 VK_QUEUE_FAMILY_IGNORED,
4132 VK_QUEUE_FAMILY_IGNORED,
4137 const vk::VkBufferImageCopy region =
4142 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4149 (deUint32)m_dstImageWidth,
4150 (deUint32)m_dstImageHeight,
4155 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
4156 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
4157 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4160 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4161 queueRun(vkd, queue, *commandBuffer);
4164 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_dstImageWidth * m_dstImageHeight);
4166 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_dstImageWidth * m_dstImageHeight);
4168 if (m_scale == BLIT_SCALE_10)
4170 const deUint8* const data = (const deUint8*)ptr;
4171 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4172 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
4174 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4175 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4177 else if (m_scale == BLIT_SCALE_20)
4179 const deUint8* const data = (const deUint8*)ptr;
4180 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4181 tcu::TextureLevel reference (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1);
4184 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
4186 for (deInt32 y = 0; y < m_dstImageHeight; y++)
4187 for (deInt32 x = 0; x < m_dstImageWidth; x++)
4189 reference.getAccess().setPixel(refAccess.getPixel(x/2, y/2), x, y);
4193 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), reference.getAccess(), resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4194 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4197 DE_FATAL("Unknown scale");
4199 vkd.unmapMemory(device, *memory);
4203 class PrepareRenderPassContext
4206 PrepareRenderPassContext (PrepareContext& context,
4207 vk::VkRenderPass renderPass,
4208 vk::VkFramebuffer framebuffer,
4209 deInt32 targetWidth,
4210 deInt32 targetHeight)
4211 : m_context (context)
4212 , m_renderPass (renderPass)
4213 , m_framebuffer (framebuffer)
4214 , m_targetWidth (targetWidth)
4215 , m_targetHeight (targetHeight)
4219 const Memory& getMemory (void) const { return m_context.getMemory(); }
4220 const Context& getContext (void) const { return m_context.getContext(); }
4221 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_context.getBinaryCollection(); }
4223 vk::VkBuffer getBuffer (void) const { return m_context.getBuffer(); }
4224 vk::VkDeviceSize getBufferSize (void) const { return m_context.getBufferSize(); }
4226 vk::VkImage getImage (void) const { return m_context.getImage(); }
4227 deInt32 getImageWidth (void) const { return m_context.getImageWidth(); }
4228 deInt32 getImageHeight (void) const { return m_context.getImageHeight(); }
4229 vk::VkImageLayout getImageLayout (void) const { return m_context.getImageLayout(); }
4231 deInt32 getTargetWidth (void) const { return m_targetWidth; }
4232 deInt32 getTargetHeight (void) const { return m_targetHeight; }
4234 vk::VkRenderPass getRenderPass (void) const { return m_renderPass; }
4237 PrepareContext& m_context;
4238 const vk::VkRenderPass m_renderPass;
4239 const vk::VkFramebuffer m_framebuffer;
4240 const deInt32 m_targetWidth;
4241 const deInt32 m_targetHeight;
4244 class VerifyRenderPassContext
4247 VerifyRenderPassContext (VerifyContext& context,
4248 deInt32 targetWidth,
4249 deInt32 targetHeight)
4250 : m_context (context)
4251 , m_referenceTarget (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), targetWidth, targetHeight)
4255 const Context& getContext (void) const { return m_context.getContext(); }
4256 TestLog& getLog (void) const { return m_context.getLog(); }
4257 tcu::ResultCollector& getResultCollector (void) const { return m_context.getResultCollector(); }
4259 TextureLevel& getReferenceTarget (void) { return m_referenceTarget; }
4261 ReferenceMemory& getReference (void) { return m_context.getReference(); }
4262 TextureLevel& getReferenceImage (void) { return m_context.getReferenceImage();}
4265 VerifyContext& m_context;
4266 TextureLevel m_referenceTarget;
4269 class RenderPassCommand
4272 virtual ~RenderPassCommand (void) {}
4273 virtual const char* getName (void) const = 0;
4275 // Log things that are done during prepare
4276 virtual void logPrepare (TestLog&, size_t) const {}
4277 // Log submitted calls etc.
4278 virtual void logSubmit (TestLog&, size_t) const {}
4280 // Allocate vulkan resources and prepare for submit.
4281 virtual void prepare (PrepareRenderPassContext&) {}
4283 // Submit commands to command buffer.
4284 virtual void submit (SubmitContext&) {}
4287 virtual void verify (VerifyRenderPassContext&, size_t) {}
4290 class SubmitRenderPass : public CmdCommand
4293 SubmitRenderPass (const vector<RenderPassCommand*>& commands);
4294 ~SubmitRenderPass (void);
4295 const char* getName (void) const { return "SubmitRenderPass"; }
4297 void logPrepare (TestLog&, size_t) const;
4298 void logSubmit (TestLog&, size_t) const;
4300 void prepare (PrepareContext&);
4301 void submit (SubmitContext&);
4303 void verify (VerifyContext&, size_t);
4306 const deInt32 m_targetWidth;
4307 const deInt32 m_targetHeight;
4308 vk::Move<vk::VkRenderPass> m_renderPass;
4309 vk::Move<vk::VkDeviceMemory> m_colorTargetMemory;
4310 de::MovePtr<vk::Allocation> m_colorTargetMemory2;
4311 vk::Move<vk::VkImage> m_colorTarget;
4312 vk::Move<vk::VkImageView> m_colorTargetView;
4313 vk::Move<vk::VkFramebuffer> m_framebuffer;
4314 vector<RenderPassCommand*> m_commands;
4317 SubmitRenderPass::SubmitRenderPass (const vector<RenderPassCommand*>& commands)
4318 : m_targetWidth (256)
4319 , m_targetHeight (256)
4320 , m_commands (commands)
4324 SubmitRenderPass::~SubmitRenderPass()
4326 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4327 delete m_commands[cmdNdx];
4330 void SubmitRenderPass::logPrepare (TestLog& log, size_t commandIndex) const
4332 const string sectionName (de::toString(commandIndex) + ":" + getName());
4333 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4335 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4337 RenderPassCommand& command = *m_commands[cmdNdx];
4338 command.logPrepare(log, cmdNdx);
4342 void SubmitRenderPass::logSubmit (TestLog& log, size_t commandIndex) const
4344 const string sectionName (de::toString(commandIndex) + ":" + getName());
4345 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4347 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4349 RenderPassCommand& command = *m_commands[cmdNdx];
4350 command.logSubmit(log, cmdNdx);
4354 void SubmitRenderPass::prepare (PrepareContext& context)
4356 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4357 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4358 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4359 const vk::VkDevice device = context.getContext().getDevice();
4360 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
4362 const vk::VkAttachmentReference colorAttachments[] =
4364 { 0, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }
4366 const vk::VkSubpassDescription subpass =
4369 vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
4374 DE_LENGTH_OF_ARRAY(colorAttachments),
4381 const vk::VkAttachmentDescription attachment =
4384 vk::VK_FORMAT_R8G8B8A8_UNORM,
4385 vk::VK_SAMPLE_COUNT_1_BIT,
4387 vk::VK_ATTACHMENT_LOAD_OP_CLEAR,
4388 vk::VK_ATTACHMENT_STORE_OP_STORE,
4390 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
4391 vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,
4393 vk::VK_IMAGE_LAYOUT_UNDEFINED,
4394 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
4397 const vk::VkImageCreateInfo createInfo =
4399 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
4403 vk::VK_IMAGE_TYPE_2D,
4404 vk::VK_FORMAT_R8G8B8A8_UNORM,
4405 { (deUint32)m_targetWidth, (deUint32)m_targetHeight, 1u },
4408 vk::VK_SAMPLE_COUNT_1_BIT,
4409 vk::VK_IMAGE_TILING_OPTIMAL,
4410 vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
4411 vk::VK_SHARING_MODE_EXCLUSIVE,
4412 (deUint32)queueFamilies.size(),
4414 vk::VK_IMAGE_LAYOUT_UNDEFINED
4417 m_colorTarget = vk::createImage(vkd, device, &createInfo);
4420 m_colorTargetMemory = bindImageMemory(vki, vkd, physicalDevice, device, *m_colorTarget, 0);
4423 const vk::VkImageViewCreateInfo createInfo =
4425 vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
4430 vk::VK_IMAGE_VIEW_TYPE_2D,
4431 vk::VK_FORMAT_R8G8B8A8_UNORM,
4433 vk::VK_COMPONENT_SWIZZLE_R,
4434 vk::VK_COMPONENT_SWIZZLE_G,
4435 vk::VK_COMPONENT_SWIZZLE_B,
4436 vk::VK_COMPONENT_SWIZZLE_A
4439 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4447 m_colorTargetView = vk::createImageView(vkd, device, &createInfo);
4450 const vk::VkRenderPassCreateInfo createInfo =
4452 vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
4466 m_renderPass = vk::createRenderPass(vkd, device, &createInfo);
4470 const vk::VkImageView imageViews[] =
4474 const vk::VkFramebufferCreateInfo createInfo =
4476 vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
4481 DE_LENGTH_OF_ARRAY(imageViews),
4483 (deUint32)m_targetWidth,
4484 (deUint32)m_targetHeight,
4488 m_framebuffer = vk::createFramebuffer(vkd, device, &createInfo);
4492 PrepareRenderPassContext renderpassContext (context, *m_renderPass, *m_framebuffer, m_targetWidth, m_targetHeight);
4494 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4496 RenderPassCommand& command = *m_commands[cmdNdx];
4497 command.prepare(renderpassContext);
4502 void SubmitRenderPass::submit (SubmitContext& context)
4504 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4505 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4506 const vk::VkClearValue clearValue = vk::makeClearValueColorF32(0.0f, 0.0f, 0.0f, 1.0f);
4508 const vk::VkRenderPassBeginInfo beginInfo =
4510 vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
4516 { { 0, 0 }, { (deUint32)m_targetWidth, (deUint32)m_targetHeight } },
4521 vkd.cmdBeginRenderPass(commandBuffer, &beginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
4523 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4525 RenderPassCommand& command = *m_commands[cmdNdx];
4527 command.submit(context);
4530 vkd.cmdEndRenderPass(commandBuffer);
4533 void SubmitRenderPass::verify (VerifyContext& context, size_t commandIndex)
4535 TestLog& log (context.getLog());
4536 tcu::ResultCollector& resultCollector (context.getResultCollector());
4537 const string sectionName (de::toString(commandIndex) + ":" + getName());
4538 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4539 VerifyRenderPassContext verifyContext (context, m_targetWidth, m_targetHeight);
4541 tcu::clear(verifyContext.getReferenceTarget().getAccess(), Vec4(0.0f, 0.0f, 0.0f, 1.0f));
4543 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4545 RenderPassCommand& command = *m_commands[cmdNdx];
4546 command.verify(verifyContext, cmdNdx);
4550 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4551 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4552 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4553 const vk::VkDevice device = context.getContext().getDevice();
4554 const vk::VkQueue queue = context.getContext().getQueue();
4555 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
4556 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4557 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
4558 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_targetWidth * m_targetHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4559 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
4561 const vk::VkImageMemoryBarrier imageBarrier =
4563 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4566 vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
4567 vk::VK_ACCESS_TRANSFER_READ_BIT,
4569 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4570 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4572 VK_QUEUE_FAMILY_IGNORED,
4573 VK_QUEUE_FAMILY_IGNORED,
4577 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4579 1, // Mip level count
4584 const vk::VkBufferMemoryBarrier bufferBarrier =
4586 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4589 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4590 vk::VK_ACCESS_HOST_READ_BIT,
4592 VK_QUEUE_FAMILY_IGNORED,
4593 VK_QUEUE_FAMILY_IGNORED,
4598 const vk::VkBufferImageCopy region =
4603 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4610 (deUint32)m_targetWidth,
4611 (deUint32)m_targetHeight,
4616 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
4617 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_colorTarget, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
4618 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4621 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4622 queueRun(vkd, queue, *commandBuffer);
4625 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_targetWidth * m_targetHeight);
4627 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_targetWidth * m_targetHeight);
4630 const deUint8* const data = (const deUint8*)ptr;
4631 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_targetWidth, m_targetHeight, 1, data);
4632 const ConstPixelBufferAccess& refAccess (verifyContext.getReferenceTarget().getAccess());
4634 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4635 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4638 vkd.unmapMemory(device, *memory);
4643 struct PipelineResources
4645 vk::Move<vk::VkPipeline> pipeline;
4646 vk::Move<vk::VkDescriptorSetLayout> descriptorSetLayout;
4647 vk::Move<vk::VkPipelineLayout> pipelineLayout;
4650 void createPipelineWithResources (const vk::DeviceInterface& vkd,
4651 const vk::VkDevice device,
4652 const vk::VkRenderPass renderPass,
4653 const deUint32 subpass,
4654 const vk::VkShaderModule& vertexShaderModule,
4655 const vk::VkShaderModule& fragmentShaderModule,
4656 const deUint32 viewPortWidth,
4657 const deUint32 viewPortHeight,
4658 const vector<vk::VkVertexInputBindingDescription>& vertexBindingDescriptions,
4659 const vector<vk::VkVertexInputAttributeDescription>& vertexAttributeDescriptions,
4660 const vector<vk::VkDescriptorSetLayoutBinding>& bindings,
4661 PipelineResources& resources)
4663 if (!bindings.empty())
4665 const vk::VkDescriptorSetLayoutCreateInfo createInfo =
4667 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
4671 (deUint32)bindings.size(),
4672 bindings.empty() ? DE_NULL : &bindings[0]
4675 resources.descriptorSetLayout = vk::createDescriptorSetLayout(vkd, device, &createInfo);
4679 const vk::VkDescriptorSetLayout descriptorSetLayout_ = *resources.descriptorSetLayout;
4680 const vk::VkPipelineLayoutCreateInfo createInfo =
4682 vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
4686 resources.descriptorSetLayout ? 1u : 0u,
4687 resources.descriptorSetLayout ? &descriptorSetLayout_ : DE_NULL,
4693 resources.pipelineLayout = vk::createPipelineLayout(vkd, device, &createInfo);
4697 const vk::VkPipelineShaderStageCreateInfo shaderStages[] =
4700 vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4703 vk::VK_SHADER_STAGE_VERTEX_BIT,
4709 vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4712 vk::VK_SHADER_STAGE_FRAGMENT_BIT,
4713 fragmentShaderModule,
4718 const vk::VkPipelineDepthStencilStateCreateInfo depthStencilState =
4720 vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
4725 vk::VK_COMPARE_OP_ALWAYS,
4729 vk::VK_STENCIL_OP_KEEP,
4730 vk::VK_STENCIL_OP_KEEP,
4731 vk::VK_STENCIL_OP_KEEP,
4732 vk::VK_COMPARE_OP_ALWAYS,
4738 vk::VK_STENCIL_OP_KEEP,
4739 vk::VK_STENCIL_OP_KEEP,
4740 vk::VK_STENCIL_OP_KEEP,
4741 vk::VK_COMPARE_OP_ALWAYS,
4749 const vk::VkPipelineVertexInputStateCreateInfo vertexInputState =
4751 vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
4755 (deUint32)vertexBindingDescriptions.size(),
4756 vertexBindingDescriptions.empty() ? DE_NULL : &vertexBindingDescriptions[0],
4758 (deUint32)vertexAttributeDescriptions.size(),
4759 vertexAttributeDescriptions.empty() ? DE_NULL : &vertexAttributeDescriptions[0]
4761 const vk::VkPipelineInputAssemblyStateCreateInfo inputAssemblyState =
4763 vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
4766 vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
4769 const vk::VkViewport viewports[] =
4771 { 0.0f, 0.0f, (float)viewPortWidth, (float)viewPortHeight, 0.0f, 1.0f }
4773 const vk::VkRect2D scissors[] =
4775 { { 0, 0 }, { (deUint32)viewPortWidth, (deUint32)viewPortHeight } }
4777 const vk::VkPipelineViewportStateCreateInfo viewportState =
4779 vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
4782 DE_LENGTH_OF_ARRAY(viewports),
4784 DE_LENGTH_OF_ARRAY(scissors),
4787 const vk::VkPipelineRasterizationStateCreateInfo rasterState =
4789 vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
4795 vk::VK_POLYGON_MODE_FILL,
4796 vk::VK_CULL_MODE_NONE,
4797 vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,
4804 const vk::VkSampleMask sampleMask = ~0u;
4805 const vk::VkPipelineMultisampleStateCreateInfo multisampleState =
4807 vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
4811 vk::VK_SAMPLE_COUNT_1_BIT,
4818 const vk::VkPipelineColorBlendAttachmentState attachments[] =
4822 vk::VK_BLEND_FACTOR_ONE,
4823 vk::VK_BLEND_FACTOR_ZERO,
4824 vk::VK_BLEND_OP_ADD,
4825 vk::VK_BLEND_FACTOR_ONE,
4826 vk::VK_BLEND_FACTOR_ZERO,
4827 vk::VK_BLEND_OP_ADD,
4828 (vk::VK_COLOR_COMPONENT_R_BIT|
4829 vk::VK_COLOR_COMPONENT_G_BIT|
4830 vk::VK_COLOR_COMPONENT_B_BIT|
4831 vk::VK_COLOR_COMPONENT_A_BIT)
4834 const vk::VkPipelineColorBlendStateCreateInfo colorBlendState =
4836 vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
4841 vk::VK_LOGIC_OP_COPY,
4842 DE_LENGTH_OF_ARRAY(attachments),
4844 { 0.0f, 0.0f, 0.0f, 0.0f }
4846 const vk::VkGraphicsPipelineCreateInfo createInfo =
4848 vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
4852 DE_LENGTH_OF_ARRAY(shaderStages),
4856 &inputAssemblyState,
4864 *resources.pipelineLayout,
4871 resources.pipeline = vk::createGraphicsPipeline(vkd, device, 0, &createInfo);
4875 class RenderIndexBuffer : public RenderPassCommand
4878 RenderIndexBuffer (void) {}
4879 ~RenderIndexBuffer (void) {}
4881 const char* getName (void) const { return "RenderIndexBuffer"; }
4882 void logPrepare (TestLog&, size_t) const;
4883 void logSubmit (TestLog&, size_t) const;
4884 void prepare (PrepareRenderPassContext&);
4885 void submit (SubmitContext& context);
4886 void verify (VerifyRenderPassContext&, size_t);
4889 PipelineResources m_resources;
4890 vk::VkDeviceSize m_bufferSize;
4893 void RenderIndexBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4895 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as index buffer." << TestLog::EndMessage;
4898 void RenderIndexBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4900 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as index buffer." << TestLog::EndMessage;
4903 void RenderIndexBuffer::prepare (PrepareRenderPassContext& context)
4905 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4906 const vk::VkDevice device = context.getContext().getDevice();
4907 const vk::VkRenderPass renderPass = context.getRenderPass();
4908 const deUint32 subpass = 0;
4909 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("index-buffer.vert"), 0));
4910 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
4912 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
4913 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), vector<vk::VkDescriptorSetLayoutBinding>(), m_resources);
4914 m_bufferSize = context.getBufferSize();
4917 void RenderIndexBuffer::submit (SubmitContext& context)
4919 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4920 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4922 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
4923 vkd.cmdBindIndexBuffer(commandBuffer, context.getBuffer(), 0, vk::VK_INDEX_TYPE_UINT16);
4924 vkd.cmdDrawIndexed(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0, 0);
4927 void RenderIndexBuffer::verify (VerifyRenderPassContext& context, size_t)
4929 for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
4931 const deUint8 x = context.getReference().get(pos * 2);
4932 const deUint8 y = context.getReference().get((pos * 2) + 1);
4934 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
4938 class RenderVertexBuffer : public RenderPassCommand
4941 RenderVertexBuffer (void) {}
4942 ~RenderVertexBuffer (void) {}
4944 const char* getName (void) const { return "RenderVertexBuffer"; }
4945 void logPrepare (TestLog&, size_t) const;
4946 void logSubmit (TestLog&, size_t) const;
4947 void prepare (PrepareRenderPassContext&);
4948 void submit (SubmitContext& context);
4949 void verify (VerifyRenderPassContext&, size_t);
4952 PipelineResources m_resources;
4953 vk::VkDeviceSize m_bufferSize;
4956 void RenderVertexBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4958 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as vertex buffer." << TestLog::EndMessage;
4961 void RenderVertexBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4963 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as vertex buffer." << TestLog::EndMessage;
4966 void RenderVertexBuffer::prepare (PrepareRenderPassContext& context)
4968 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4969 const vk::VkDevice device = context.getContext().getDevice();
4970 const vk::VkRenderPass renderPass = context.getRenderPass();
4971 const deUint32 subpass = 0;
4972 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("vertex-buffer.vert"), 0));
4973 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
4975 vector<vk::VkVertexInputAttributeDescription> vertexAttributeDescriptions;
4976 vector<vk::VkVertexInputBindingDescription> vertexBindingDescriptions;
4979 const vk::VkVertexInputBindingDescription vertexBindingDescription =
4983 vk::VK_VERTEX_INPUT_RATE_VERTEX
4986 vertexBindingDescriptions.push_back(vertexBindingDescription);
4989 const vk::VkVertexInputAttributeDescription vertexAttributeDescription =
4993 vk::VK_FORMAT_R8G8_UNORM,
4997 vertexAttributeDescriptions.push_back(vertexAttributeDescription);
4999 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5000 vertexBindingDescriptions, vertexAttributeDescriptions, vector<vk::VkDescriptorSetLayoutBinding>(), m_resources);
5002 m_bufferSize = context.getBufferSize();
5005 void RenderVertexBuffer::submit (SubmitContext& context)
5007 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5008 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5009 const vk::VkDeviceSize offset = 0;
5010 const vk::VkBuffer buffer = context.getBuffer();
5012 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5013 vkd.cmdBindVertexBuffers(commandBuffer, 0, 1, &buffer, &offset);
5014 vkd.cmdDraw(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0);
5017 void RenderVertexBuffer::verify (VerifyRenderPassContext& context, size_t)
5019 for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
5021 const deUint8 x = context.getReference().get(pos * 2);
5022 const deUint8 y = context.getReference().get((pos * 2) + 1);
5024 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
5028 class RenderVertexUniformBuffer : public RenderPassCommand
5031 RenderVertexUniformBuffer (void) {}
5032 ~RenderVertexUniformBuffer (void);
5034 const char* getName (void) const { return "RenderVertexUniformBuffer"; }
5035 void logPrepare (TestLog&, size_t) const;
5036 void logSubmit (TestLog&, size_t) const;
5037 void prepare (PrepareRenderPassContext&);
5038 void submit (SubmitContext& context);
5039 void verify (VerifyRenderPassContext&, size_t);
5042 PipelineResources m_resources;
5043 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5044 vector<vk::VkDescriptorSet> m_descriptorSets;
5046 vk::VkDeviceSize m_bufferSize;
5049 RenderVertexUniformBuffer::~RenderVertexUniformBuffer (void)
5053 void RenderVertexUniformBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5055 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform buffer." << TestLog::EndMessage;
5058 void RenderVertexUniformBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5060 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform buffer." << TestLog::EndMessage;
5063 void RenderVertexUniformBuffer::prepare (PrepareRenderPassContext& context)
5065 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5066 const vk::VkDevice device = context.getContext().getDevice();
5067 const vk::VkRenderPass renderPass = context.getRenderPass();
5068 const deUint32 subpass = 0;
5069 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-buffer.vert"), 0));
5070 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5071 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5073 m_bufferSize = context.getBufferSize();
5076 const vk::VkDescriptorSetLayoutBinding binding =
5079 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
5081 vk::VK_SHADER_STAGE_VERTEX_BIT,
5085 bindings.push_back(binding);
5088 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5089 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, m_resources);
5092 const deUint32 descriptorCount = (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE));
5093 const vk::VkDescriptorPoolSize poolSizes =
5095 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
5098 const vk::VkDescriptorPoolCreateInfo createInfo =
5100 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5102 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5109 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5110 m_descriptorSets.resize(descriptorCount);
5113 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5115 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
5116 const vk::VkDescriptorSetAllocateInfo allocateInfo =
5118 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5126 m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
5129 const vk::VkDescriptorBufferInfo bufferInfo =
5131 context.getBuffer(),
5132 (vk::VkDeviceSize)(descriptorSetNdx * (size_t)MAX_UNIFORM_BUFFER_SIZE),
5133 m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5134 ? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5135 : (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5137 const vk::VkWriteDescriptorSet write =
5139 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5141 m_descriptorSets[descriptorSetNdx],
5145 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
5151 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5156 void RenderVertexUniformBuffer::submit (SubmitContext& context)
5158 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5159 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5161 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5163 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5165 const size_t size = (size_t)(m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5166 ? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5167 : MAX_UNIFORM_BUFFER_SIZE);
5168 const deUint32 count = (deUint32)(size / 2);
5170 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5171 vkd.cmdDraw(commandBuffer, count, 1, 0, 0);
5175 void RenderVertexUniformBuffer::verify (VerifyRenderPassContext& context, size_t)
5177 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5179 const size_t offset = descriptorSetNdx * MAX_UNIFORM_BUFFER_SIZE;
5180 const size_t size = (size_t)(m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5181 ? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5182 : MAX_UNIFORM_BUFFER_SIZE);
5183 const size_t count = size / 2;
5185 for (size_t pos = 0; pos < count; pos++)
5187 const deUint8 x = context.getReference().get(offset + pos * 2);
5188 const deUint8 y = context.getReference().get(offset + (pos * 2) + 1);
5190 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
5195 class RenderVertexUniformTexelBuffer : public RenderPassCommand
5198 RenderVertexUniformTexelBuffer (void) {}
5199 ~RenderVertexUniformTexelBuffer (void);
5201 const char* getName (void) const { return "RenderVertexUniformTexelBuffer"; }
5202 void logPrepare (TestLog&, size_t) const;
5203 void logSubmit (TestLog&, size_t) const;
5204 void prepare (PrepareRenderPassContext&);
5205 void submit (SubmitContext& context);
5206 void verify (VerifyRenderPassContext&, size_t);
5209 PipelineResources m_resources;
5210 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5211 vector<vk::VkDescriptorSet> m_descriptorSets;
5212 vector<vk::VkBufferView> m_bufferViews;
5214 const vk::DeviceInterface* m_vkd;
5215 vk::VkDevice m_device;
5216 vk::VkDeviceSize m_bufferSize;
5217 deUint32 m_maxUniformTexelCount;
5220 RenderVertexUniformTexelBuffer::~RenderVertexUniformTexelBuffer (void)
5222 for (size_t bufferViewNdx = 0; bufferViewNdx < m_bufferViews.size(); bufferViewNdx++)
5224 if (!!m_bufferViews[bufferViewNdx])
5226 m_vkd->destroyBufferView(m_device, m_bufferViews[bufferViewNdx], DE_NULL);
5227 m_bufferViews[bufferViewNdx] = (vk::VkBufferView)0;
5232 void RenderVertexUniformTexelBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5234 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform buffer." << TestLog::EndMessage;
5237 void RenderVertexUniformTexelBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5239 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform buffer." << TestLog::EndMessage;
5242 void RenderVertexUniformTexelBuffer::prepare (PrepareRenderPassContext& context)
5244 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
5245 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
5246 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5247 const vk::VkDevice device = context.getContext().getDevice();
5248 const vk::VkRenderPass renderPass = context.getRenderPass();
5249 const deUint32 subpass = 0;
5250 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-texel-buffer.vert"), 0));
5251 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5252 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5256 m_bufferSize = context.getBufferSize();
5257 m_maxUniformTexelCount = vk::getPhysicalDeviceProperties(vki, physicalDevice).limits.maxTexelBufferElements;
5260 const vk::VkDescriptorSetLayoutBinding binding =
5263 vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5265 vk::VK_SHADER_STAGE_VERTEX_BIT,
5269 bindings.push_back(binding);
5272 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5273 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, m_resources);
5276 const deUint32 descriptorCount = (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)m_maxUniformTexelCount * 2));
5277 const vk::VkDescriptorPoolSize poolSizes =
5279 vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5282 const vk::VkDescriptorPoolCreateInfo createInfo =
5284 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5286 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5293 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5294 m_descriptorSets.resize(descriptorCount, (vk::VkDescriptorSet)0);
5295 m_bufferViews.resize(descriptorCount, (vk::VkBufferView)0);
5298 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5300 const deUint32 count = (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5301 ? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5302 : m_maxUniformTexelCount * 2) / 2;
5303 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
5304 const vk::VkDescriptorSetAllocateInfo allocateInfo =
5306 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5314 m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
5317 const vk::VkBufferViewCreateInfo createInfo =
5319 vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
5323 context.getBuffer(),
5324 vk::VK_FORMAT_R16_UINT,
5325 descriptorSetNdx * m_maxUniformTexelCount * 2,
5329 VK_CHECK(vkd.createBufferView(device, &createInfo, DE_NULL, &m_bufferViews[descriptorSetNdx]));
5333 const vk::VkWriteDescriptorSet write =
5335 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5337 m_descriptorSets[descriptorSetNdx],
5341 vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5344 &m_bufferViews[descriptorSetNdx]
5347 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5352 void RenderVertexUniformTexelBuffer::submit (SubmitContext& context)
5354 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5355 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5357 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5359 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5361 const deUint32 count = (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5362 ? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5363 : m_maxUniformTexelCount * 2) / 2;
5365 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5366 vkd.cmdDraw(commandBuffer, count, 1, 0, 0);
5370 void RenderVertexUniformTexelBuffer::verify (VerifyRenderPassContext& context, size_t)
5372 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5374 const size_t offset = descriptorSetNdx * m_maxUniformTexelCount * 2;
5375 const deUint32 count = (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5376 ? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5377 : m_maxUniformTexelCount * 2) / 2;
5379 for (size_t pos = 0; pos < (size_t)count; pos++)
5381 const deUint8 x = context.getReference().get(offset + pos * 2);
5382 const deUint8 y = context.getReference().get(offset + (pos * 2) + 1);
5384 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
5389 class RenderVertexStorageBuffer : public RenderPassCommand
5392 RenderVertexStorageBuffer (void) {}
5393 ~RenderVertexStorageBuffer (void);
5395 const char* getName (void) const { return "RenderVertexStorageBuffer"; }
5396 void logPrepare (TestLog&, size_t) const;
5397 void logSubmit (TestLog&, size_t) const;
5398 void prepare (PrepareRenderPassContext&);
5399 void submit (SubmitContext& context);
5400 void verify (VerifyRenderPassContext&, size_t);
5403 PipelineResources m_resources;
5404 vk::Move<vk::VkDescriptorPool> m_descriptorPool;
5405 vector<vk::VkDescriptorSet> m_descriptorSets;
5407 vk::VkDeviceSize m_bufferSize;
5410 RenderVertexStorageBuffer::~RenderVertexStorageBuffer (void)
5414 void RenderVertexStorageBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5416 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as storage buffer." << TestLog::EndMessage;
5419 void RenderVertexStorageBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5421 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as storage buffer." << TestLog::EndMessage;
5424 void RenderVertexStorageBuffer::prepare (PrepareRenderPassContext& context)
5426 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5427 const vk::VkDevice device = context.getContext().getDevice();
5428 const vk::VkRenderPass renderPass = context.getRenderPass();
5429 const deUint32 subpass = 0;
5430 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-buffer.vert"), 0));
5431 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5432 vector<vk::VkDescriptorSetLayoutBinding> bindings;
5434 m_bufferSize = context.getBufferSize();
5437 const vk::VkDescriptorSetLayoutBinding binding =
5440 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5442 vk::VK_SHADER_STAGE_VERTEX_BIT,
5446 bindings.push_back(binding);
5449 createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5450 vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, m_resources);
5453 const deUint32 descriptorCount = (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)MAX_STORAGE_BUFFER_SIZE));
5454 const vk::VkDescriptorPoolSize poolSizes =
5456 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5459 const vk::VkDescriptorPoolCreateInfo createInfo =
5461 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5463 vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5470 m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5471 m_descriptorSets.resize(descriptorCount);
5474 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5476 const vk::VkDescriptorSetLayout layout = *m_resources.descriptorSetLayout;
5477 const vk::VkDescriptorSetAllocateInfo allocateInfo =
5479 vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5487 m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
5490 const vk::VkDescriptorBufferInfo bufferInfo =
5492 context.getBuffer(),
5493 descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE,
5494 de::min(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE, (vk::VkDeviceSize)MAX_STORAGE_BUFFER_SIZE)
5496 const vk::VkWriteDescriptorSet write =
5498 vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5500 m_descriptorSets[descriptorSetNdx],
5504 vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5510 vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5515 void RenderVertexStorageBuffer::submit (SubmitContext& context)
5517 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
5518 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
5520 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5522 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5524 const size_t size = m_bufferSize < (descriptorSetNdx + 1) * MAX_STORAGE_BUFFER_SIZE
5525 ? (size_t)(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE)
5526 : (size_t)(MAX_STORAGE_BUFFER_SIZE);
5528 vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5529 vkd.cmdDraw(commandBuffer, (deUint32)(size / 2), 1, 0, 0);
5533 void RenderVertexStorageBuffer::verify (VerifyRenderPassContext& context, size_t)
5535 for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5537 const size_t offset = descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE;
5538 const size_t size = m_bufferSize < (descriptorSetNdx + 1) * MAX_STORAGE_BUFFER_SIZE
5539 ? (size_t)(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE)
5540 : (size_t)(MAX_STORAGE_BUFFER_SIZE);
5542 for (size_t pos = 0; pos < size / 2; pos++)
5544 const deUint8 x = context.getReference().get(offset + pos * 2);
5545 const deUint8 y = context.getReference().get(offset + (pos * 2) + 1);
5547 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
5566 OP_BUFFER_BINDMEMORY,
5568 OP_QUEUE_WAIT_FOR_IDLE,
5569 OP_DEVICE_WAIT_FOR_IDLE,
5571 OP_COMMAND_BUFFER_BEGIN,
5572 OP_COMMAND_BUFFER_END,
5574 // Buffer transfer operations
5578 OP_BUFFER_COPY_TO_BUFFER,
5579 OP_BUFFER_COPY_FROM_BUFFER,
5581 OP_BUFFER_COPY_TO_IMAGE,
5582 OP_BUFFER_COPY_FROM_IMAGE,
5586 OP_IMAGE_BINDMEMORY,
5588 OP_IMAGE_TRANSITION_LAYOUT,
5590 OP_IMAGE_COPY_TO_BUFFER,
5591 OP_IMAGE_COPY_FROM_BUFFER,
5593 OP_IMAGE_COPY_TO_IMAGE,
5594 OP_IMAGE_COPY_FROM_IMAGE,
5596 OP_IMAGE_BLIT_TO_IMAGE,
5597 OP_IMAGE_BLIT_FROM_IMAGE,
5601 OP_PIPELINE_BARRIER_GLOBAL,
5602 OP_PIPELINE_BARRIER_BUFFER,
5603 OP_PIPELINE_BARRIER_IMAGE,
5605 // Renderpass operations
5606 OP_RENDERPASS_BEGIN,
5609 // Commands inside render pass
5610 OP_RENDER_VERTEX_BUFFER,
5611 OP_RENDER_INDEX_BUFFER,
5613 OP_RENDER_VERTEX_UNIFORM_BUFFER,
5614 OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER,
5616 OP_RENDER_VERTEX_STORAGE_BUFFER,
5622 STAGE_COMMAND_BUFFER,
5627 vk::VkAccessFlags getWriteAccessFlags (void)
5629 return vk::VK_ACCESS_SHADER_WRITE_BIT
5630 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
5631 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
5632 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
5633 | vk::VK_ACCESS_HOST_WRITE_BIT
5634 | vk::VK_ACCESS_MEMORY_WRITE_BIT;
5637 bool isWriteAccess (vk::VkAccessFlagBits access)
5639 return (getWriteAccessFlags() & access) != 0;
5645 CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses);
5647 bool isValid (vk::VkPipelineStageFlagBits stage,
5648 vk::VkAccessFlagBits access) const;
5650 void perform (vk::VkPipelineStageFlagBits stage,
5651 vk::VkAccessFlagBits access);
5653 void submitCommandBuffer (void);
5654 void waitForIdle (void);
5656 void getFullBarrier (vk::VkPipelineStageFlags& srcStages,
5657 vk::VkAccessFlags& srcAccesses,
5658 vk::VkPipelineStageFlags& dstStages,
5659 vk::VkAccessFlags& dstAccesses) const;
5661 void barrier (vk::VkPipelineStageFlags srcStages,
5662 vk::VkAccessFlags srcAccesses,
5663 vk::VkPipelineStageFlags dstStages,
5664 vk::VkAccessFlags dstAccesses);
5666 void imageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
5667 vk::VkAccessFlags srcAccesses,
5668 vk::VkPipelineStageFlags dstStages,
5669 vk::VkAccessFlags dstAccesses);
5671 void checkImageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
5672 vk::VkAccessFlags srcAccesses,
5673 vk::VkPipelineStageFlags dstStages,
5674 vk::VkAccessFlags dstAccesses);
5676 // Everything is clean and there is no need for barriers
5677 bool isClean (void) const;
5679 vk::VkPipelineStageFlags getAllowedStages (void) const { return m_allowedStages; }
5680 vk::VkAccessFlags getAllowedAcceses (void) const { return m_allowedAccesses; }
5682 // Limit which stages and accesses are used by the CacheState tracker
5683 const vk::VkPipelineStageFlags m_allowedStages;
5684 const vk::VkAccessFlags m_allowedAccesses;
5686 // [dstStage][srcStage] = srcAccesses
5687 // In stage dstStage write srcAccesses from srcStage are not yet available
5688 vk::VkAccessFlags m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
5689 // Latest pipeline transition is not available in stage
5690 bool m_unavailableLayoutTransition[PIPELINESTAGE_LAST];
5691 // [dstStage] = dstAccesses
5692 // In stage dstStage ops with dstAccesses are not yet visible
5693 vk::VkAccessFlags m_invisibleOperations[PIPELINESTAGE_LAST];
5695 // [dstStage] = srcStage
5696 // Memory operation in srcStage have not completed before dstStage
5697 vk::VkPipelineStageFlags m_incompleteOperations[PIPELINESTAGE_LAST];
5700 CacheState::CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses)
5701 : m_allowedStages (allowedStages)
5702 , m_allowedAccesses (allowedAccesses)
5704 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5706 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5708 if ((dstStage_ & m_allowedStages) == 0)
5711 // All operations are initially visible
5712 m_invisibleOperations[dstStage] = 0;
5714 // There are no incomplete read operations initially
5715 m_incompleteOperations[dstStage] = 0;
5717 // There are no incomplete layout transitions
5718 m_unavailableLayoutTransition[dstStage] = false;
5720 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5722 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5724 if ((srcStage_ & m_allowedStages) == 0)
5727 // There are no write operations that are not yet available
5729 m_unavailableWriteOperations[dstStage][srcStage] = 0;
5734 bool CacheState::isValid (vk::VkPipelineStageFlagBits stage,
5735 vk::VkAccessFlagBits access) const
5737 DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5738 DE_ASSERT((stage & (~m_allowedStages)) == 0);
5740 const PipelineStage dstStage = pipelineStageFlagToPipelineStage(stage);
5742 // Previous operations are not visible to access on stage
5743 if (m_unavailableLayoutTransition[dstStage] || (m_invisibleOperations[dstStage] & access) != 0)
5746 if (isWriteAccess(access))
5748 // Memory operations from other stages have not completed before
5750 if (m_incompleteOperations[dstStage] != 0)
5757 void CacheState::perform (vk::VkPipelineStageFlagBits stage,
5758 vk::VkAccessFlagBits access)
5760 DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5761 DE_ASSERT((stage & (~m_allowedStages)) == 0);
5763 const PipelineStage srcStage = pipelineStageFlagToPipelineStage(stage);
5765 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5767 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5769 if ((dstStage_ & m_allowedStages) == 0)
5772 // Mark stage as incomplete for all stages
5773 m_incompleteOperations[dstStage] |= stage;
5775 if (isWriteAccess(access))
5777 // Mark all accesses from all stages invisible
5778 m_invisibleOperations[dstStage] |= m_allowedAccesses;
5780 // Mark write access from srcStage unavailable to all stages
5781 m_unavailableWriteOperations[dstStage][srcStage] |= access;
5786 void CacheState::submitCommandBuffer (void)
5788 // Flush all host writes and reads
5789 barrier(m_allowedStages & vk::VK_PIPELINE_STAGE_HOST_BIT,
5790 m_allowedAccesses & (vk::VK_ACCESS_HOST_READ_BIT | vk::VK_ACCESS_HOST_WRITE_BIT),
5795 void CacheState::waitForIdle (void)
5797 // Make all writes available
5798 barrier(m_allowedStages,
5799 m_allowedAccesses & getWriteAccessFlags(),
5803 // Make all writes visible on device side
5804 barrier(m_allowedStages,
5806 m_allowedStages & (~vk::VK_PIPELINE_STAGE_HOST_BIT),
5810 void CacheState::getFullBarrier (vk::VkPipelineStageFlags& srcStages,
5811 vk::VkAccessFlags& srcAccesses,
5812 vk::VkPipelineStageFlags& dstStages,
5813 vk::VkAccessFlags& dstAccesses) const
5820 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5822 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5824 if ((dstStage_ & m_allowedStages) == 0)
5827 // Make sure all previous operation are complete in all stages
5828 if (m_incompleteOperations[dstStage])
5830 dstStages |= dstStage_;
5831 srcStages |= m_incompleteOperations[dstStage];
5834 // Make sure all read operations are visible in dstStage
5835 if (m_invisibleOperations[dstStage])
5837 dstStages |= dstStage_;
5838 dstAccesses |= m_invisibleOperations[dstStage];
5841 // Make sure all write operations fro mall stages are available
5842 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5844 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5846 if ((srcStage_ & m_allowedStages) == 0)
5849 if (m_unavailableWriteOperations[dstStage][srcStage])
5851 dstStages |= dstStage_;
5852 srcStages |= dstStage_;
5853 srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage];
5856 if (m_unavailableLayoutTransition[dstStage] && !m_unavailableLayoutTransition[srcStage])
5858 // Add dependency between srcStage and dstStage if layout transition has not completed in dstStage,
5859 // but has completed in srcStage.
5860 dstStages |= dstStage_;
5861 srcStages |= dstStage_;
5866 DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5867 DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5868 DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5869 DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
5872 void CacheState::checkImageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
5873 vk::VkAccessFlags srcAccesses,
5874 vk::VkPipelineStageFlags dstStages,
5875 vk::VkAccessFlags dstAccesses)
5877 DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5878 DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5879 DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5880 DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
5882 DE_UNREF(srcStages);
5883 DE_UNREF(srcAccesses);
5885 DE_UNREF(dstStages);
5886 DE_UNREF(dstAccesses);
5888 #if defined(DE_DEBUG)
5889 // Check that all stages have completed before srcStages or are in srcStages.
5891 vk::VkPipelineStageFlags completedStages = srcStages;
5893 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
5895 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5897 if ((srcStage_ & srcStages) == 0)
5900 completedStages |= (~m_incompleteOperations[srcStage]);
5903 DE_ASSERT((completedStages & m_allowedStages) == m_allowedStages);
5906 // Check that any write is available at least in one stage. Since all stages are complete even single flush is enough.
5907 if ((getWriteAccessFlags() & m_allowedAccesses) != 0 && (srcAccesses & getWriteAccessFlags()) == 0)
5909 bool anyWriteAvailable = false;
5911 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5913 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5915 if ((dstStage_ & m_allowedStages) == 0)
5918 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5920 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5922 if ((srcStage_ & m_allowedStages) == 0)
5925 if (m_unavailableWriteOperations[dstStage][srcStage] != (getWriteAccessFlags() & m_allowedAccesses))
5927 anyWriteAvailable = true;
5933 DE_ASSERT(anyWriteAvailable);
5938 void CacheState::imageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
5939 vk::VkAccessFlags srcAccesses,
5940 vk::VkPipelineStageFlags dstStages,
5941 vk::VkAccessFlags dstAccesses)
5943 checkImageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
5945 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5947 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5949 if ((dstStage_ & m_allowedStages) == 0)
5952 // All stages are incomplete after the barrier except each dstStage in it self.
5953 m_incompleteOperations[dstStage] = m_allowedStages & (~dstStage_);
5955 // All memory operations are invisible unless they are listed in dstAccess
5956 m_invisibleOperations[dstStage] = m_allowedAccesses & (~dstAccesses);
5958 // Layout transition is unavailable in stage unless it was listed in dstStages
5959 m_unavailableLayoutTransition[dstStage]= (dstStage_ & dstStages) == 0;
5961 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5963 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5965 if ((srcStage_ & m_allowedStages) == 0)
5968 // All write operations are available after layout transition
5969 m_unavailableWriteOperations[dstStage][srcStage] = 0;
5974 void CacheState::barrier (vk::VkPipelineStageFlags srcStages,
5975 vk::VkAccessFlags srcAccesses,
5976 vk::VkPipelineStageFlags dstStages,
5977 vk::VkAccessFlags dstAccesses)
5979 DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5980 DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5981 DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5982 DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
5986 vk::VkPipelineStageFlags oldIncompleteOperations[PIPELINESTAGE_LAST];
5987 vk::VkAccessFlags oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
5988 bool oldUnavailableLayoutTransition[PIPELINESTAGE_LAST];
5990 deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
5991 deMemcpy(oldUnavailableWriteOperations, m_unavailableWriteOperations, sizeof(oldUnavailableWriteOperations));
5992 deMemcpy(oldUnavailableLayoutTransition, m_unavailableLayoutTransition, sizeof(oldUnavailableLayoutTransition));
5994 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
5996 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5998 if ((srcStage_ & srcStages) == 0)
6001 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
6003 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
6005 if ((dstStage_ & dstStages) == 0)
6008 // Stages that have completed before srcStage have also completed before dstStage
6009 m_incompleteOperations[dstStage] &= oldIncompleteOperations[srcStage];
6011 // Image layout transition in srcStage are now available in dstStage
6012 m_unavailableLayoutTransition[dstStage] &= oldUnavailableLayoutTransition[srcStage];
6014 for (vk::VkPipelineStageFlags sharedStage_ = 1; sharedStage_ <= m_allowedStages; sharedStage_ <<= 1)
6016 const PipelineStage sharedStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)sharedStage_);
6018 if ((sharedStage_ & m_allowedStages) == 0)
6021 // Writes that are available in srcStage are also available in dstStage
6022 m_unavailableWriteOperations[dstStage][sharedStage] &= oldUnavailableWriteOperations[srcStage][sharedStage];
6029 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
6031 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
6032 bool allWritesAvailable = true;
6034 if ((dstStage_ & dstStages) == 0)
6037 // Operations in srcStages have completed before any stage in dstStages
6038 m_incompleteOperations[dstStage] &= ~srcStages;
6040 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
6042 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
6044 if ((srcStage_ & m_allowedStages) == 0)
6047 // Make srcAccesses from srcStage available in dstStage
6048 if ((srcStage_ & srcStages) != 0)
6049 m_unavailableWriteOperations[dstStage][srcStage] &= ~srcAccesses;
6051 if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
6052 allWritesAvailable = false;
6055 // If all writes are available in dstStage make dstAccesses also visible
6056 if (allWritesAvailable)
6057 m_invisibleOperations[dstStage] &= ~dstAccesses;
6061 bool CacheState::isClean (void) const
6063 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
6065 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
6067 if ((dstStage_ & m_allowedStages) == 0)
6070 // Some operations are not visible to some stages
6071 if (m_invisibleOperations[dstStage] != 0)
6074 // There are operation that have not completed yet
6075 if (m_incompleteOperations[dstStage] != 0)
6078 // Layout transition has not completed yet
6079 if (m_unavailableLayoutTransition[dstStage])
6082 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
6084 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
6086 if ((srcStage_ & m_allowedStages) == 0)
6089 // Some write operations are not available yet
6090 if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
6100 State (Usage usage, deUint32 seed)
6101 : stage (STAGE_HOST)
6102 , cache (usageToStageFlags(usage), usageToAccessFlags(usage))
6105 , hostInvalidated (true)
6106 , hostFlushed (true)
6107 , memoryDefined (false)
6109 , hasBoundBufferMemory (false)
6111 , hasBoundImageMemory (false)
6112 , imageLayout (vk::VK_IMAGE_LAYOUT_UNDEFINED)
6113 , imageDefined (false)
6116 , commandBufferIsEmpty (true)
6117 , renderPassIsEmpty (true)
6126 bool hostInvalidated;
6131 bool hasBoundBufferMemory;
6134 bool hasBoundImageMemory;
6135 vk::VkImageLayout imageLayout;
6141 bool commandBufferIsEmpty;
6142 bool renderPassIsEmpty;
6145 void getAvailableOps (const State& state, bool supportsBuffers, bool supportsImages, Usage usage, vector<Op>& ops)
6147 if (state.stage == STAGE_HOST)
6149 if (usage & (USAGE_HOST_READ | USAGE_HOST_WRITE))
6151 // Host memory operations
6154 ops.push_back(OP_UNMAP);
6156 // Avoid flush and finish if they are not needed
6157 if (!state.hostFlushed)
6158 ops.push_back(OP_MAP_FLUSH);
6160 if (!state.hostInvalidated
6162 && ((usage & USAGE_HOST_READ) == 0
6163 || state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
6164 && ((usage & USAGE_HOST_WRITE) == 0
6165 || state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)))
6167 ops.push_back(OP_MAP_INVALIDATE);
6170 if (usage & USAGE_HOST_READ
6171 && usage & USAGE_HOST_WRITE
6172 && state.memoryDefined
6173 && state.hostInvalidated
6175 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)
6176 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
6178 ops.push_back(OP_MAP_MODIFY);
6181 if (usage & USAGE_HOST_READ
6182 && state.memoryDefined
6183 && state.hostInvalidated
6185 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
6187 ops.push_back(OP_MAP_READ);
6190 if (usage & USAGE_HOST_WRITE
6191 && state.hostInvalidated
6193 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT))
6195 ops.push_back(OP_MAP_WRITE);
6199 ops.push_back(OP_MAP);
6202 if (state.hasBoundBufferMemory && state.queueIdle)
6204 // \note Destroy only buffers after they have been bound
6205 ops.push_back(OP_BUFFER_DESTROY);
6209 if (state.hasBuffer)
6211 if (!state.hasBoundBufferMemory)
6212 ops.push_back(OP_BUFFER_BINDMEMORY);
6214 else if (!state.hasImage && supportsBuffers) // Avoid creating buffer if there is already image
6215 ops.push_back(OP_BUFFER_CREATE);
6218 if (state.hasBoundImageMemory && state.queueIdle)
6220 // \note Destroy only image after they have been bound
6221 ops.push_back(OP_IMAGE_DESTROY);
6227 if (!state.hasBoundImageMemory)
6228 ops.push_back(OP_IMAGE_BINDMEMORY);
6230 else if (!state.hasBuffer && supportsImages) // Avoid creating image if there is already buffer
6231 ops.push_back(OP_IMAGE_CREATE);
6234 // Host writes must be flushed before GPU commands and there must be
6235 // buffer or image for GPU commands
6236 if (state.hostFlushed
6237 && (state.memoryDefined || supportsDeviceBufferWrites(usage) || state.imageDefined || supportsDeviceImageWrites(usage))
6238 && (state.hasBoundBufferMemory || state.hasBoundImageMemory) // Avoid command buffers if there is no object to use
6239 && (usageToStageFlags(usage) & (~vk::VK_PIPELINE_STAGE_HOST_BIT)) != 0) // Don't start command buffer if there are no ways to use memory from gpu
6241 ops.push_back(OP_COMMAND_BUFFER_BEGIN);
6244 if (!state.deviceIdle)
6245 ops.push_back(OP_DEVICE_WAIT_FOR_IDLE);
6247 if (!state.queueIdle)
6248 ops.push_back(OP_QUEUE_WAIT_FOR_IDLE);
6250 else if (state.stage == STAGE_COMMAND_BUFFER)
6252 if (!state.cache.isClean())
6254 ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
6257 ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
6259 if (state.hasBuffer)
6260 ops.push_back(OP_PIPELINE_BARRIER_BUFFER);
6263 if (state.hasBoundBufferMemory)
6265 if (usage & USAGE_TRANSFER_DST
6266 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
6268 ops.push_back(OP_BUFFER_FILL);
6269 ops.push_back(OP_BUFFER_UPDATE);
6270 ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
6271 ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);
6274 if (usage & USAGE_TRANSFER_SRC
6275 && state.memoryDefined
6276 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
6278 ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
6279 ops.push_back(OP_BUFFER_COPY_TO_IMAGE);
6283 if (state.hasBoundImageMemory)
6285 ops.push_back(OP_IMAGE_TRANSITION_LAYOUT);
6288 if (usage & USAGE_TRANSFER_DST
6289 && (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
6290 || state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
6291 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
6293 ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
6294 ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
6295 ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);
6298 if (usage & USAGE_TRANSFER_SRC
6299 && (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
6300 || state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
6301 && state.imageDefined
6302 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
6304 ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
6305 ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
6306 ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);
6311 // \todo [2016-03-09 mika] Add other usages?
6312 if (state.memoryDefined
6313 && state.hasBoundBufferMemory
6314 && (((usage & USAGE_VERTEX_BUFFER)
6315 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
6316 || ((usage & USAGE_INDEX_BUFFER)
6317 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
6318 || ((usage & USAGE_UNIFORM_BUFFER)
6319 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
6320 || ((usage & USAGE_UNIFORM_TEXEL_BUFFER)
6321 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
6322 || ((usage & USAGE_STORAGE_BUFFER)
6323 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))))
6325 ops.push_back(OP_RENDERPASS_BEGIN);
6328 // \note This depends on previous operations and has to be always the
6329 // last command buffer operation check
6330 if (ops.empty() || !state.commandBufferIsEmpty)
6331 ops.push_back(OP_COMMAND_BUFFER_END);
6333 else if (state.stage == STAGE_RENDER_PASS)
6335 if ((usage & USAGE_VERTEX_BUFFER) != 0
6336 && state.memoryDefined
6337 && state.hasBoundBufferMemory
6338 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
6340 ops.push_back(OP_RENDER_VERTEX_BUFFER);
6343 if ((usage & USAGE_INDEX_BUFFER) != 0
6344 && state.memoryDefined
6345 && state.hasBoundBufferMemory
6346 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
6348 ops.push_back(OP_RENDER_INDEX_BUFFER);
6351 if ((usage & USAGE_UNIFORM_BUFFER) != 0
6352 && state.memoryDefined
6353 && state.hasBoundBufferMemory
6354 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
6356 ops.push_back(OP_RENDER_VERTEX_UNIFORM_BUFFER);
6359 if ((usage & USAGE_UNIFORM_TEXEL_BUFFER) != 0
6360 && state.memoryDefined
6361 && state.hasBoundBufferMemory
6362 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
6364 ops.push_back(OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER);
6367 if ((usage & USAGE_STORAGE_BUFFER) != 0
6368 && state.memoryDefined
6369 && state.hasBoundBufferMemory
6370 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
6372 ops.push_back(OP_RENDER_VERTEX_STORAGE_BUFFER);
6375 if (!state.renderPassIsEmpty)
6376 ops.push_back(OP_RENDERPASS_END);
6379 DE_FATAL("Unknown stage");
6382 bool layoutSupportedByUsage (Usage usage, vk::VkImageLayout layout)
6386 case vk::VK_IMAGE_LAYOUT_GENERAL:
6389 case vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
6390 return (usage & USAGE_COLOR_ATTACHMENT) != 0;
6392 case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
6393 return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
6395 case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
6396 return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
6398 case vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
6399 // \todo [2016-03-09 mika] Should include input attachment
6400 return (usage & USAGE_TEXTURE_SAMPLED) != 0;
6402 case vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
6403 return (usage & USAGE_TRANSFER_SRC) != 0;
6405 case vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
6406 return (usage & USAGE_TRANSFER_DST) != 0;
6408 case vk::VK_IMAGE_LAYOUT_PREINITIALIZED:
6412 DE_FATAL("Unknown layout");
6417 vk::VkImageLayout getRandomNextLayout (de::Random& rng,
6419 vk::VkImageLayout previousLayout)
6421 const vk::VkImageLayout layouts[] =
6423 vk::VK_IMAGE_LAYOUT_GENERAL,
6424 vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
6425 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
6426 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
6427 vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
6428 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
6429 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
6431 size_t possibleLayoutCount = 0;
6433 for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
6435 const vk::VkImageLayout layout = layouts[layoutNdx];
6437 if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
6438 possibleLayoutCount++;
6441 size_t nextLayoutNdx = ((size_t)rng.getUint64()) % possibleLayoutCount;
6443 for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
6445 const vk::VkImageLayout layout = layouts[layoutNdx];
6447 if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
6449 if (nextLayoutNdx == 0)
6456 DE_FATAL("Unreachable");
6457 return vk::VK_IMAGE_LAYOUT_UNDEFINED;
6460 void applyOp (State& state, const Memory& memory, Op op, Usage usage)
6465 DE_ASSERT(state.stage == STAGE_HOST);
6466 DE_ASSERT(!state.mapped);
6467 state.mapped = true;
6471 DE_ASSERT(state.stage == STAGE_HOST);
6472 DE_ASSERT(state.mapped);
6473 state.mapped = false;
6477 DE_ASSERT(state.stage == STAGE_HOST);
6478 DE_ASSERT(!state.hostFlushed);
6479 state.hostFlushed = true;
6482 case OP_MAP_INVALIDATE:
6483 DE_ASSERT(state.stage == STAGE_HOST);
6484 DE_ASSERT(!state.hostInvalidated);
6485 state.hostInvalidated = true;
6489 DE_ASSERT(state.stage == STAGE_HOST);
6490 DE_ASSERT(state.hostInvalidated);
6491 state.rng.getUint32();
6495 DE_ASSERT(state.stage == STAGE_HOST);
6496 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
6497 state.hostFlushed = false;
6499 state.memoryDefined = true;
6500 state.imageDefined = false;
6501 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
6502 state.rng.getUint32();
6506 DE_ASSERT(state.stage == STAGE_HOST);
6507 DE_ASSERT(state.hostInvalidated);
6509 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
6510 state.hostFlushed = false;
6512 state.rng.getUint32();
6515 case OP_BUFFER_CREATE:
6516 DE_ASSERT(state.stage == STAGE_HOST);
6517 DE_ASSERT(!state.hasBuffer);
6519 state.hasBuffer = true;
6522 case OP_BUFFER_DESTROY:
6523 DE_ASSERT(state.stage == STAGE_HOST);
6524 DE_ASSERT(state.hasBuffer);
6525 DE_ASSERT(state.hasBoundBufferMemory);
6527 state.hasBuffer = false;
6528 state.hasBoundBufferMemory = false;
6531 case OP_BUFFER_BINDMEMORY:
6532 DE_ASSERT(state.stage == STAGE_HOST);
6533 DE_ASSERT(state.hasBuffer);
6534 DE_ASSERT(!state.hasBoundBufferMemory);
6536 state.hasBoundBufferMemory = true;
6539 case OP_IMAGE_CREATE:
6540 DE_ASSERT(state.stage == STAGE_HOST);
6541 DE_ASSERT(!state.hasImage);
6542 DE_ASSERT(!state.hasBuffer);
6544 state.hasImage = true;
6547 case OP_IMAGE_DESTROY:
6548 DE_ASSERT(state.stage == STAGE_HOST);
6549 DE_ASSERT(state.hasImage);
6550 DE_ASSERT(state.hasBoundImageMemory);
6552 state.hasImage = false;
6553 state.hasBoundImageMemory = false;
6554 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
6555 state.imageDefined = false;
6558 case OP_IMAGE_BINDMEMORY:
6559 DE_ASSERT(state.stage == STAGE_HOST);
6560 DE_ASSERT(state.hasImage);
6561 DE_ASSERT(!state.hasBoundImageMemory);
6563 state.hasBoundImageMemory = true;
6566 case OP_IMAGE_TRANSITION_LAYOUT:
6568 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6569 DE_ASSERT(state.hasImage);
6570 DE_ASSERT(state.hasBoundImageMemory);
6572 // \todo [2016-03-09 mika] Support linear tiling and predefined data
6573 const vk::VkImageLayout srcLayout = state.rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
6574 const vk::VkImageLayout dstLayout = getRandomNextLayout(state.rng, usage, srcLayout);
6576 vk::VkPipelineStageFlags dirtySrcStages;
6577 vk::VkAccessFlags dirtySrcAccesses;
6578 vk::VkPipelineStageFlags dirtyDstStages;
6579 vk::VkAccessFlags dirtyDstAccesses;
6581 vk::VkPipelineStageFlags srcStages;
6582 vk::VkAccessFlags srcAccesses;
6583 vk::VkPipelineStageFlags dstStages;
6584 vk::VkAccessFlags dstAccesses;
6586 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6588 // Try masking some random bits
6589 srcStages = dirtySrcStages;
6590 srcAccesses = dirtySrcAccesses;
6592 dstStages = state.cache.getAllowedStages() & state.rng.getUint32();
6593 dstAccesses = state.cache.getAllowedAcceses() & state.rng.getUint32();
6595 // If there are no bits in dst stage mask use all stages
6596 dstStages = dstStages ? dstStages : state.cache.getAllowedStages();
6599 srcStages = dstStages;
6601 if (srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
6602 state.imageDefined = false;
6604 state.commandBufferIsEmpty = false;
6605 state.imageLayout = dstLayout;
6606 state.memoryDefined = false;
6607 state.cache.imageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
6611 case OP_QUEUE_WAIT_FOR_IDLE:
6612 DE_ASSERT(state.stage == STAGE_HOST);
6613 DE_ASSERT(!state.queueIdle);
6615 state.queueIdle = true;
6617 state.cache.waitForIdle();
6620 case OP_DEVICE_WAIT_FOR_IDLE:
6621 DE_ASSERT(state.stage == STAGE_HOST);
6622 DE_ASSERT(!state.deviceIdle);
6624 state.queueIdle = true;
6625 state.deviceIdle = true;
6627 state.cache.waitForIdle();
6630 case OP_COMMAND_BUFFER_BEGIN:
6631 DE_ASSERT(state.stage == STAGE_HOST);
6632 state.stage = STAGE_COMMAND_BUFFER;
6633 state.commandBufferIsEmpty = true;
6634 // Makes host writes visible to command buffer
6635 state.cache.submitCommandBuffer();
6638 case OP_COMMAND_BUFFER_END:
6639 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6640 state.stage = STAGE_HOST;
6641 state.queueIdle = false;
6642 state.deviceIdle = false;
6645 case OP_BUFFER_COPY_FROM_BUFFER:
6646 case OP_BUFFER_COPY_FROM_IMAGE:
6647 case OP_BUFFER_UPDATE:
6648 case OP_BUFFER_FILL:
6649 state.rng.getUint32();
6650 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6652 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
6653 state.hostInvalidated = false;
6655 state.commandBufferIsEmpty = false;
6656 state.memoryDefined = true;
6657 state.imageDefined = false;
6658 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
6659 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
6662 case OP_BUFFER_COPY_TO_BUFFER:
6663 case OP_BUFFER_COPY_TO_IMAGE:
6664 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6666 state.commandBufferIsEmpty = false;
6667 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
6670 case OP_IMAGE_BLIT_FROM_IMAGE:
6671 state.rng.getBool();
6673 case OP_IMAGE_COPY_FROM_BUFFER:
6674 case OP_IMAGE_COPY_FROM_IMAGE:
6675 state.rng.getUint32();
6676 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6678 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
6679 state.hostInvalidated = false;
6681 state.commandBufferIsEmpty = false;
6682 state.memoryDefined = false;
6683 state.imageDefined = true;
6684 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
6687 case OP_IMAGE_BLIT_TO_IMAGE:
6688 state.rng.getBool();
6690 case OP_IMAGE_COPY_TO_BUFFER:
6691 case OP_IMAGE_COPY_TO_IMAGE:
6692 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6694 state.commandBufferIsEmpty = false;
6695 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
6698 case OP_PIPELINE_BARRIER_GLOBAL:
6699 case OP_PIPELINE_BARRIER_BUFFER:
6700 case OP_PIPELINE_BARRIER_IMAGE:
6702 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6704 vk::VkPipelineStageFlags dirtySrcStages;
6705 vk::VkAccessFlags dirtySrcAccesses;
6706 vk::VkPipelineStageFlags dirtyDstStages;
6707 vk::VkAccessFlags dirtyDstAccesses;
6709 vk::VkPipelineStageFlags srcStages;
6710 vk::VkAccessFlags srcAccesses;
6711 vk::VkPipelineStageFlags dstStages;
6712 vk::VkAccessFlags dstAccesses;
6714 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6716 // Try masking some random bits
6717 srcStages = dirtySrcStages & state.rng.getUint32();
6718 srcAccesses = dirtySrcAccesses & state.rng.getUint32();
6720 dstStages = dirtyDstStages & state.rng.getUint32();
6721 dstAccesses = dirtyDstAccesses & state.rng.getUint32();
6723 // If there are no bits in stage mask use the original dirty stages
6724 srcStages = srcStages ? srcStages : dirtySrcStages;
6725 dstStages = dstStages ? dstStages : dirtyDstStages;
6728 srcStages = dstStages;
6730 state.commandBufferIsEmpty = false;
6731 state.cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
6735 case OP_RENDERPASS_BEGIN:
6737 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6739 state.renderPassIsEmpty = true;
6740 state.stage = STAGE_RENDER_PASS;
6744 case OP_RENDERPASS_END:
6746 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6748 state.renderPassIsEmpty = true;
6749 state.stage = STAGE_COMMAND_BUFFER;
6753 case OP_RENDER_VERTEX_BUFFER:
6755 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6757 state.renderPassIsEmpty = false;
6758 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
6762 case OP_RENDER_INDEX_BUFFER:
6764 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6766 state.renderPassIsEmpty = false;
6767 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT);
6771 case OP_RENDER_VERTEX_UNIFORM_BUFFER:
6772 case OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER:
6774 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6776 state.renderPassIsEmpty = false;
6777 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT);
6781 case OP_RENDER_VERTEX_STORAGE_BUFFER:
6783 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6785 state.renderPassIsEmpty = false;
6786 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT);
6791 DE_FATAL("Unknown op");
6795 de::MovePtr<Command> createHostCommand (Op op,
6798 vk::VkSharingMode sharing)
6802 case OP_MAP: return de::MovePtr<Command>(new Map());
6803 case OP_UNMAP: return de::MovePtr<Command>(new UnMap());
6805 case OP_MAP_FLUSH: return de::MovePtr<Command>(new Flush());
6806 case OP_MAP_INVALIDATE: return de::MovePtr<Command>(new Invalidate());
6808 case OP_MAP_READ: return de::MovePtr<Command>(new HostMemoryAccess(true, false, rng.getUint32()));
6809 case OP_MAP_WRITE: return de::MovePtr<Command>(new HostMemoryAccess(false, true, rng.getUint32()));
6810 case OP_MAP_MODIFY: return de::MovePtr<Command>(new HostMemoryAccess(true, true, rng.getUint32()));
6812 case OP_BUFFER_CREATE: return de::MovePtr<Command>(new CreateBuffer(usageToBufferUsageFlags(usage), sharing));
6813 case OP_BUFFER_DESTROY: return de::MovePtr<Command>(new DestroyBuffer());
6814 case OP_BUFFER_BINDMEMORY: return de::MovePtr<Command>(new BindBufferMemory());
6816 case OP_IMAGE_CREATE: return de::MovePtr<Command>(new CreateImage(usageToImageUsageFlags(usage), sharing));
6817 case OP_IMAGE_DESTROY: return de::MovePtr<Command>(new DestroyImage());
6818 case OP_IMAGE_BINDMEMORY: return de::MovePtr<Command>(new BindImageMemory());
6820 case OP_QUEUE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new QueueWaitIdle());
6821 case OP_DEVICE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new DeviceWaitIdle());
6824 DE_FATAL("Unknown op");
6825 return de::MovePtr<Command>(DE_NULL);
6829 de::MovePtr<CmdCommand> createCmdCommand (de::Random& rng,
6836 case OP_BUFFER_FILL: return de::MovePtr<CmdCommand>(new FillBuffer(rng.getUint32()));
6837 case OP_BUFFER_UPDATE: return de::MovePtr<CmdCommand>(new UpdateBuffer(rng.getUint32()));
6838 case OP_BUFFER_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyToBuffer());
6839 case OP_BUFFER_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyFromBuffer(rng.getUint32()));
6841 case OP_BUFFER_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyToImage());
6842 case OP_BUFFER_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyFromImage(rng.getUint32()));
6844 case OP_IMAGE_TRANSITION_LAYOUT:
6846 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6847 DE_ASSERT(state.hasImage);
6848 DE_ASSERT(state.hasBoundImageMemory);
6850 const vk::VkImageLayout srcLayout = rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
6851 const vk::VkImageLayout dstLayout = getRandomNextLayout(rng, usage, srcLayout);
6853 vk::VkPipelineStageFlags dirtySrcStages;
6854 vk::VkAccessFlags dirtySrcAccesses;
6855 vk::VkPipelineStageFlags dirtyDstStages;
6856 vk::VkAccessFlags dirtyDstAccesses;
6858 vk::VkPipelineStageFlags srcStages;
6859 vk::VkAccessFlags srcAccesses;
6860 vk::VkPipelineStageFlags dstStages;
6861 vk::VkAccessFlags dstAccesses;
6863 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6865 // Try masking some random bits
6866 srcStages = dirtySrcStages;
6867 srcAccesses = dirtySrcAccesses;
6869 dstStages = state.cache.getAllowedStages() & rng.getUint32();
6870 dstAccesses = state.cache.getAllowedAcceses() & rng.getUint32();
6872 // If there are no bits in dst stage mask use all stages
6873 dstStages = dstStages ? dstStages : state.cache.getAllowedStages();
6876 srcStages = dstStages;
6878 return de::MovePtr<CmdCommand>(new ImageTransition(srcStages, srcAccesses, dstStages, dstAccesses, srcLayout, dstLayout));
6881 case OP_IMAGE_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyToBuffer(state.imageLayout));
6882 case OP_IMAGE_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyFromBuffer(rng.getUint32(), state.imageLayout));
6883 case OP_IMAGE_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyToImage(state.imageLayout));
6884 case OP_IMAGE_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyFromImage(rng.getUint32(), state.imageLayout));
6885 case OP_IMAGE_BLIT_TO_IMAGE:
6887 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
6888 return de::MovePtr<CmdCommand>(new ImageBlitToImage(scale, state.imageLayout));
6891 case OP_IMAGE_BLIT_FROM_IMAGE:
6893 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
6894 return de::MovePtr<CmdCommand>(new ImageBlitFromImage(rng.getUint32(), scale, state.imageLayout));
6897 case OP_PIPELINE_BARRIER_GLOBAL:
6898 case OP_PIPELINE_BARRIER_BUFFER:
6899 case OP_PIPELINE_BARRIER_IMAGE:
6901 vk::VkPipelineStageFlags dirtySrcStages;
6902 vk::VkAccessFlags dirtySrcAccesses;
6903 vk::VkPipelineStageFlags dirtyDstStages;
6904 vk::VkAccessFlags dirtyDstAccesses;
6906 vk::VkPipelineStageFlags srcStages;
6907 vk::VkAccessFlags srcAccesses;
6908 vk::VkPipelineStageFlags dstStages;
6909 vk::VkAccessFlags dstAccesses;
6911 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6913 // Try masking some random bits
6914 srcStages = dirtySrcStages & rng.getUint32();
6915 srcAccesses = dirtySrcAccesses & rng.getUint32();
6917 dstStages = dirtyDstStages & rng.getUint32();
6918 dstAccesses = dirtyDstAccesses & rng.getUint32();
6920 // If there are no bits in stage mask use the original dirty stages
6921 srcStages = srcStages ? srcStages : dirtySrcStages;
6922 dstStages = dstStages ? dstStages : dirtyDstStages;
6925 srcStages = dstStages;
6927 PipelineBarrier::Type type;
6929 if (op == OP_PIPELINE_BARRIER_IMAGE)
6930 type = PipelineBarrier::TYPE_IMAGE;
6931 else if (op == OP_PIPELINE_BARRIER_BUFFER)
6932 type = PipelineBarrier::TYPE_BUFFER;
6933 else if (op == OP_PIPELINE_BARRIER_GLOBAL)
6934 type = PipelineBarrier::TYPE_GLOBAL;
6937 type = PipelineBarrier::TYPE_LAST;
6938 DE_FATAL("Unknown op");
6941 if (type == PipelineBarrier::TYPE_IMAGE)
6942 return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::just(state.imageLayout)));
6944 return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::nothing<vk::VkImageLayout>()));
6948 DE_FATAL("Unknown op");
6949 return de::MovePtr<CmdCommand>(DE_NULL);
6953 de::MovePtr<RenderPassCommand> createRenderPassCommand (de::Random&,
6959 case OP_RENDER_VERTEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexBuffer());
6960 case OP_RENDER_INDEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderIndexBuffer());
6961 case OP_RENDER_VERTEX_UNIFORM_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexUniformBuffer());
6962 case OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexUniformTexelBuffer());
6963 case OP_RENDER_VERTEX_STORAGE_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderVertexStorageBuffer());
6966 DE_FATAL("Unknown op");
6967 return de::MovePtr<RenderPassCommand>(DE_NULL);
6971 de::MovePtr<CmdCommand> createRenderPassCommands (const Memory& memory,
6972 de::Random& nextOpRng,
6978 vector<RenderPassCommand*> commands;
6982 for (; opNdx < opCount; opNdx++)
6986 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6988 DE_ASSERT(!ops.empty());
6991 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6993 if (op == OP_RENDERPASS_END)
6999 de::Random rng (state.rng);
7001 commands.push_back(createRenderPassCommand(rng, state, op).release());
7002 applyOp(state, memory, op, usage);
7004 DE_ASSERT(state.rng == rng);
7009 applyOp(state, memory, OP_RENDERPASS_END, usage);
7010 return de::MovePtr<CmdCommand>(new SubmitRenderPass(commands));
7014 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
7015 delete commands[commandNdx];
7021 de::MovePtr<Command> createCmdCommands (const Memory& memory,
7022 de::Random& nextOpRng,
7028 vector<CmdCommand*> commands;
7032 for (; opNdx < opCount; opNdx++)
7036 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
7038 DE_ASSERT(!ops.empty());
7041 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
7043 if (op == OP_COMMAND_BUFFER_END)
7049 // \note Command needs to known the state before the operation
7050 if (op == OP_RENDERPASS_BEGIN)
7052 applyOp(state, memory, op, usage);
7053 commands.push_back(createRenderPassCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
7057 de::Random rng (state.rng);
7059 commands.push_back(createCmdCommand(rng, state, op, usage).release());
7060 applyOp(state, memory, op, usage);
7062 DE_ASSERT(state.rng == rng);
7069 applyOp(state, memory, OP_COMMAND_BUFFER_END, usage);
7070 return de::MovePtr<Command>(new SubmitCommandBuffer(commands));
7074 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
7075 delete commands[commandNdx];
7081 void createCommands (vector<Command*>& commands,
7083 const Memory& memory,
7085 vk::VkSharingMode sharingMode)
7087 const size_t opCount = 100;
7088 State state (usage, seed);
7089 // Used to select next operation only
7090 de::Random nextOpRng (seed ^ 12930809);
7092 commands.reserve(opCount);
7094 for (size_t opNdx = 0; opNdx < opCount; opNdx++)
7098 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
7100 DE_ASSERT(!ops.empty());
7103 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
7105 if (op == OP_COMMAND_BUFFER_BEGIN)
7107 applyOp(state, memory, op, usage);
7108 commands.push_back(createCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
7112 de::Random rng (state.rng);
7114 commands.push_back(createHostCommand(op, rng, usage, sharingMode).release());
7115 applyOp(state, memory, op, usage);
7117 // Make sure that random generator is in sync
7118 DE_ASSERT(state.rng == rng);
7123 // Clean up resources
7124 if (state.hasBuffer && state.hasImage)
7126 if (!state.queueIdle)
7127 commands.push_back(new QueueWaitIdle());
7129 if (state.hasBuffer)
7130 commands.push_back(new DestroyBuffer());
7133 commands.push_back(new DestroyImage());
7137 void testCommand (TestLog& log,
7138 tcu::ResultCollector& resultCollector,
7139 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection,
7140 const vk::InstanceInterface& vki,
7141 const vk::DeviceInterface& vkd,
7142 vk::VkPhysicalDevice physicalDevice,
7143 vk::VkDevice device,
7144 vk::VkDeviceSize size,
7145 deUint32 memoryTypeIndex,
7147 vk::VkSharingMode sharingMode,
7148 vk::VkQueue executionQueue,
7149 deUint32 executionQueueFamily,
7150 const vector<deUint32>& queueFamilies,
7151 const vk::VkDeviceSize maxBufferSize,
7152 const IVec2 maxImageSize)
7154 const deUint32 seed = 2830980989u;
7155 Memory memory (vki, vkd, physicalDevice, device, size, memoryTypeIndex, maxBufferSize, maxImageSize[0], maxImageSize[1]);
7156 vector<Command*> commands;
7157 vector<pair<deUint32, vk::VkQueue> > queues;
7161 log << TestLog::Message << "Create commands" << TestLog::EndMessage;
7162 createCommands(commands, seed, memory, usage, sharingMode);
7164 for (size_t queueNdx = 0; queueNdx < queueFamilies.size(); queueNdx++)
7168 vkd.getDeviceQueue(device, queueFamilies[queueNdx], 0, &queue);
7169 queues.push_back(std::make_pair(queueFamilies[queueNdx], queue));
7173 const tcu::ScopedLogSection section (log, "LogPrepare", "LogPrepare");
7175 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
7176 commands[cmdNdx]->logPrepare(log, cmdNdx);
7180 const tcu::ScopedLogSection section (log, "LogExecute", "LogExecute");
7182 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
7183 commands[cmdNdx]->logExecute(log, cmdNdx);
7187 const Context context (vki, vkd, physicalDevice, device, executionQueue, executionQueueFamily, queues, binaryCollection);
7192 PrepareContext prepareContext (context, memory);
7194 log << TestLog::Message << "Begin prepare" << TestLog::EndMessage;
7196 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
7198 Command& command = *commands[cmdNdx];
7202 command.prepare(prepareContext);
7206 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to prepare for execution");
7211 ExecuteContext executeContext (context);
7213 log << TestLog::Message << "Begin execution" << TestLog::EndMessage;
7215 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
7217 Command& command = *commands[cmdNdx];
7221 command.execute(executeContext);
7225 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to execute");
7230 VK_CHECK(vkd.deviceWaitIdle(device));
7234 const tcu::ScopedLogSection section (log, "Verify", "Verify");
7235 VerifyContext verifyContext (log, resultCollector, context, size);
7237 log << TestLog::Message << "Begin verify" << TestLog::EndMessage;
7239 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
7241 Command& command = *commands[cmdNdx];
7245 command.verify(verifyContext, cmdNdx);
7249 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed verification");
7255 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
7257 delete commands[commandNdx];
7258 commands[commandNdx] = DE_NULL;
7263 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
7265 delete commands[commandNdx];
7266 commands[commandNdx] = DE_NULL;
7275 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
7277 delete commands[commandNdx];
7278 commands[commandNdx] = DE_NULL;
7284 class MemoryTestInstance : public TestInstance
7288 MemoryTestInstance (::vkt::Context& context, const TestConfig& config);
7290 tcu::TestStatus iterate (void);
7293 const TestConfig m_config;
7294 const vk::VkPhysicalDeviceMemoryProperties m_memoryProperties;
7295 deUint32 m_memoryTypeNdx;
7296 tcu::ResultCollector m_resultCollector;
7299 MemoryTestInstance::MemoryTestInstance (::vkt::Context& context, const TestConfig& config)
7300 : TestInstance (context)
7302 , m_memoryProperties (vk::getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
7303 , m_memoryTypeNdx (0)
7304 , m_resultCollector (context.getTestContext().getLog())
7306 TestLog& log = context.getTestContext().getLog();
7308 const tcu::ScopedLogSection section (log, "TestCaseInfo", "Test Case Info");
7310 log << TestLog::Message << "Buffer size: " << config.size << TestLog::EndMessage;
7311 log << TestLog::Message << "Sharing: " << config.sharing << TestLog::EndMessage;
7312 log << TestLog::Message << "Access: " << config.usage << TestLog::EndMessage;
7316 const tcu::ScopedLogSection section (log, "MemoryProperties", "Memory Properties");
7318 for (deUint32 heapNdx = 0; heapNdx < m_memoryProperties.memoryHeapCount; heapNdx++)
7320 const tcu::ScopedLogSection heapSection (log, "Heap" + de::toString(heapNdx), "Heap " + de::toString(heapNdx));
7322 log << TestLog::Message << "Size: " << m_memoryProperties.memoryHeaps[heapNdx].size << TestLog::EndMessage;
7323 log << TestLog::Message << "Flags: " << m_memoryProperties.memoryHeaps[heapNdx].flags << TestLog::EndMessage;
7326 for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < m_memoryProperties.memoryTypeCount; memoryTypeNdx++)
7328 const tcu::ScopedLogSection memoryTypeSection (log, "MemoryType" + de::toString(memoryTypeNdx), "Memory type " + de::toString(memoryTypeNdx));
7330 log << TestLog::Message << "Properties: " << m_memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags << TestLog::EndMessage;
7331 log << TestLog::Message << "Heap: " << m_memoryProperties.memoryTypes[memoryTypeNdx].heapIndex << TestLog::EndMessage;
7336 tcu::TestStatus MemoryTestInstance::iterate (void)
7338 // \todo [2016-03-09 mika] Split different stages over multiple iterations
7339 if (m_memoryTypeNdx < m_memoryProperties.memoryTypeCount)
7341 TestLog& log = m_context.getTestContext().getLog();
7342 const tcu::ScopedLogSection section (log, "MemoryType" + de::toString(m_memoryTypeNdx), "Memory type " + de::toString(m_memoryTypeNdx));
7343 const vk::InstanceInterface& vki = m_context.getInstanceInterface();
7344 const vk::VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
7345 const vk::DeviceInterface& vkd = m_context.getDeviceInterface();
7346 const vk::VkDevice device = m_context.getDevice();
7347 const vk::VkQueue queue = m_context.getUniversalQueue();
7348 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
7349 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
7350 vector<deUint32> queues;
7352 queues.push_back(queueFamilyIndex);
7354 if (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)
7355 && !(memoryProperties.memoryTypes[m_memoryTypeNdx].propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
7357 log << TestLog::Message << "Memory type not supported" << TestLog::EndMessage;
7360 return tcu::TestStatus::incomplete();
7366 const vk::VkBufferUsageFlags bufferUsage = usageToBufferUsageFlags(m_config.usage);
7367 const vk::VkImageUsageFlags imageUsage = usageToImageUsageFlags(m_config.usage);
7368 const vk::VkDeviceSize maxBufferSize = bufferUsage != 0
7369 ? roundBufferSizeToWxHx4(findMaxBufferSize(vkd, device, bufferUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx))
7371 const IVec2 maxImageSize = imageUsage != 0
7372 ? findMaxRGBA8ImageSize(vkd, device, imageUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx)
7375 log << TestLog::Message << "Max buffer size: " << maxBufferSize << TestLog::EndMessage;
7376 log << TestLog::Message << "Max RGBA8 image size: " << maxImageSize << TestLog::EndMessage;
7378 // Skip tests if there are no supported operations
7379 if (maxBufferSize == 0
7380 && maxImageSize[0] == 0
7381 && (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)) == 0)
7383 log << TestLog::Message << "Skipping memory type. None of the usages are supported." << TestLog::EndMessage;
7387 testCommand(log, m_resultCollector, m_context.getBinaryCollection(), vki, vkd, physicalDevice, device, m_config.size, m_memoryTypeNdx, m_config.usage, m_config.sharing, queue, queueFamilyIndex, queues, maxBufferSize, maxImageSize);
7390 catch (const tcu::TestError& e)
7392 m_resultCollector.fail("Failed, got exception: " + string(e.getMessage()));
7396 return tcu::TestStatus::incomplete();
7400 return tcu::TestStatus(m_resultCollector.getResult(), m_resultCollector.getMessage());
7405 void init (vk::SourceCollections& sources, TestConfig config) const
7407 // Vertex buffer rendering
7408 if (config.usage & USAGE_VERTEX_BUFFER)
7410 const char* const vertexShader =
7412 "layout(location = 0) in highp vec2 a_position;\n"
7413 "void main (void) {\n"
7414 "\tgl_PointSize = 1.0;\n"
7415 "\tgl_Position = vec4(1.998 * a_position - vec2(0.999), 0.0, 1.0);\n"
7418 sources.glslSources.add("vertex-buffer.vert")
7419 << glu::VertexSource(vertexShader);
7422 // Index buffer rendering
7423 if (config.usage & USAGE_INDEX_BUFFER)
7425 const char* const vertexShader =
7428 "void main (void) {\n"
7429 "\tgl_PointSize = 1.0;\n"
7430 "\thighp vec2 pos = vec2(gl_VertexIndex % 256, gl_VertexIndex / 256) / vec2(255.0);\n"
7431 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
7434 sources.glslSources.add("index-buffer.vert")
7435 << glu::VertexSource(vertexShader);
7438 if (config.usage & USAGE_UNIFORM_BUFFER)
7440 std::ostringstream vertexShader;
7445 "layout(set=0, binding=0) uniform Block\n"
7447 "\thighp uvec4 values[" << de::toString<size_t>(MAX_UNIFORM_BUFFER_SIZE / (sizeof(deUint32) * 4)) << "];\n"
7449 "void main (void) {\n"
7450 "\tgl_PointSize = 1.0;\n"
7451 "\thighp uvec4 vecVal = block.values[gl_VertexIndex / 8];\n"
7452 "\thighp uint val;\n"
7453 "\tif (((gl_VertexIndex / 2) % 4 == 0))\n"
7454 "\t\tval = vecVal.x;\n"
7455 "\telse if (((gl_VertexIndex / 2) % 4 == 1))\n"
7456 "\t\tval = vecVal.y;\n"
7457 "\telse if (((gl_VertexIndex / 2) % 4 == 2))\n"
7458 "\t\tval = vecVal.z;\n"
7459 "\telse if (((gl_VertexIndex / 2) % 4 == 3))\n"
7460 "\t\tval = vecVal.w;\n"
7461 "\tif ((gl_VertexIndex % 2) == 0)\n"
7462 "\t\tval = val & 0xFFFFu;\n"
7464 "\t\tval = val >> 16u;\n"
7465 "\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
7466 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
7469 sources.glslSources.add("uniform-buffer.vert")
7470 << glu::VertexSource(vertexShader.str());
7473 if (config.usage & USAGE_STORAGE_BUFFER)
7475 // Vertex uniform buffer rendering
7476 const string vertexShader =
7479 "layout(set=0, binding=0) buffer Block\n"
7481 "\thighp uvec4 values[];\n"
7483 "void main (void) {\n"
7484 "\tgl_PointSize = 1.0;\n"
7485 "\thighp uvec4 vecVal = block.values[gl_VertexIndex / 8];\n"
7486 "\thighp uint val;\n"
7487 "\tif (((gl_VertexIndex / 2) % 4 == 0))\n"
7488 "\t\tval = vecVal.x;\n"
7489 "\telse if (((gl_VertexIndex / 2) % 4 == 1))\n"
7490 "\t\tval = vecVal.y;\n"
7491 "\telse if (((gl_VertexIndex / 2) % 4 == 2))\n"
7492 "\t\tval = vecVal.z;\n"
7493 "\telse if (((gl_VertexIndex / 2) % 4 == 3))\n"
7494 "\t\tval = vecVal.w;\n"
7495 "\tif ((gl_VertexIndex % 2) == 0)\n"
7496 "\t\tval = val & 0xFFFFu;\n"
7498 "\t\tval = val >> 16u;\n"
7499 "\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
7500 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
7503 sources.glslSources.add("storage-buffer.vert")
7504 << glu::VertexSource(vertexShader);
7507 if (config.usage & USAGE_UNIFORM_TEXEL_BUFFER)
7509 // Vertex uniform texel buffer rendering
7510 const char* const vertexShader =
7512 "#extension GL_EXT_texture_buffer : require\n"
7514 "layout(set=0, binding=0) uniform highp usamplerBuffer u_sampler;\n"
7515 "void main (void) {\n"
7516 "\tgl_PointSize = 1.0;\n"
7517 "\thighp uint val = texelFetch(u_sampler, gl_VertexIndex).x;\n"
7518 "\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
7519 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
7522 sources.glslSources.add("uniform-texel-buffer.vert")
7523 << glu::VertexSource(vertexShader);
7527 const char* const fragmentShader =
7529 "layout(location = 0) out highp vec4 o_color;\n"
7530 "void main (void) {\n"
7531 "\to_color = vec4(1.0);\n"
7534 sources.glslSources.add("render-white.frag")
7535 << glu::FragmentSource(fragmentShader);
7542 tcu::TestCaseGroup* createPipelineBarrierTests (tcu::TestContext& testCtx)
7544 de::MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "pipeline_barrier", "Pipeline barrier tests."));
7545 const vk::VkDeviceSize sizes[] =
7552 const Usage usages[] =
7558 USAGE_VERTEX_BUFFER,
7560 USAGE_UNIFORM_BUFFER,
7561 USAGE_UNIFORM_TEXEL_BUFFER,
7562 USAGE_STORAGE_BUFFER,
7564 const Usage readUsages[] =
7568 USAGE_VERTEX_BUFFER,
7570 USAGE_UNIFORM_BUFFER,
7571 USAGE_UNIFORM_TEXEL_BUFFER,
7572 USAGE_STORAGE_BUFFER,
7575 const Usage writeUsages[] =
7581 for (size_t writeUsageNdx = 0; writeUsageNdx < DE_LENGTH_OF_ARRAY(writeUsages); writeUsageNdx++)
7583 const Usage writeUsage = writeUsages[writeUsageNdx];
7585 for (size_t readUsageNdx = 0; readUsageNdx < DE_LENGTH_OF_ARRAY(readUsages); readUsageNdx++)
7587 const Usage readUsage = readUsages[readUsageNdx];
7588 const Usage usage = writeUsage | readUsage;
7589 const string usageGroupName (usageToName(usage));
7590 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
7592 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
7594 const vk::VkDeviceSize size = sizes[sizeNdx];
7595 const string testName (de::toString((deUint64)(size)));
7596 const TestConfig config =
7600 vk::VK_SHARING_MODE_EXCLUSIVE
7603 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
7606 group->addChild(usageGroup.get());
7607 usageGroup.release();
7612 Usage all = (Usage)0;
7614 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usages); usageNdx++)
7615 all = all | usages[usageNdx];
7618 const string usageGroupName ("all");
7619 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
7621 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
7623 const vk::VkDeviceSize size = sizes[sizeNdx];
7624 const string testName (de::toString((deUint64)(size)));
7625 const TestConfig config =
7629 vk::VK_SHARING_MODE_EXCLUSIVE
7632 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
7635 group->addChild(usageGroup.get());
7636 usageGroup.release();
7640 const string usageGroupName ("all_device");
7641 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
7643 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
7645 const vk::VkDeviceSize size = sizes[sizeNdx];
7646 const string testName (de::toString((deUint64)(size)));
7647 const TestConfig config =
7649 (Usage)(all & (~(USAGE_HOST_READ|USAGE_HOST_WRITE))),
7651 vk::VK_SHARING_MODE_EXCLUSIVE
7654 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
7657 group->addChild(usageGroup.get());
7658 usageGroup.release();
7662 return group.release();