1 /*-------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2015 Google Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
21 * \brief Pipeline barrier tests
22 *//*--------------------------------------------------------------------*/
24 #include "vktMemoryPipelineBarrierTests.hpp"
26 #include "vktTestCaseUtil.hpp"
29 #include "vkPlatform.hpp"
30 #include "vkRefUtil.hpp"
31 #include "vkQueryUtil.hpp"
32 #include "vkMemUtil.hpp"
33 #include "vkTypeUtil.hpp"
34 #include "vkPrograms.hpp"
36 #include "tcuMaybe.hpp"
37 #include "tcuTextureUtil.hpp"
38 #include "tcuTestLog.hpp"
39 #include "tcuResultCollector.hpp"
40 #include "tcuTexture.hpp"
41 #include "tcuImageCompare.hpp"
43 #include "deUniquePtr.hpp"
44 #include "deStringUtil.hpp"
45 #include "deRandom.hpp"
56 // \todo [2016-03-09 mika] Check bufferImageGranularity
70 using tcu::ConstPixelBufferAccess;
71 using tcu::PixelBufferAccess;
72 using tcu::TextureFormat;
73 using tcu::TextureLevel;
83 ALL_PIPELINE_STAGES = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
84 | vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
85 | vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT
86 | vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
87 | vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
88 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
89 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
90 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
91 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
92 | vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
93 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
94 | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
95 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
96 | vk::VK_PIPELINE_STAGE_TRANSFER_BIT
97 | vk::VK_PIPELINE_STAGE_HOST_BIT
102 ALL_ACCESSES = vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT
103 | vk::VK_ACCESS_INDEX_READ_BIT
104 | vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
105 | vk::VK_ACCESS_UNIFORM_READ_BIT
106 | vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
107 | vk::VK_ACCESS_SHADER_READ_BIT
108 | vk::VK_ACCESS_SHADER_WRITE_BIT
109 | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
110 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
111 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
112 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
113 | vk::VK_ACCESS_TRANSFER_READ_BIT
114 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
115 | vk::VK_ACCESS_HOST_READ_BIT
116 | vk::VK_ACCESS_HOST_WRITE_BIT
117 | vk::VK_ACCESS_MEMORY_READ_BIT
118 | vk::VK_ACCESS_MEMORY_WRITE_BIT
123 // Mapped host read and write
124 USAGE_HOST_READ = (0x1u<<0),
125 USAGE_HOST_WRITE = (0x1u<<1),
127 // Copy and other transfer operations
128 USAGE_TRANSFER_SRC = (0x1u<<2),
129 USAGE_TRANSFER_DST = (0x1u<<3),
131 // Buffer usage flags
132 USAGE_INDEX_BUFFER = (0x1u<<4),
133 USAGE_VERTEX_BUFFER = (0x1u<<5),
135 USAGE_UNIFORM_BUFFER = (0x1u<<6),
136 USAGE_STORAGE_BUFFER = (0x1u<<7),
138 USAGE_UNIFORM_TEXEL_BUFFER = (0x1u<<8),
139 USAGE_STORAGE_TEXEL_BUFFER = (0x1u<<9),
141 // \todo [2016-03-09 mika] This is probably almost impossible to do
142 USAGE_INDIRECT_BUFFER = (0x1u<<10),
144 // Texture usage flags
145 USAGE_TEXTURE_SAMPLED = (0x1u<<11),
146 USAGE_TEXTURE_STORAGE = (0x1u<<12),
147 USAGE_COLOR_ATTACHMENT = (0x1u<<13),
148 USAGE_INPUT_ATTACHMENT = (0x1u<<14),
149 USAGE_DEPTH_STENCIL_ATTACHMENT = (0x1u<<15),
152 bool supportsDeviceBufferWrites (Usage usage)
154 if (usage & USAGE_TRANSFER_DST)
157 if (usage & USAGE_STORAGE_BUFFER)
160 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
166 bool supportsDeviceImageWrites (Usage usage)
168 if (usage & USAGE_TRANSFER_DST)
171 if (usage & USAGE_TEXTURE_STORAGE)
174 if (usage & USAGE_COLOR_ATTACHMENT)
180 // Sequential access enums
183 ACCESS_INDIRECT_COMMAND_READ_BIT = 0,
184 ACCESS_INDEX_READ_BIT,
185 ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
186 ACCESS_UNIFORM_READ_BIT,
187 ACCESS_INPUT_ATTACHMENT_READ_BIT,
188 ACCESS_SHADER_READ_BIT,
189 ACCESS_SHADER_WRITE_BIT,
190 ACCESS_COLOR_ATTACHMENT_READ_BIT,
191 ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
192 ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
193 ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
194 ACCESS_TRANSFER_READ_BIT,
195 ACCESS_TRANSFER_WRITE_BIT,
196 ACCESS_HOST_READ_BIT,
197 ACCESS_HOST_WRITE_BIT,
198 ACCESS_MEMORY_READ_BIT,
199 ACCESS_MEMORY_WRITE_BIT,
204 // Sequential stage enums
207 PIPELINESTAGE_TOP_OF_PIPE_BIT = 0,
208 PIPELINESTAGE_BOTTOM_OF_PIPE_BIT,
209 PIPELINESTAGE_DRAW_INDIRECT_BIT,
210 PIPELINESTAGE_VERTEX_INPUT_BIT,
211 PIPELINESTAGE_VERTEX_SHADER_BIT,
212 PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT,
213 PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT,
214 PIPELINESTAGE_GEOMETRY_SHADER_BIT,
215 PIPELINESTAGE_FRAGMENT_SHADER_BIT,
216 PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT,
217 PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT,
218 PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
219 PIPELINESTAGE_COMPUTE_SHADER_BIT,
220 PIPELINESTAGE_TRANSFER_BIT,
221 PIPELINESTAGE_HOST_BIT,
226 PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flags)
230 case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT: return PIPELINESTAGE_TOP_OF_PIPE_BIT;
231 case vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT: return PIPELINESTAGE_BOTTOM_OF_PIPE_BIT;
232 case vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT: return PIPELINESTAGE_DRAW_INDIRECT_BIT;
233 case vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT: return PIPELINESTAGE_VERTEX_INPUT_BIT;
234 case vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT: return PIPELINESTAGE_VERTEX_SHADER_BIT;
235 case vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT;
236 case vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT;
237 case vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT: return PIPELINESTAGE_GEOMETRY_SHADER_BIT;
238 case vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT: return PIPELINESTAGE_FRAGMENT_SHADER_BIT;
239 case vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT;
240 case vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT;
241 case vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT: return PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
242 case vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT: return PIPELINESTAGE_COMPUTE_SHADER_BIT;
243 case vk::VK_PIPELINE_STAGE_TRANSFER_BIT: return PIPELINESTAGE_TRANSFER_BIT;
244 case vk::VK_PIPELINE_STAGE_HOST_BIT: return PIPELINESTAGE_HOST_BIT;
247 DE_FATAL("Unknown pipeline stage flags");
248 return PIPELINESTAGE_LAST;
252 Usage operator| (Usage a, Usage b)
254 return (Usage)((deUint32)a | (deUint32)b);
257 Usage operator& (Usage a, Usage b)
259 return (Usage)((deUint32)a & (deUint32)b);
// Builds a human-readable test-case name from a Usage bitmask by appending
// the name of every set usage bit.
// NOTE(review): this excerpt is missing several source lines (the usageNames
// struct/array declaration, separator handling between appended names, braces
// and the final return of stream.str()) — verify against the full file before
// restructuring.
262 string usageToName (Usage usage)
267 const char* const name;
270 { USAGE_HOST_READ, "host_read" },
271 { USAGE_HOST_WRITE, "host_write" },
273 { USAGE_TRANSFER_SRC, "transfer_src" },
274 { USAGE_TRANSFER_DST, "transfer_dst" },
276 { USAGE_INDEX_BUFFER, "index_buffer" },
277 { USAGE_VERTEX_BUFFER, "vertex_buffer" },
278 { USAGE_UNIFORM_BUFFER, "uniform_buffer" },
279 { USAGE_STORAGE_BUFFER, "storage_buffer" },
280 { USAGE_UNIFORM_TEXEL_BUFFER, "uniform_texel_buffer" },
281 { USAGE_STORAGE_TEXEL_BUFFER, "storage_texel_buffer" },
282 { USAGE_INDIRECT_BUFFER, "indirect_buffer" },
283 { USAGE_TEXTURE_SAMPLED, "sampled_texture" },
284 { USAGE_TEXTURE_STORAGE, "texture_storage" },
285 { USAGE_COLOR_ATTACHMENT, "color_attachment" },
286 { USAGE_INPUT_ATTACHMENT, "input_attachment" },
287 { USAGE_DEPTH_STENCIL_ATTACHMENT, "depth_stencil_attachment" },
290 std::ostringstream stream;
293 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usageNames); usageNdx++)
295 if (usage & usageNames[usageNdx].usage)
302 stream << usageNames[usageNdx].name;
309 vk::VkBufferUsageFlags usageToBufferUsageFlags (Usage usage)
311 vk::VkBufferUsageFlags flags = 0;
313 if (usage & USAGE_TRANSFER_SRC)
314 flags |= vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
316 if (usage & USAGE_TRANSFER_DST)
317 flags |= vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT;
319 if (usage & USAGE_INDEX_BUFFER)
320 flags |= vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
322 if (usage & USAGE_VERTEX_BUFFER)
323 flags |= vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
325 if (usage & USAGE_INDIRECT_BUFFER)
326 flags |= vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
328 if (usage & USAGE_UNIFORM_BUFFER)
329 flags |= vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
331 if (usage & USAGE_STORAGE_BUFFER)
332 flags |= vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
334 if (usage & USAGE_UNIFORM_TEXEL_BUFFER)
335 flags |= vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
337 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
338 flags |= vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
343 vk::VkImageUsageFlags usageToImageUsageFlags (Usage usage)
345 vk::VkImageUsageFlags flags = 0;
347 if (usage & USAGE_TRANSFER_SRC)
348 flags |= vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
350 if (usage & USAGE_TRANSFER_DST)
351 flags |= vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT;
353 if (usage & USAGE_TEXTURE_SAMPLED)
354 flags |= vk::VK_IMAGE_USAGE_SAMPLED_BIT;
356 if (usage & USAGE_TEXTURE_STORAGE)
357 flags |= vk::VK_IMAGE_USAGE_STORAGE_BIT;
359 if (usage & USAGE_COLOR_ATTACHMENT)
360 flags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
362 if (usage & USAGE_INPUT_ATTACHMENT)
363 flags |= vk::VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
365 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
366 flags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
371 vk::VkPipelineStageFlags usageToStageFlags (Usage usage)
373 vk::VkPipelineStageFlags flags = 0;
375 if (usage & (USAGE_HOST_READ|USAGE_HOST_WRITE))
376 flags |= vk::VK_PIPELINE_STAGE_HOST_BIT;
378 if (usage & (USAGE_TRANSFER_SRC|USAGE_TRANSFER_DST))
379 flags |= vk::VK_PIPELINE_STAGE_TRANSFER_BIT;
381 if (usage & (USAGE_VERTEX_BUFFER|USAGE_INDEX_BUFFER))
382 flags |= vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
384 if (usage & USAGE_INDIRECT_BUFFER)
385 flags |= vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
388 (USAGE_UNIFORM_BUFFER
389 | USAGE_STORAGE_BUFFER
390 | USAGE_UNIFORM_TEXEL_BUFFER
391 | USAGE_STORAGE_TEXEL_BUFFER
392 | USAGE_TEXTURE_SAMPLED
393 | USAGE_TEXTURE_STORAGE))
395 flags |= (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
396 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
397 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
398 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
399 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
400 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
403 if (usage & USAGE_INPUT_ATTACHMENT)
404 flags |= vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
406 if (usage & USAGE_COLOR_ATTACHMENT)
407 flags |= vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
409 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
411 flags |= vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
412 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
418 vk::VkAccessFlags usageToAccessFlags (Usage usage)
420 vk::VkAccessFlags flags = 0;
422 if (usage & USAGE_HOST_READ)
423 flags |= vk::VK_ACCESS_HOST_READ_BIT;
425 if (usage & USAGE_HOST_WRITE)
426 flags |= vk::VK_ACCESS_HOST_WRITE_BIT;
428 if (usage & USAGE_TRANSFER_SRC)
429 flags |= vk::VK_ACCESS_TRANSFER_READ_BIT;
431 if (usage & USAGE_TRANSFER_DST)
432 flags |= vk::VK_ACCESS_TRANSFER_WRITE_BIT;
434 if (usage & USAGE_INDEX_BUFFER)
435 flags |= vk::VK_ACCESS_INDEX_READ_BIT;
437 if (usage & USAGE_VERTEX_BUFFER)
438 flags |= vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
440 if (usage & (USAGE_UNIFORM_BUFFER | USAGE_UNIFORM_TEXEL_BUFFER))
441 flags |= vk::VK_ACCESS_UNIFORM_READ_BIT;
443 if (usage & (USAGE_STORAGE_BUFFER
444 | USAGE_STORAGE_TEXEL_BUFFER
445 | USAGE_TEXTURE_SAMPLED
446 | USAGE_TEXTURE_STORAGE))
447 flags |= vk::VK_ACCESS_SHADER_READ_BIT | vk::VK_ACCESS_SHADER_WRITE_BIT;
449 if (usage & USAGE_INDIRECT_BUFFER)
450 flags |= vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
452 if (usage & USAGE_COLOR_ATTACHMENT)
453 flags |= vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
455 if (usage & USAGE_INPUT_ATTACHMENT)
456 flags |= vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
458 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
459 flags |= vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
460 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
468 vk::VkDeviceSize size;
469 vk::VkSharingMode sharing;
// Allocates a single command buffer of the given level from 'pool'.
// NOTE(review): this excerpt is missing lines of the VkCommandBufferAllocateInfo
// initializer (pNext, commandPool, level, commandBufferCount) — consult the
// full file.
472 vk::Move<vk::VkCommandBuffer> createCommandBuffer (const vk::DeviceInterface& vkd,
474 vk::VkCommandPool pool,
475 vk::VkCommandBufferLevel level)
477 const vk::VkCommandBufferAllocateInfo bufferInfo =
479 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
487 return vk::allocateCommandBuffer(vkd, device, &bufferInfo);
// Allocates a command buffer and calls vkBeginCommandBuffer on it.
// Secondary-level buffers get an inheritance-info structure; primary buffers
// pass a null pInheritanceInfo.
// NOTE(review): several initializer fields of the inheritance/begin info
// structures are missing from this excerpt — consult the full file.
490 vk::Move<vk::VkCommandBuffer> createBeginCommandBuffer (const vk::DeviceInterface& vkd,
492 vk::VkCommandPool pool,
493 vk::VkCommandBufferLevel level)
495 const vk::VkCommandBufferInheritanceInfo inheritInfo =
497 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
506 const vk::VkCommandBufferBeginInfo beginInfo =
508 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
// Secondary command buffers must reference inheritance info; primaries must not.
511 (level == vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY ? &inheritInfo : (const vk::VkCommandBufferInheritanceInfo*)DE_NULL),
514 vk::Move<vk::VkCommandBuffer> commandBuffer (createCommandBuffer(vkd, device, pool, level));
516 vkd.beginCommandBuffer(*commandBuffer, &beginInfo);
518 return commandBuffer;
// Creates a command pool for 'queueFamilyIndex' with the reset-command-buffer
// flag so individual command buffers can be re-recorded.
521 vk::Move<vk::VkCommandPool> createCommandPool (const vk::DeviceInterface& vkd,
523 deUint32 queueFamilyIndex)
525 const vk::VkCommandPoolCreateInfo poolInfo =
527 vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
530 vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
534 return vk::createCommandPool(vkd, device, &poolInfo);
// Creates a buffer of 'size' bytes with the given usage and sharing mode,
// listing 'queueFamilies' as the owning families.
// NOTE(review): parts of the VkBufferCreateInfo initializer are missing from
// this excerpt — consult the full file.
537 vk::Move<vk::VkBuffer> createBuffer (const vk::DeviceInterface& vkd,
539 vk::VkDeviceSize size,
540 vk::VkBufferUsageFlags usage,
541 vk::VkSharingMode sharingMode,
542 const vector<deUint32>& queueFamilies)
544 const vk::VkBufferCreateInfo createInfo =
546 vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
553 (deUint32)queueFamilies.size(),
557 return vk::createBuffer(vkd, device, &createInfo);
// Allocates 'size' bytes of device memory from the given memory type.
560 vk::Move<vk::VkDeviceMemory> allocMemory (const vk::DeviceInterface& vkd,
562 vk::VkDeviceSize size,
563 deUint32 memoryTypeIndex)
565 const vk::VkMemoryAllocateInfo alloc =
567 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
574 return vk::allocateMemory(vkd, device, &alloc);
// Allocates memory compatible with 'buffer' (matching memoryTypeBits and the
// requested property flags) and binds it. Iterates over candidate memory
// types; an out-of-memory error from one type moves on to the next, any other
// error is rethrown. Fails the test if no type can satisfy the allocation.
577 vk::Move<vk::VkDeviceMemory> bindBufferMemory (const vk::InstanceInterface& vki,
578 const vk::DeviceInterface& vkd,
579 vk::VkPhysicalDevice physicalDevice,
582 vk::VkMemoryPropertyFlags properties)
584 const vk::VkMemoryRequirements memoryRequirements = vk::getBufferMemoryRequirements(vkd, device, buffer);
585 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
586 deUint32 memoryTypeIndex;
// Try each memory type that the buffer accepts and that has all requested properties.
588 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
590 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
591 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
595 const vk::VkMemoryAllocateInfo allocationInfo =
597 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
599 memoryRequirements.size,
602 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
604 VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0));
608 catch (const vk::Error& error)
610 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
611 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
613 // Try next memory type/heap if out of memory
617 // Throw all other errors forward
624 TCU_FAIL("Failed to allocate memory for buffer");
// Image counterpart of bindBufferMemory: allocates memory compatible with
// 'image' and binds it, retrying the next candidate memory type on
// out-of-memory and rethrowing any other error.
627 vk::Move<vk::VkDeviceMemory> bindImageMemory (const vk::InstanceInterface& vki,
628 const vk::DeviceInterface& vkd,
629 vk::VkPhysicalDevice physicalDevice,
632 vk::VkMemoryPropertyFlags properties)
634 const vk::VkMemoryRequirements memoryRequirements = vk::getImageMemoryRequirements(vkd, device, image);
635 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
636 deUint32 memoryTypeIndex;
// Try each memory type that the image accepts and that has all requested properties.
638 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
640 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
641 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
645 const vk::VkMemoryAllocateInfo allocationInfo =
647 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
649 memoryRequirements.size,
652 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
654 VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0));
658 catch (const vk::Error& error)
660 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
661 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
663 // Try next memory type/heap if out of memory
667 // Throw all other errors forward
674 TCU_FAIL("Failed to allocate memory for image")
// Submits a single command buffer to 'queue' and blocks until the queue is
// idle (no fence is used; synchronization is via vkQueueWaitIdle).
// NOTE(review): parts of the VkSubmitInfo initializer (semaphore counts,
// command-buffer pointer fields) are missing from this excerpt.
677 void queueRun (const vk::DeviceInterface& vkd,
679 vk::VkCommandBuffer commandBuffer)
681 const vk::VkSubmitInfo submitInfo =
683 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
688 (const vk::VkPipelineStageFlags*)DE_NULL,
697 VK_CHECK(vkd.queueSubmit(queue, 1, &submitInfo, 0));
698 VK_CHECK(vkd.queueWaitIdle(queue));
// Maps the first 'size' bytes of 'memory' into host address space and returns
// the pointer. VK_CHECK turns a mapping failure into a test exception.
// NOTE(review): the declaration of the local pointer and the return statement
// are missing from this excerpt.
701 void* mapMemory (const vk::DeviceInterface& vkd,
703 vk::VkDeviceMemory memory,
704 vk::VkDeviceSize size)
708 VK_CHECK(vkd.mapMemory(device, memory, 0, size, 0, &ptr));
// CPU-side reference model of the tested memory region. m_data holds the
// expected byte values and m_defined is a bitmask (one bit per byte, packed
// into 64-bit words) telling which bytes currently have a known value.
713 class ReferenceMemory
716 ReferenceMemory (size_t size);
// Set one byte to a known value / read it back / query whether it is known.
718 void set (size_t pos, deUint8 val);
719 deUint8 get (size_t pos) const;
720 bool isDefined (size_t pos) const;
// Bulk operations over a byte range.
722 void setDefined (size_t offset, size_t size, const void* data);
723 void setUndefined (size_t offset, size_t size);
724 void setData (size_t offset, size_t size, const void* data);
726 size_t getSize (void) const { return m_data.size(); }
729 vector<deUint8> m_data;
// One bit per byte of m_data; bit set == byte value is defined.
730 vector<deUint64> m_defined;
// Sizes the data vector and the defined-bit vector (rounding the bit-word
// count up) with all bytes initially undefined.
// NOTE(review): the first member initializer (m_data) is missing from this
// excerpt.
733 ReferenceMemory::ReferenceMemory (size_t size)
735 , m_defined (size / 64 + (size % 64 == 0 ? 0 : 1), 0ull)
// Stores 'val' at 'pos' and marks that byte as defined.
// NOTE(review): the line writing val into m_data[pos] is missing from this
// excerpt — only the defined-bit update is visible.
739 void ReferenceMemory::set (size_t pos, deUint8 val)
742 m_defined[pos / 64] |= 0x1ull << (pos % 64);
745 void ReferenceMemory::setData (size_t offset, size_t size, const void* data_)
747 const deUint8* data = (const deUint8*)data_;
749 // \todo [2016-03-09 mika] Optimize
750 for (size_t pos = 0; pos < size; pos++)
752 m_data[offset + pos] = data[pos];
753 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
757 void ReferenceMemory::setUndefined (size_t offset, size_t size)
759 // \todo [2016-03-09 mika] Optimize
760 for (size_t pos = 0; pos < size; pos++)
761 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
// Returns the expected value of the byte at 'pos'; asserts the byte has been
// defined first.
// NOTE(review): the return statement (return m_data[pos];) is missing from
// this excerpt.
764 deUint8 ReferenceMemory::get (size_t pos) const
766 DE_ASSERT(isDefined(pos));
770 bool ReferenceMemory::isDefined (size_t pos) const
772 return (m_defined[pos / 64] & (0x1ull << (pos % 64))) != 0;
// Wrapper owning one VkDeviceMemory allocation plus precomputed limits for
// the largest buffer and largest RGBA8 image that fit in it. A zero limit
// means that resource kind is not supported for this memory type.
778 Memory (const vk::InstanceInterface& vki,
779 const vk::DeviceInterface& vkd,
780 vk::VkPhysicalDevice physicalDevice,
782 vk::VkDeviceSize size,
783 deUint32 memoryTypeIndex,
784 vk::VkDeviceSize maxBufferSize,
785 deInt32 maxImageWidth,
786 deInt32 maxImageHeight);
788 vk::VkDeviceSize getSize (void) const { return m_size; }
789 vk::VkDeviceSize getMaxBufferSize (void) const { return m_maxBufferSize; }
790 bool getSupportBuffers (void) const { return m_maxBufferSize > 0; }
792 deInt32 getMaxImageWidth (void) const { return m_maxImageWidth; }
793 deInt32 getMaxImageHeight (void) const { return m_maxImageHeight; }
794 bool getSupportImages (void) const { return m_maxImageWidth > 0; }
796 const vk::VkMemoryType& getMemoryType (void) const { return m_memoryType; }
797 deUint32 getMemoryTypeIndex (void) const { return m_memoryTypeIndex; }
798 vk::VkDeviceMemory getMemory (void) const { return *m_memory; }
801 const vk::VkDeviceSize m_size;
802 const deUint32 m_memoryTypeIndex;
803 const vk::VkMemoryType m_memoryType;
804 const vk::Unique<vk::VkDeviceMemory> m_memory;
805 const vk::VkDeviceSize m_maxBufferSize;
806 const deInt32 m_maxImageWidth;
807 const deInt32 m_maxImageHeight;
810 vk::VkMemoryType getMemoryTypeInfo (const vk::InstanceInterface& vki,
811 vk::VkPhysicalDevice device,
812 deUint32 memoryTypeIndex)
814 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, device);
816 DE_ASSERT(memoryTypeIndex < memoryProperties.memoryTypeCount);
818 return memoryProperties.memoryTypes[memoryTypeIndex];
// Binary-searches for the largest buffer size whose memory requirements still
// fit in 'memorySize' bytes of the given memory type. First probes the full
// memory size, then halves/grows the candidate with a shrinking step.
// NOTE(review): braces, the early-return for the full-size probe, the
// stepSize==0 loop exit and the final return of lastSuccess are missing from
// this excerpt — confirm the loop termination against the full file.
821 vk::VkDeviceSize findMaxBufferSize (const vk::DeviceInterface& vkd,
824 vk::VkBufferUsageFlags usage,
825 vk::VkSharingMode sharingMode,
826 const vector<deUint32>& queueFamilies,
828 vk::VkDeviceSize memorySize,
829 deUint32 memoryTypeIndex)
831 vk::VkDeviceSize lastSuccess = 0;
832 vk::VkDeviceSize currentSize = memorySize / 2;
// Probe: does a buffer of exactly memorySize fit this memory type?
835 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, memorySize, usage, sharingMode, queueFamilies));
836 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
838 if (requirements.size == memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
// Binary search between 0 and memorySize with a halving step.
842 for (vk::VkDeviceSize stepSize = memorySize / 4; currentSize > 0; stepSize /= 2)
844 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, currentSize, usage, sharingMode, queueFamilies));
845 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
847 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
849 lastSuccess = currentSize;
850 currentSize += stepSize;
853 currentSize -= stepSize;
862 // Round size down maximum W * H * 4, where W and H < 4096
// i.e. find the largest byte count <= size expressible as an RGBA8 image
// (4 bytes/texel) with both dimensions under the 4096 texture-size cap.
// NOTE(review): the body of the inner if (updating bestW/bestH) is missing
// from this excerpt; also note bestW starts as de::max(maxTexelCount,
// maxTextureSize), which can exceed the 4096 cap — confirm intent against
// the full file.
863 vk::VkDeviceSize roundBufferSizeToWxHx4 (vk::VkDeviceSize size)
865 const vk::VkDeviceSize maxTextureSize = 4096;
866 vk::VkDeviceSize maxTexelCount = size / 4;
867 vk::VkDeviceSize bestW = de::max(maxTexelCount, maxTextureSize);
868 vk::VkDeviceSize bestH = maxTexelCount / bestW;
870 // \todo [2016-03-09 mika] Could probably be faster?
871 for (vk::VkDeviceSize w = 1; w * w < maxTexelCount && w < maxTextureSize && bestW * bestH * 4 < size; w++)
873 const vk::VkDeviceSize h = maxTexelCount / w;
875 if (bestW * bestH < w * h)
882 return bestW * bestH * 4;
885 // Find RGBA8 image size that has exactly "size" of number of bytes.
886 // "size" must be W * H * 4 where W and H < 4096
887 IVec2 findImageSizeWxHx4 (vk::VkDeviceSize size)
889 const vk::VkDeviceSize maxTextureSize = 4096;
890 vk::VkDeviceSize texelCount = size / 4;
892 DE_ASSERT((size % 4) == 0);
894 // \todo [2016-03-09 mika] Could probably be faster?
895 for (vk::VkDeviceSize w = 1; w < maxTextureSize && w < texelCount; w++)
897 const vk::VkDeviceSize h = texelCount / w;
899 if ((texelCount % w) == 0 && h < maxTextureSize)
900 return IVec2((int)w, (int)h);
903 DE_FATAL("Invalid size");
904 return IVec2(-1, -1);
// Searches for the largest RGBA8 optimal-tiling 2D image whose memory
// requirements still fit in 'memorySize' bytes of the given memory type.
// Starts from a near-square guess covering the whole memory and grows/shrinks
// both dimensions with a halving step.
// NOTE(review): braces, several VkImageCreateInfo fields, the stepSize==0
// loop exit and the final return of lastSuccess are missing from this
// excerpt — consult the full file.
907 IVec2 findMaxRGBA8ImageSize (const vk::DeviceInterface& vkd,
910 vk::VkImageUsageFlags usage,
911 vk::VkSharingMode sharingMode,
912 const vector<deUint32>& queueFamilies,
914 vk::VkDeviceSize memorySize,
915 deUint32 memoryTypeIndex)
917 IVec2 lastSuccess (0);
// Initial guess: near-square dimensions covering memorySize/4 texels.
921 const deUint32 texelCount = (deUint32)(memorySize / 4);
922 const deUint32 width = (deUint32)deFloatSqrt((float)texelCount);
923 const deUint32 height = texelCount / width;
925 currentSize[0] = deMaxu32(width, height);
926 currentSize[1] = deMinu32(width, height);
929 for (deInt32 stepSize = currentSize[0] / 2; currentSize[0] > 0; stepSize /= 2)
931 const vk::VkImageCreateInfo createInfo =
933 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
937 vk::VK_IMAGE_TYPE_2D,
938 vk::VK_FORMAT_R8G8B8A8_UNORM,
940 (deUint32)currentSize[0],
941 (deUint32)currentSize[1],
945 vk::VK_SAMPLE_COUNT_1_BIT,
946 vk::VK_IMAGE_TILING_OPTIMAL,
949 (deUint32)queueFamilies.size(),
951 vk::VK_IMAGE_LAYOUT_UNDEFINED
953 const vk::Unique<vk::VkImage> image (vk::createImage(vkd, device, &createInfo));
954 const vk::VkMemoryRequirements requirements (vk::getImageMemoryRequirements(vkd, device, *image));
956 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
958 lastSuccess = currentSize;
959 currentSize[0] += stepSize;
960 currentSize[1] += stepSize;
964 currentSize[0] -= stepSize;
965 currentSize[1] -= stepSize;
// Allocates the backing VkDeviceMemory and caches the memory-type info plus
// the precomputed buffer/image limits.
// NOTE(review): the first member initializer (m_size) is missing from this
// excerpt.
975 Memory::Memory (const vk::InstanceInterface& vki,
976 const vk::DeviceInterface& vkd,
977 vk::VkPhysicalDevice physicalDevice,
979 vk::VkDeviceSize size,
980 deUint32 memoryTypeIndex,
981 vk::VkDeviceSize maxBufferSize,
982 deInt32 maxImageWidth,
983 deInt32 maxImageHeight)
985 , m_memoryTypeIndex (memoryTypeIndex)
986 , m_memoryType (getMemoryTypeInfo(vki, physicalDevice, memoryTypeIndex))
987 , m_memory (allocMemory(vkd, device, size, memoryTypeIndex))
988 , m_maxBufferSize (maxBufferSize)
989 , m_maxImageWidth (maxImageWidth)
990 , m_maxImageHeight (maxImageHeight)
// Bundles the per-test Vulkan environment: instance/device interfaces,
// queues, a command pool and the compiled shader binaries. Purely
// non-owning references except for the command pool.
// NOTE(review): the class header and some member initializers are missing
// from this excerpt.
997 Context (const vk::InstanceInterface& vki,
998 const vk::DeviceInterface& vkd,
999 vk::VkPhysicalDevice physicalDevice,
1000 vk::VkDevice device,
1002 deUint32 queueFamilyIndex,
1003 const vector<pair<deUint32, vk::VkQueue> >& queues,
1004 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection)
1007 , m_physicalDevice (physicalDevice)
1010 , m_queueFamilyIndex (queueFamilyIndex)
1012 , m_commandPool (createCommandPool(vkd, device, queueFamilyIndex))
1013 , m_binaryCollection (binaryCollection)
// Cache the family index of every queue for buffer/image sharing setup.
1015 for (size_t queueNdx = 0; queueNdx < m_queues.size(); queueNdx++)
1016 m_queueFamilies.push_back(m_queues[queueNdx].first);
1019 const vk::InstanceInterface& getInstanceInterface (void) const { return m_vki; }
1020 vk::VkPhysicalDevice getPhysicalDevice (void) const { return m_physicalDevice; }
1021 vk::VkDevice getDevice (void) const { return m_device; }
1022 const vk::DeviceInterface& getDeviceInterface (void) const { return m_vkd; }
1023 vk::VkQueue getQueue (void) const { return m_queue; }
1024 deUint32 getQueueFamily (void) const { return m_queueFamilyIndex; }
1025 const vector<pair<deUint32, vk::VkQueue> >& getQueues (void) const { return m_queues; }
1026 const vector<deUint32> getQueueFamilies (void) const { return m_queueFamilies; }
1027 vk::VkCommandPool getCommandPool (void) const { return *m_commandPool; }
1028 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_binaryCollection; }
1031 const vk::InstanceInterface& m_vki;
1032 const vk::DeviceInterface& m_vkd;
1033 const vk::VkPhysicalDevice m_physicalDevice;
1034 const vk::VkDevice m_device;
1035 const vk::VkQueue m_queue;
1036 const deUint32 m_queueFamilyIndex;
1037 const vector<pair<deUint32, vk::VkQueue> >& m_queues;
1038 const vk::Unique<vk::VkCommandPool> m_commandPool;
1039 const vk::ProgramCollection<vk::ProgramBinary>& m_binaryCollection;
1040 vector<deUint32> m_queueFamilies;
// State shared between commands during the prepare phase. Tracks the single
// current buffer OR image (never both at once — enforced by the asserts in
// setBuffer/setImage) together with its dimensions/size and layout.
1043 class PrepareContext
1046 PrepareContext (const Context& context,
1047 const Memory& memory)
1048 : m_context (context)
1053 const Memory& getMemory (void) const { return m_memory; }
1054 const Context& getContext (void) const { return m_context; }
1055 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_context.getBinaryCollection(); }
// Takes ownership of 'buffer'; only valid when no buffer or image is active.
1057 void setBuffer (vk::Move<vk::VkBuffer> buffer,
1058 vk::VkDeviceSize size)
1060 DE_ASSERT(!m_currentImage);
1061 DE_ASSERT(!m_currentBuffer);
1063 m_currentBuffer = buffer;
1064 m_currentBufferSize = size;
1067 vk::VkBuffer getBuffer (void) const { return *m_currentBuffer; }
1068 vk::VkDeviceSize getBufferSize (void) const
1070 DE_ASSERT(m_currentBuffer);
1071 return m_currentBufferSize;
// disown() releases the handle without destroying it — the consuming command
// takes over destruction responsibility.
1074 void releaseBuffer (void) { m_currentBuffer.disown(); }
// Takes ownership of 'image'; only valid when no buffer or image is active.
1076 void setImage (vk::Move<vk::VkImage> image,
1077 vk::VkImageLayout layout,
1078 vk::VkDeviceSize memorySize,
1082 DE_ASSERT(!m_currentImage);
1083 DE_ASSERT(!m_currentBuffer);
1085 m_currentImage = image;
1086 m_currentImageMemorySize = memorySize;
1087 m_currentImageLayout = layout;
1088 m_currentImageWidth = width;
1089 m_currentImageHeight = height;
1092 void setImageLayout (vk::VkImageLayout layout)
1094 DE_ASSERT(m_currentImage);
1095 m_currentImageLayout = layout;
1098 vk::VkImage getImage (void) const { return *m_currentImage; }
1099 deInt32 getImageWidth (void) const
1101 DE_ASSERT(m_currentImage);
1102 return m_currentImageWidth;
1104 deInt32 getImageHeight (void) const
1106 DE_ASSERT(m_currentImage);
1107 return m_currentImageHeight;
1109 vk::VkDeviceSize getImageMemorySize (void) const
1111 DE_ASSERT(m_currentImage);
1112 return m_currentImageMemorySize;
1115 void releaseImage (void) { m_currentImage.disown(); }
1117 vk::VkImageLayout getImageLayout (void) const
1119 DE_ASSERT(m_currentImage);
1120 return m_currentImageLayout;
1124 const Context& m_context;
1125 const Memory& m_memory;
1127 vk::Move<vk::VkBuffer> m_currentBuffer;
1128 vk::VkDeviceSize m_currentBufferSize;
1130 vk::Move<vk::VkImage> m_currentImage;
1131 vk::VkDeviceSize m_currentImageMemorySize;
1132 vk::VkImageLayout m_currentImageLayout;
1133 deInt32 m_currentImageWidth;
1134 deInt32 m_currentImageHeight;
// State shared between commands during the execute phase: the Context plus
// the current host mapping of the tested memory (null when unmapped).
// NOTE(review): the m_mapping member declaration (and its initialization)
// is missing from this excerpt.
1137 class ExecuteContext
1140 ExecuteContext (const Context& context)
1141 : m_context (context)
1145 const Context& getContext (void) const { return m_context; }
1146 void setMapping (void* ptr) { m_mapping = ptr; }
1147 void* getMapping (void) const { return m_mapping; }
1150 const Context& m_context;
// State shared between commands during the verify phase: logging, result
// collection, the CPU-side reference memory and a reference image for
// rendering comparisons.
// NOTE(review): the class header and the m_log initializer are missing from
// this excerpt.
1157 VerifyContext (TestLog& log,
1158 tcu::ResultCollector& resultCollector,
1159 const Context& context,
1160 vk::VkDeviceSize size)
1162 , m_resultCollector (resultCollector)
1163 , m_context (context)
1164 , m_reference ((size_t)size)
1168 const Context& getContext (void) const { return m_context; }
1169 TestLog& getLog (void) const { return m_log; }
1170 tcu::ResultCollector& getResultCollector (void) const { return m_resultCollector; }
1172 ReferenceMemory& getReference (void) { return m_reference; }
1173 TextureLevel& getReferenceImage (void) { return m_referenceImage;}
1177 tcu::ResultCollector& m_resultCollector;
1178 const Context& m_context;
1179 ReferenceMemory m_reference;
1180 TextureLevel m_referenceImage;
// Abstract base class for test commands. The lifecycle is:
// prepare() (allocate Vulkan resources, record command buffers) ->
// execute() (touch mapped memory / submit work) -> verify() (check results
// against the reference), with logPrepare()/logExecute() describing each
// phase in the test log.
// NOTE(review): the 'class Command' header line is missing from this excerpt.
1186 // Constructor should allocate all non-vulkan resources.
1187 virtual ~Command (void) {}
1189 // Get name of the command
1190 virtual const char* getName (void) const = 0;
1192 // Log prepare operations
1193 virtual void logPrepare (TestLog&, size_t) const {}
1194 // Log executed operations
1195 virtual void logExecute (TestLog&, size_t) const {}
1197 // Prepare should allocate all vulkan resources and resources that require
1198 // that buffer or memory has been already allocated. This should build all
1199 // command buffers etc.
1200 virtual void prepare (PrepareContext&) {}
1202 // Execute command. Write or read mapped memory, submit commands to queue
1204 virtual void execute (ExecuteContext&) {}
1206 // Verify that results are correct.
1207 virtual void verify (VerifyContext&, size_t) {}
1210 // Allow only inheritance
1215 Command (const Command&);
// NOTE(review): 'operator&' below looks like a typo for 'operator=' — as
// written the copy-assignment operator is NOT disabled; confirm against the
// full file.
1216 Command& operator& (const Command&);
// Command that maps the whole tested memory allocation into host address
// space and publishes the pointer via ExecuteContext::setMapping().
1219 class Map : public Command
1224 const char* getName (void) const { return "Map"; }
1227 void logExecute (TestLog& log, size_t commandIndex) const
1229 log << TestLog::Message << commandIndex << ":" << getName() << " Map memory" << TestLog::EndMessage;
// Capture the memory handle and size; no Vulkan objects are created here.
1232 void prepare (PrepareContext& context)
1234 m_memory = context.getMemory().getMemory();
1235 m_size = context.getMemory().getSize();
1238 void execute (ExecuteContext& context)
1240 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1241 const vk::VkDevice device = context.getContext().getDevice();
1243 context.setMapping(mapMemory(vkd, device, m_memory, m_size));
1247 vk::VkDeviceMemory m_memory;
1248 vk::VkDeviceSize m_size;
// Command that unmaps the tested memory allocation and clears the shared
// mapping pointer so later commands cannot use a stale host pointer.
1251 class UnMap : public Command
1256 const char* getName (void) const { return "UnMap"; }
1258 void logExecute (TestLog& log, size_t commandIndex) const
1260 log << TestLog::Message << commandIndex << ": Unmap memory" << TestLog::EndMessage;
1263 void prepare (PrepareContext& context)
1265 m_memory = context.getMemory().getMemory();
1268 void execute (ExecuteContext& context)
1270 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1271 const vk::VkDevice device = context.getContext().getDevice();
1273 vkd.unmapMemory(device, m_memory);
// Invalidate the cached mapping so subsequent host-access commands fail fast.
1274 context.setMapping(DE_NULL);
1278 vk::VkDeviceMemory m_memory;
// Command that invalidates the entire mapped memory range so device writes
// become visible to the host (required for non-coherent memory).
1281 class Invalidate : public Command
1284 Invalidate (void) {}
1285 ~Invalidate (void) {}
1286 const char* getName (void) const { return "Invalidate"; }
1288 void logExecute (TestLog& log, size_t commandIndex) const
1290 log << TestLog::Message << commandIndex << ": Invalidate mapped memory" << TestLog::EndMessage;
1293 void prepare (PrepareContext& context)
1295 m_memory = context.getMemory().getMemory();
1296 m_size = context.getMemory().getSize();
1299 void execute (ExecuteContext& context)
1301 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1302 const vk::VkDevice device = context.getContext().getDevice();
// Whole-allocation range: offset 0, length m_size.
1304 vk::invalidateMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1308 vk::VkDeviceMemory m_memory;
1309 vk::VkDeviceSize m_size;
// Command that flushes the entire mapped memory range so host writes become
// visible to the device (required for non-coherent memory).
1312 class Flush : public Command
1317 const char* getName (void) const { return "Flush"; }
1319 void logExecute (TestLog& log, size_t commandIndex) const
1321 log << TestLog::Message << commandIndex << ": Flush mapped memory" << TestLog::EndMessage;
1324 void prepare (PrepareContext& context)
1326 m_memory = context.getMemory().getMemory();
1327 m_size = context.getMemory().getSize();
1330 void execute (ExecuteContext& context)
1332 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1333 const vk::VkDevice device = context.getContext().getDevice();
// Whole-allocation range: offset 0, length m_size.
1335 vk::flushMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1339 vk::VkDeviceMemory m_memory;
1340 vk::VkDeviceSize m_size;
1343 // Host memory reads and writes
// Reads and/or writes the mapped memory through the host pointer using a
// seeded PRNG, then replays the same PRNG sequence in verify() to check the
// observed bytes against the ReferenceMemory model.
1344 class HostMemoryAccess : public Command
1347 HostMemoryAccess (bool read, bool write, deUint32 seed);
1348 ~HostMemoryAccess (void) {}
1349 const char* getName (void) const { return "HostMemoryAccess"; }
1351 void logExecute (TestLog& log, size_t commandIndex) const;
1352 void prepare (PrepareContext& context);
1353 void execute (ExecuteContext& context);
1355 void verify (VerifyContext& context, size_t commandIndex);
1360 const deUint32 m_seed;
// Bytes read back during execute(); compared against the reference in verify().
1363 vector<deUint8> m_readData;
1366 HostMemoryAccess::HostMemoryAccess (bool read, bool write, deUint32 seed)
1373 void HostMemoryAccess::logExecute (TestLog& log, size_t commandIndex) const
1375 log << TestLog::Message << commandIndex << ": Host memory access:" << (m_read ? " read" : "") << (m_write ? " write" : "") << ", seed: " << m_seed << TestLog::EndMessage;
1378 void HostMemoryAccess::prepare (PrepareContext& context)
1380 m_size = (size_t)context.getMemory().getSize();
1383 m_readData.resize(m_size, 0);
1386 void HostMemoryAccess::execute (ExecuteContext& context)
// Same seed is reused in verify(); determinism of de::Random is what makes
// the verification below possible.
1388 de::Random rng (m_seed);
1389 deUint8* const ptr = (deUint8*)context.getMapping();
1391 if (m_read && m_write)
// Read-modify-write: record the old byte, then XOR it with a random mask.
1393 for (size_t pos = 0; pos < m_size; pos++)
1395 const deUint8 mask = rng.getUint8();
1396 const deUint8 value = ptr[pos];
1398 m_readData[pos] = value;
1399 ptr[pos] = value ^ mask;
// Read-only branch (the else-if line falls between the visible lines).
1404 for (size_t pos = 0; pos < m_size; pos++)
1406 const deUint8 value = ptr[pos];
1408 m_readData[pos] = value;
// Write-only branch: fill memory with the seeded random byte stream.
1413 for (size_t pos = 0; pos < m_size; pos++)
1415 const deUint8 value = rng.getUint8();
1421 DE_FATAL("Host memory access without read or write.");
1424 void HostMemoryAccess::verify (VerifyContext& context, size_t commandIndex)
1426 tcu::ResultCollector& resultCollector = context.getResultCollector();
1427 ReferenceMemory& reference = context.getReference();
// Replays the identical PRNG sequence used in execute().
1428 de::Random rng (m_seed);
1430 if (m_read && m_write)
1432 for (size_t pos = 0; pos < m_size; pos++)
1434 const deUint8 mask = rng.getUint8();
1435 const deUint8 value = m_readData[pos];
// Only bytes with a defined reference value can be checked; undefined bytes
// (e.g. after an image layout transition) are skipped.
1437 if (reference.isDefined(pos))
1439 if (value != reference.get(pos))
1441 resultCollector.fail(
1442 de::toString(commandIndex) + ":" + getName()
1443 + " Result differs from reference, Expected: "
1444 + de::toString(tcu::toHex<8>(reference.get(pos)))
1446 + de::toString(tcu::toHex<8>(value))
1448 + de::toString(pos));
// Mirror the XOR write performed in execute() into the reference model.
1452 reference.set(pos, reference.get(pos) ^ mask);
// Read-only: check read-back bytes, reference is left unchanged.
1458 for (size_t pos = 0; pos < m_size; pos++)
1460 const deUint8 value = m_readData[pos];
1462 if (reference.isDefined(pos))
1464 if (value != reference.get(pos))
1466 resultCollector.fail(
1467 de::toString(commandIndex) + ":" + getName()
1468 + " Result differs from reference, Expected: "
1469 + de::toString(tcu::toHex<8>(reference.get(pos)))
1471 + de::toString(tcu::toHex<8>(value))
1473 + de::toString(pos));
// Write-only: update the reference model with the random byte stream.
1481 for (size_t pos = 0; pos < m_size; pos++)
1483 const deUint8 value = rng.getUint8();
1485 reference.set(pos, value);
1489 DE_FATAL("Host memory access without read or write.");
// Command that creates the test buffer (sized to the largest buffer the
// memory allocation supports) and hands it to the PrepareContext.
1492 class CreateBuffer : public Command
1495 CreateBuffer (vk::VkBufferUsageFlags usage,
1496 vk::VkSharingMode sharing);
1497 ~CreateBuffer (void) {}
1498 const char* getName (void) const { return "CreateBuffer"; }
1500 void logPrepare (TestLog& log, size_t commandIndex) const;
1501 void prepare (PrepareContext& context);
1504 const vk::VkBufferUsageFlags m_usage;
1505 const vk::VkSharingMode m_sharing;
1508 CreateBuffer::CreateBuffer (vk::VkBufferUsageFlags usage,
1509 vk::VkSharingMode sharing)
1511 , m_sharing (sharing)
1515 void CreateBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1517 log << TestLog::Message << commandIndex << ":" << getName() << " Create buffer, Sharing mode: " << m_sharing << ", Usage: " << vk::getBufferUsageFlagsStr(m_usage) << TestLog::EndMessage;
1520 void CreateBuffer::prepare (PrepareContext& context)
1522 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1523 const vk::VkDevice device = context.getContext().getDevice();
// Largest buffer size that still fits the memory allocation under test.
1524 const vk::VkDeviceSize bufferSize = context.getMemory().getMaxBufferSize();
1525 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Ownership of the created buffer is transferred to the PrepareContext.
1527 context.setBuffer(createBuffer(vkd, device, bufferSize, m_usage, m_sharing, queueFamilies), bufferSize);
// Command that takes ownership of the context's buffer in prepare() and
// destroys it during execute().
1530 class DestroyBuffer : public Command
1533 DestroyBuffer (void);
1534 ~DestroyBuffer (void) {}
1535 const char* getName (void) const { return "DestroyBuffer"; }
1537 void logExecute (TestLog& log, size_t commandIndex) const;
1538 void prepare (PrepareContext& context);
1539 void execute (ExecuteContext& context);
1542 vk::Move<vk::VkBuffer> m_buffer;
1545 DestroyBuffer::DestroyBuffer (void)
1549 void DestroyBuffer::prepare (PrepareContext& context)
// Wrap the context's buffer handle in a Move<> so this command owns it, then
// detach it from the context so nothing else destroys it.
1551 m_buffer = vk::Move<vk::VkBuffer>(vk::check(context.getBuffer()), vk::Deleter<vk::VkBuffer>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1552 context.releaseBuffer();
1555 void DestroyBuffer::logExecute (TestLog& log, size_t commandIndex) const
1557 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy buffer" << TestLog::EndMessage;
1560 void DestroyBuffer::execute (ExecuteContext& context)
1562 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1563 const vk::VkDevice device = context.getContext().getDevice();
// disown() prevents the Move<> deleter from running a second time.
1565 vkd.destroyBuffer(device, m_buffer.disown(), DE_NULL);
// Command that binds the tested memory allocation to the context's buffer at
// offset 0 during prepare().
1568 class BindBufferMemory : public Command
1571 BindBufferMemory (void) {}
1572 ~BindBufferMemory (void) {}
1573 const char* getName (void) const { return "BindBufferMemory"; }
1575 void logPrepare (TestLog& log, size_t commandIndex) const;
1576 void prepare (PrepareContext& context);
1579 void BindBufferMemory::logPrepare (TestLog& log, size_t commandIndex) const
1581 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to buffer" << TestLog::EndMessage;
1584 void BindBufferMemory::prepare (PrepareContext& context)
1586 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1587 const vk::VkDevice device = context.getContext().getDevice();
1589 VK_CHECK(vkd.bindBufferMemory(device, context.getBuffer(), context.getMemory().getMemory(), 0));
// Command that creates a 2D RGBA8 optimal-tiling image sized to the largest
// dimensions the memory allocation supports, and resets the reference image
// in verify() to match.
1592 class CreateImage : public Command
1595 CreateImage (vk::VkImageUsageFlags usage,
1596 vk::VkSharingMode sharing);
1597 ~CreateImage (void) {}
1598 const char* getName (void) const { return "CreateImage"; }
1600 void logPrepare (TestLog& log, size_t commandIndex) const;
1601 void prepare (PrepareContext& context);
1602 void verify (VerifyContext& context, size_t commandIndex);
1605 const vk::VkImageUsageFlags m_usage;
1606 const vk::VkSharingMode m_sharing;
1607 deInt32 m_imageWidth;
1608 deInt32 m_imageHeight;
1611 CreateImage::CreateImage (vk::VkImageUsageFlags usage,
1612 vk::VkSharingMode sharing)
1614 , m_sharing (sharing)
1618 void CreateImage::logPrepare (TestLog& log, size_t commandIndex) const
1620 log << TestLog::Message << commandIndex << ":" << getName() << " Create image, sharing: " << m_sharing << ", usage: " << vk::getImageUsageFlagsStr(m_usage) << TestLog::EndMessage;
1623 void CreateImage::prepare (PrepareContext& context)
1625 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1626 const vk::VkDevice device = context.getContext().getDevice();
1627 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Maximum image dimensions that still fit the memory allocation under test.
1629 m_imageWidth = context.getMemory().getMaxImageWidth();
1630 m_imageHeight = context.getMemory().getMaxImageHeight();
// Designated VkImageCreateInfo (some fields fall outside this excerpt).
1633 const vk::VkImageCreateInfo createInfo =
1635 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
1639 vk::VK_IMAGE_TYPE_2D,
1640 vk::VK_FORMAT_R8G8B8A8_UNORM,
1642 (deUint32)m_imageWidth,
1643 (deUint32)m_imageHeight,
1647 vk::VK_SAMPLE_COUNT_1_BIT,
1648 vk::VK_IMAGE_TILING_OPTIMAL,
1651 (deUint32)queueFamilies.size(),
1653 vk::VK_IMAGE_LAYOUT_UNDEFINED
1655 vk::Move<vk::VkImage> image (createImage(vkd, device, &createInfo));
1656 const vk::VkMemoryRequirements requirements = vk::getImageMemoryRequirements(vkd, device, *image);
// Ownership of the image moves into the PrepareContext; layout starts UNDEFINED.
1658 context.setImage(image, vk::VK_IMAGE_LAYOUT_UNDEFINED, requirements.size, m_imageWidth, m_imageHeight);
1662 void CreateImage::verify (VerifyContext& context, size_t)
// Re-create the CPU-side reference image with matching format and size.
1664 context.getReferenceImage() = TextureLevel(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight);
// Command that takes ownership of the context's image in prepare() and
// destroys it during execute(). Mirrors DestroyBuffer.
1667 class DestroyImage : public Command
1670 DestroyImage (void);
1671 ~DestroyImage (void) {}
1672 const char* getName (void) const { return "DestroyImage"; }
1674 void logExecute (TestLog& log, size_t commandIndex) const;
1675 void prepare (PrepareContext& context);
1676 void execute (ExecuteContext& context);
1679 vk::Move<vk::VkImage> m_image;
1682 DestroyImage::DestroyImage (void)
1686 void DestroyImage::prepare (PrepareContext& context)
// Take ownership of the image handle and detach it from the context.
1688 m_image = vk::Move<vk::VkImage>(vk::check(context.getImage()), vk::Deleter<vk::VkImage>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1689 context.releaseImage();
1693 void DestroyImage::logExecute (TestLog& log, size_t commandIndex) const
1695 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy image" << TestLog::EndMessage;
1698 void DestroyImage::execute (ExecuteContext& context)
1700 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1701 const vk::VkDevice device = context.getContext().getDevice();
// disown() prevents the Move<> deleter from running a second time.
1703 vkd.destroyImage(device, m_image.disown(), DE_NULL);
// Command that binds the tested memory allocation to the context's image at
// offset 0 during prepare(). Mirrors BindBufferMemory.
1706 class BindImageMemory : public Command
1709 BindImageMemory (void) {}
1710 ~BindImageMemory (void) {}
1711 const char* getName (void) const { return "BindImageMemory"; }
1713 void logPrepare (TestLog& log, size_t commandIndex) const;
1714 void prepare (PrepareContext& context);
1717 void BindImageMemory::logPrepare (TestLog& log, size_t commandIndex) const
1719 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to image" << TestLog::EndMessage;
1722 void BindImageMemory::prepare (PrepareContext& context)
1724 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1725 const vk::VkDevice device = context.getContext().getDevice();
1727 VK_CHECK(vkd.bindImageMemory(device, context.getImage(), context.getMemory().getMemory(), 0));
// Command that waits for the test queue to become idle (vkQueueWaitIdle)
// during execute(); a full synchronization point between earlier submissions
// and later host access.
1730 class QueueWaitIdle : public Command
1733 QueueWaitIdle (void) {}
1734 ~QueueWaitIdle (void) {}
// NOTE(review): fixed misspelled name "QueuetWaitIdle"; the string is only
// observed through getName() in log output (see logExecute below).
1735 const char* getName (void) const { return "QueueWaitIdle"; }
1737 void logExecute (TestLog& log, size_t commandIndex) const;
1738 void execute (ExecuteContext& context);
1741 void QueueWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1743 log << TestLog::Message << commandIndex << ":" << getName() << " Queue wait idle" << TestLog::EndMessage;
1746 void QueueWaitIdle::execute (ExecuteContext& context)
1748 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1749 const vk::VkQueue queue = context.getContext().getQueue();
// VK_CHECK converts a failing VkResult into a test failure.
1751 VK_CHECK(vkd.queueWaitIdle(queue));
// Command that waits for the whole device to become idle (vkDeviceWaitIdle)
// during execute(). Heavier-weight variant of QueueWaitIdle.
1754 class DeviceWaitIdle : public Command
1757 DeviceWaitIdle (void) {}
1758 ~DeviceWaitIdle (void) {}
1759 const char* getName (void) const { return "DeviceWaitIdle"; }
1761 void logExecute (TestLog& log, size_t commandIndex) const;
1762 void execute (ExecuteContext& context);
1765 void DeviceWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1767 log << TestLog::Message << commandIndex << ":" << getName() << " Device wait idle" << TestLog::EndMessage;
1770 void DeviceWaitIdle::execute (ExecuteContext& context)
1772 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1773 const vk::VkDevice device = context.getContext().getDevice();
1775 VK_CHECK(vkd.deviceWaitIdle(device));
// Context handed to CmdCommand::submit(): a read-only view of the
// PrepareContext plus the command buffer currently being recorded.
1781 SubmitContext (const PrepareContext& context,
1782 const vk::VkCommandBuffer commandBuffer)
1783 : m_context (context)
1784 , m_commandBuffer (commandBuffer)
// Pass-through accessors delegating to the wrapped PrepareContext.
1788 const Memory& getMemory (void) const { return m_context.getMemory(); }
1789 const Context& getContext (void) const { return m_context.getContext(); }
1790 vk::VkCommandBuffer getCommandBuffer (void) const { return m_commandBuffer; }
1792 vk::VkBuffer getBuffer (void) const { return m_context.getBuffer(); }
1793 vk::VkDeviceSize getBufferSize (void) const { return m_context.getBufferSize(); }
1795 vk::VkImage getImage (void) const { return m_context.getImage(); }
1796 deInt32 getImageWidth (void) const { return m_context.getImageWidth(); }
1797 deInt32 getImageHeight (void) const { return m_context.getImageHeight(); }
1798 vk::VkImageLayout getImageLayout (void) const { return m_context.getImageLayout(); }
1801 const PrepareContext& m_context;
1802 const vk::VkCommandBuffer m_commandBuffer;
// Base interface for operations recorded into a command buffer (as opposed
// to Command, which models host-side operations). Driven by
// SubmitCommandBuffer: prepare() -> submit() -> verify().
1808 virtual ~CmdCommand (void) {}
1809 virtual const char* getName (void) const = 0;
1811 // Log things that are done during prepare
1812 virtual void logPrepare (TestLog&, size_t) const {}
1813 // Log submitted calls etc.
1814 virtual void logSubmit (TestLog&, size_t) const {}
1816 // Allocate vulkan resources and prepare for submit.
1817 virtual void prepare (PrepareContext&) {}
1819 // Submit commands to command buffer.
1820 virtual void submit (SubmitContext&) {}
// Update the reference model and/or check results after execution.
1823 virtual void verify (VerifyContext&, size_t) {}
// Host-side command that owns a sequence of CmdCommands: it records them all
// into one primary command buffer in prepare(), submits that buffer in
// execute(), and fans out logging/verification to each child command.
1826 class SubmitCommandBuffer : public Command
// Takes ownership of the CmdCommand pointers; they are deleted in the dtor.
1829 SubmitCommandBuffer (const vector<CmdCommand*>& commands);
1830 ~SubmitCommandBuffer (void);
1832 const char* getName (void) const { return "SubmitCommandBuffer"; }
1833 void logExecute (TestLog& log, size_t commandIndex) const;
1834 void logPrepare (TestLog& log, size_t commandIndex) const;
1836 // Allocate command buffer and submit commands to command buffer
1837 void prepare (PrepareContext& context);
1838 void execute (ExecuteContext& context);
1840 // Verify that results are correct.
1841 void verify (VerifyContext& context, size_t commandIndex);
1844 vector<CmdCommand*> m_commands;
1845 vk::Move<vk::VkCommandBuffer> m_commandBuffer;
1848 SubmitCommandBuffer::SubmitCommandBuffer (const vector<CmdCommand*>& commands)
1849 : m_commands (commands)
1853 SubmitCommandBuffer::~SubmitCommandBuffer (void)
1855 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1856 delete m_commands[cmdNdx];
1859 void SubmitCommandBuffer::prepare (PrepareContext& context)
1861 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1862 const vk::VkDevice device = context.getContext().getDevice();
1863 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
// Command buffer is created already in the recording state.
1865 m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);
// First let every child allocate its resources...
1867 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1869 CmdCommand& command = *m_commands[cmdNdx];
1871 command.prepare(context);
1875 SubmitContext submitContext (context, *m_commandBuffer);
// ...then record each child into the shared command buffer in order.
1877 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1879 CmdCommand& command = *m_commands[cmdNdx];
1881 command.submit(submitContext);
1884 VK_CHECK(vkd.endCommandBuffer(*m_commandBuffer));
1888 void SubmitCommandBuffer::execute (ExecuteContext& context)
1890 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1891 const vk::VkCommandBuffer cmd = *m_commandBuffer;
1892 const vk::VkQueue queue = context.getContext().getQueue();
1893 const vk::VkSubmitInfo submit =
1895 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
1900 (const vk::VkPipelineStageFlags*)DE_NULL,
// NOTE(review): vkQueueSubmit returns a VkResult that was silently ignored
// here; wrapped in VK_CHECK for consistency with endCommandBuffer,
// queueWaitIdle and deviceWaitIdle elsewhere in this file. Last argument is
// the fence (none).
1909 VK_CHECK(vkd.queueSubmit(queue, 1, &submit, 0));
1912 void SubmitCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
1914 const string sectionName (de::toString(commandIndex) + ":" + getName());
1915 const tcu::ScopedLogSection section (context.getLog(), sectionName, sectionName);
1917 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1918 m_commands[cmdNdx]->verify(context, cmdNdx);
1921 void SubmitCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1923 const string sectionName (de::toString(commandIndex) + ":" + getName());
1924 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1926 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1927 m_commands[cmdNdx]->logPrepare(log, cmdNdx);
1930 void SubmitCommandBuffer::logExecute (TestLog& log, size_t commandIndex) const
1932 const string sectionName (de::toString(commandIndex) + ":" + getName());
1933 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1935 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1936 m_commands[cmdNdx]->logSubmit(log, cmdNdx);
// CmdCommand recording a single vkCmdPipelineBarrier with one barrier of the
// configured type: global VkMemoryBarrier, VkBufferMemoryBarrier covering the
// whole buffer, or VkImageMemoryBarrier keeping the current layout.
1939 class PipelineBarrier : public CmdCommand
1949 PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1950 const vk::VkAccessFlags srcAccesses,
1951 const vk::VkPipelineStageFlags dstStages,
1952 const vk::VkAccessFlags dstAccesses,
1954 ~PipelineBarrier (void) {}
1955 const char* getName (void) const { return "PipelineBarrier"; }
1957 void logSubmit (TestLog& log, size_t commandIndex) const;
1958 void submit (SubmitContext& context);
1961 const vk::VkPipelineStageFlags m_srcStages;
1962 const vk::VkAccessFlags m_srcAccesses;
1963 const vk::VkPipelineStageFlags m_dstStages;
1964 const vk::VkAccessFlags m_dstAccesses;
1968 PipelineBarrier::PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1969 const vk::VkAccessFlags srcAccesses,
1970 const vk::VkPipelineStageFlags dstStages,
1971 const vk::VkAccessFlags dstAccesses,
1973 : m_srcStages (srcStages)
1974 , m_srcAccesses (srcAccesses)
1975 , m_dstStages (dstStages)
1976 , m_dstAccesses (dstAccesses)
1981 void PipelineBarrier::logSubmit (TestLog& log, size_t commandIndex) const
1983 log << TestLog::Message << commandIndex << ":" << getName()
1984 << " " << (m_type == TYPE_GLOBAL ? "Global pipeline barrier"
1985 : m_type == TYPE_BUFFER ? "Buffer pipeline barrier"
1986 : "Image pipeline barrier")
1987 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
1988 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses) << TestLog::EndMessage;
1991 void PipelineBarrier::submit (SubmitContext& context)
1993 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1994 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
1996 // \todo [2016-01-08 pyry] This could be cleaned up thanks to latest API changes
// TYPE_GLOBAL case: one VkMemoryBarrier, no buffer/image barriers.
2002 const vk::VkMemoryBarrier barrier =
2004 vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,
2011 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 1, &barrier, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
// TYPE_BUFFER case: one VkBufferMemoryBarrier over the context's buffer,
// no queue family ownership transfer.
2017 const vk::VkBufferMemoryBarrier barrier =
2019 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2025 vk::VK_QUEUE_FAMILY_IGNORED,
2026 vk::VK_QUEUE_FAMILY_IGNORED,
2028 context.getBuffer(),
2033 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
// TYPE_IMAGE case: old and new layouts are both the current layout, so this
// is a pure memory/execution barrier without a layout transition.
2039 const vk::VkImageMemoryBarrier barrier =
2041 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2047 context.getImageLayout(),
2048 context.getImageLayout(),
2050 vk::VK_QUEUE_FAMILY_IGNORED,
2051 vk::VK_QUEUE_FAMILY_IGNORED,
2055 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2061 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2066 DE_FATAL("Unknown pipeline barrier type");
// CmdCommand performing an image layout transition via a
// VkImageMemoryBarrier; verify() marks the whole image memory as undefined in
// the reference model since contents may not survive the transition.
2070 class ImageTransition : public CmdCommand
2073 ImageTransition (vk::VkPipelineStageFlags srcStages,
2074 vk::VkAccessFlags srcAccesses,
2076 vk::VkPipelineStageFlags dstStages,
2077 vk::VkAccessFlags dstAccesses,
2079 vk::VkImageLayout srcLayout,
2080 vk::VkImageLayout dstLayout);
2082 ~ImageTransition (void) {}
2083 const char* getName (void) const { return "ImageTransition"; }
2085 void prepare (PrepareContext& context);
2086 void logSubmit (TestLog& log, size_t commandIndex) const;
2087 void submit (SubmitContext& context);
2088 void verify (VerifyContext& context, size_t);
2091 const vk::VkPipelineStageFlags m_srcStages;
2092 const vk::VkAccessFlags m_srcAccesses;
2093 const vk::VkPipelineStageFlags m_dstStages;
2094 const vk::VkAccessFlags m_dstAccesses;
2095 const vk::VkImageLayout m_srcLayout;
2096 const vk::VkImageLayout m_dstLayout;
2098 vk::VkDeviceSize m_imageMemorySize;
2101 ImageTransition::ImageTransition (vk::VkPipelineStageFlags srcStages,
2102 vk::VkAccessFlags srcAccesses,
2104 vk::VkPipelineStageFlags dstStages,
2105 vk::VkAccessFlags dstAccesses,
2107 vk::VkImageLayout srcLayout,
2108 vk::VkImageLayout dstLayout)
2109 : m_srcStages (srcStages)
2110 , m_srcAccesses (srcAccesses)
2111 , m_dstStages (dstStages)
2112 , m_dstAccesses (dstAccesses)
2113 , m_srcLayout (srcLayout)
2114 , m_dstLayout (dstLayout)
2118 void ImageTransition::logSubmit (TestLog& log, size_t commandIndex) const
2120 log << TestLog::Message << commandIndex << ":" << getName()
2121 << " Image transition pipeline barrier"
2122 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
2123 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses)
2124 << ", srcLayout: " << m_srcLayout << ", dstLayout: " << m_dstLayout << TestLog::EndMessage;
2127 void ImageTransition::prepare (PrepareContext& context)
// The transition is valid only if the image is still undefined, the barrier
// discards contents (srcLayout UNDEFINED), or layouts actually match.
2129 DE_ASSERT(context.getImageLayout() == vk::VK_IMAGE_LAYOUT_UNDEFINED || m_srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED || context.getImageLayout() == m_srcLayout);
// Record the post-barrier layout so later commands see the new layout.
2131 context.setImageLayout(m_dstLayout);
2132 m_imageMemorySize = context.getImageMemorySize();
2135 void ImageTransition::submit (SubmitContext& context)
2137 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2138 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2139 const vk::VkImageMemoryBarrier barrier =
2141 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2150 vk::VK_QUEUE_FAMILY_IGNORED,
2151 vk::VK_QUEUE_FAMILY_IGNORED,
2155 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2161 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2164 void ImageTransition::verify (VerifyContext& context, size_t)
// Optimal-tiling contents are implementation-defined after a transition, so
// the whole backing range becomes undefined in the reference model.
2166 context.getReference().setUndefined(0, (size_t)m_imageMemorySize);
// CmdCommand recording vkCmdFillBuffer over the buffer (rounded down to a
// multiple of 4 bytes) and mirroring the 32-bit fill pattern into the
// byte-level reference model with the host's endianness.
2169 class FillBuffer : public CmdCommand
2172 FillBuffer (deUint32 value) : m_value(value) {}
2173 ~FillBuffer (void) {}
2174 const char* getName (void) const { return "FillBuffer"; }
2176 void logSubmit (TestLog& log, size_t commandIndex) const;
2177 void submit (SubmitContext& context);
2178 void verify (VerifyContext& context, size_t commandIndex);
2181 const deUint32 m_value;
2182 vk::VkDeviceSize m_bufferSize;
2185 void FillBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2187 log << TestLog::Message << commandIndex << ":" << getName() << " Fill value: " << m_value << TestLog::EndMessage;
2190 void FillBuffer::submit (SubmitContext& context)
2192 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2193 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2194 const vk::VkBuffer buffer = context.getBuffer();
2195 const vk::VkDeviceSize sizeMask = ~(0x3ull); // \note Round down to multiple of 4
// vkCmdFillBuffer requires a size that is a multiple of 4.
2197 m_bufferSize = sizeMask & context.getBufferSize();
2198 vkd.cmdFillBuffer(cmd, buffer, 0, m_bufferSize, m_value);
2201 void FillBuffer::verify (VerifyContext& context, size_t)
2203 ReferenceMemory& reference = context.getReference();
// Expand the 32-bit fill value into per-byte expectations, honouring the
// endianness of the host that will later read the memory back.
2205 for (size_t ndx = 0; ndx < m_bufferSize; ndx++)
2207 #if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
2208 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(ndx % 4)))));
2210 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(3 - (ndx % 4))))));
// CmdCommand filling the buffer with seeded random data via repeated
// vkCmdUpdateBuffer calls in 64KiB blocks; verify() replays the identical
// PRNG stream into the reference model.
2215 class UpdateBuffer : public CmdCommand
2218 UpdateBuffer (deUint32 seed) : m_seed(seed) {}
2219 ~UpdateBuffer (void) {}
2220 const char* getName (void) const { return "UpdateBuffer"; }
2222 void logSubmit (TestLog& log, size_t commandIndex) const;
2223 void submit (SubmitContext& context);
2224 void verify (VerifyContext& context, size_t commandIndex);
2227 const deUint32 m_seed;
2228 vk::VkDeviceSize m_bufferSize;
2231 void UpdateBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2233 log << TestLog::Message << commandIndex << ":" << getName() << " Update buffer, seed: " << m_seed << TestLog::EndMessage;
2236 void UpdateBuffer::submit (SubmitContext& context)
2238 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2239 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2240 const vk::VkBuffer buffer = context.getBuffer();
// 64KiB per update; vkCmdUpdateBuffer has a 65536-byte dataSize limit.
2241 const size_t blockSize = 65536;
2242 std::vector<deUint8> data (blockSize, 0);
2243 de::Random rng (m_seed);
2245 m_bufferSize = context.getBufferSize();
2247 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2249 for (size_t ndx = 0; ndx < data.size(); ndx++)
2250 data[ndx] = rng.getUint8();
// Full block, or the remaining tail on the last iteration.
2252 if (m_bufferSize - updated > blockSize)
2253 vkd.cmdUpdateBuffer(cmd, buffer, updated, blockSize, (const deUint32*)(&data[0]));
2255 vkd.cmdUpdateBuffer(cmd, buffer, updated, m_bufferSize - updated, (const deUint32*)(&data[0]));
2259 void UpdateBuffer::verify (VerifyContext& context, size_t)
2261 ReferenceMemory& reference = context.getReference();
2262 const size_t blockSize = 65536;
2263 vector<deUint8> data (blockSize, 0);
// Same seed as submit(): regenerates the exact byte stream that was written.
2264 de::Random rng (m_seed);
2266 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2268 for (size_t ndx = 0; ndx < data.size(); ndx++)
2269 data[ndx] = rng.getUint8();
2271 if (m_bufferSize - updated > blockSize)
2272 reference.setData(updated, blockSize, &data[0]);
2274 reference.setData(updated, (size_t)(m_bufferSize - updated), &data[0]);
// CmdCommand copying the tested buffer into a host-visible staging buffer;
// verify() then submits a transfer->host barrier, maps the staging buffer and
// compares every defined byte against the reference model.
2278 class BufferCopyToBuffer : public CmdCommand
2281 BufferCopyToBuffer (void) {}
2282 ~BufferCopyToBuffer (void) {}
2283 const char* getName (void) const { return "BufferCopyToBuffer"; }
2285 void logPrepare (TestLog& log, size_t commandIndex) const;
2286 void prepare (PrepareContext& context);
2287 void logSubmit (TestLog& log, size_t commandIndex) const;
2288 void submit (SubmitContext& context);
2289 void verify (VerifyContext& context, size_t commandIndex);
2292 vk::VkDeviceSize m_bufferSize;
2293 vk::Move<vk::VkBuffer> m_dstBuffer;
2294 vk::Move<vk::VkDeviceMemory> m_memory;
2297 void BufferCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2299 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for buffer to buffer copy." << TestLog::EndMessage;
2302 void BufferCopyToBuffer::prepare (PrepareContext& context)
2304 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2305 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2306 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2307 const vk::VkDevice device = context.getContext().getDevice();
2308 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2310 m_bufferSize = context.getBufferSize();
// Host-visible destination so verify() can map and inspect the copy result.
2312 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2313 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2316 void BufferCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2318 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to another buffer" << TestLog::EndMessage;
2321 void BufferCopyToBuffer::submit (SubmitContext& context)
2323 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2324 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2325 const vk::VkBufferCopy range =
2331 vkd.cmdCopyBuffer(commandBuffer, context.getBuffer(), *m_dstBuffer, 1, &range);
2334 void BufferCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2336 tcu::ResultCollector& resultCollector (context.getResultCollector());
2337 ReferenceMemory& reference (context.getReference());
2338 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2339 const vk::VkDevice device = context.getContext().getDevice();
2340 const vk::VkQueue queue = context.getContext().getQueue();
2341 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2342 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// Make the transfer write visible to host reads before mapping.
2343 const vk::VkBufferMemoryBarrier barrier =
2345 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2348 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2349 vk::VK_ACCESS_HOST_READ_BIT,
2351 vk::VK_QUEUE_FAMILY_IGNORED,
2352 vk::VK_QUEUE_FAMILY_IGNORED,
2358 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2360 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
// queueRun submits and waits (helper defined outside this excerpt).
2361 queueRun(vkd, queue, *commandBuffer);
2364 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2367 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2370 const deUint8* const data = (const deUint8*)ptr;
// Compare only bytes whose reference value is defined.
2372 for (size_t pos = 0; pos < (size_t)m_bufferSize; pos++)
2374 if (reference.isDefined(pos))
2376 if (data[pos] != reference.get(pos))
2378 resultCollector.fail(
2379 de::toString(commandIndex) + ":" + getName()
2380 + " Result differs from reference, Expected: "
2381 + de::toString(tcu::toHex<8>(reference.get(pos)))
2383 + de::toString(tcu::toHex<8>(data[pos]))
2385 + de::toString(pos));
2392 vkd.unmapMemory(device, *m_memory);
2395 context.getLog() << TestLog::Message << commandIndex << ": Buffer copy to buffer verification failed" << TestLog::EndMessage;
// Command: fills a host-visible staging buffer with seeded pseudo-random bytes (prepare)
// and copies it over the context buffer (submit); verify replays the seed into ReferenceMemory.
2399 class BufferCopyFromBuffer : public CmdCommand
2402 BufferCopyFromBuffer (deUint32 seed) : m_seed(seed) {}
2403 ~BufferCopyFromBuffer (void) {}
2404 const char* getName (void) const { return "BufferCopyFromBuffer"; }
2406 void logPrepare (TestLog& log, size_t commandIndex) const;
2407 void prepare (PrepareContext& context);
2408 void logSubmit (TestLog& log, size_t commandIndex) const;
2409 void submit (SubmitContext& context);
2410 void verify (VerifyContext& context, size_t commandIndex);
// m_seed drives the deterministic random fill shared by prepare() and verify().
2413 const deUint32 m_seed;
2414 vk::VkDeviceSize m_bufferSize;
2415 vk::Move<vk::VkBuffer> m_srcBuffer;
2416 vk::Move<vk::VkDeviceMemory> m_memory;
// Logs the prepare step (source-buffer allocation) including the RNG seed for reproducibility.
2419 void BufferCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2421 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to buffer copy. Seed: " << m_seed << TestLog::EndMessage;
// Allocates a host-visible TRANSFER_SRC buffer the same size as the context buffer
// and fills it with deterministic pseudo-random bytes from m_seed.
2424 void BufferCopyFromBuffer::prepare (PrepareContext& context)
2426 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2427 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2428 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2429 const vk::VkDevice device = context.getContext().getDevice();
2430 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2432 m_bufferSize = context.getBufferSize();
2433 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2434 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
// Fill via a persistent map, flush for non-coherent memory, then unmap.
2437 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2438 de::Random rng (m_seed);
2441 deUint8* const data = (deUint8*)ptr;
2443 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2444 data[ndx] = rng.getUint8();
2447 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2448 vkd.unmapMemory(device, *m_memory);
// Logs the submit step (the actual buffer-to-buffer copy).
2452 void BufferCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2454 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from another buffer" << TestLog::EndMessage;
// Records a copy from m_srcBuffer into the context buffer.
// NOTE(review): the VkBufferCopy field values and scope braces are elided in this listing.
2457 void BufferCopyFromBuffer::submit (SubmitContext& context)
2459 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2460 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2461 const vk::VkBufferCopy range =
2467 vkd.cmdCopyBuffer(commandBuffer, *m_srcBuffer, context.getBuffer(), 1, &range);
// Updates the reference model: replays the same seeded RNG sequence used in prepare()
// so ReferenceMemory matches the bytes the copy wrote into the context buffer.
2470 void BufferCopyFromBuffer::verify (VerifyContext& context, size_t)
2472 ReferenceMemory& reference (context.getReference());
2473 de::Random rng (m_seed);
2475 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2476 reference.set(ndx, rng.getUint8());
// Command: copies the context buffer into a freshly created RGBA8 image (submit),
// then reads the image back and byte-compares it against ReferenceMemory (verify).
2479 class BufferCopyToImage : public CmdCommand
2482 BufferCopyToImage (void) {}
2483 ~BufferCopyToImage (void) {}
2484 const char* getName (void) const { return "BufferCopyToImage"; }
2486 void logPrepare (TestLog& log, size_t commandIndex) const;
2487 void prepare (PrepareContext& context);
2488 void logSubmit (TestLog& log, size_t commandIndex) const;
2489 void submit (SubmitContext& context);
2490 void verify (VerifyContext& context, size_t commandIndex);
// Destination image dimensions chosen so width * height * 4 == buffer size.
2493 deInt32 m_imageWidth;
2494 deInt32 m_imageHeight;
2495 vk::Move<vk::VkImage> m_dstImage;
2496 vk::Move<vk::VkDeviceMemory> m_memory;
// Logs the prepare step (destination-image allocation).
2499 void BufferCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
2501 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for buffer to image copy." << TestLog::EndMessage;
// Creates an optimally-tiled RGBA8 destination image sized so W*H*4 equals the buffer size,
// binds memory, then transitions it UNDEFINED -> TRANSFER_DST_OPTIMAL with a one-off command buffer.
// NOTE(review): VkImageCreateInfo / VkImageMemoryBarrier listings below have elided fields
// (pNext, flags, depth, queue-family list, layer counts, etc.) — confirm against the full file.
2504 void BufferCopyToImage::prepare (PrepareContext& context)
2506 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2507 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2508 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2509 const vk::VkDevice device = context.getContext().getDevice();
2510 const vk::VkQueue queue = context.getContext().getQueue();
2511 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2512 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2513 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2515 m_imageWidth = imageSize[0];
2516 m_imageHeight = imageSize[1];
2519 const vk::VkImageCreateInfo createInfo =
2521 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2525 vk::VK_IMAGE_TYPE_2D,
2526 vk::VK_FORMAT_R8G8B8A8_UNORM,
2528 (deUint32)m_imageWidth,
2529 (deUint32)m_imageHeight,
2532 1, 1, // mipLevels, arrayLayers
2533 vk::VK_SAMPLE_COUNT_1_BIT,
2535 vk::VK_IMAGE_TILING_OPTIMAL,
2536 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2537 vk::VK_SHARING_MODE_EXCLUSIVE,
2539 (deUint32)queueFamilies.size(),
2541 vk::VK_IMAGE_LAYOUT_UNDEFINED
2544 m_dstImage = vk::createImage(vkd, device, &createInfo);
2547 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
// One-shot layout transition so submit() can copy into the image.
2550 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2551 const vk::VkImageMemoryBarrier barrier =
2553 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2557 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2559 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2560 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2562 vk::VK_QUEUE_FAMILY_IGNORED,
2563 vk::VK_QUEUE_FAMILY_IGNORED,
2567 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2569 1, // Mip level count
2575 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2577 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2578 queueRun(vkd, queue, *commandBuffer);
// Logs the submit step (buffer-to-image copy).
2582 void BufferCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
2584 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to image" << TestLog::EndMessage;
// Records the copy of the context buffer into m_dstImage (TRANSFER_DST_OPTIMAL layout).
// NOTE(review): VkBufferImageCopy fields (bufferOffset, row length, subresource, offset, depth) are elided.
2587 void BufferCopyToImage::submit (SubmitContext& context)
2589 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2590 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2591 const vk::VkBufferImageCopy region =
2596 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2603 (deUint32)m_imageWidth,
2604 (deUint32)m_imageHeight,
// NOTE(review): "®ion" below is mojibake for "&region" (the "&reg" prefix was decoded as the
// HTML entity (R)) — this will not compile as-is; restore "&region" when fixing the file's encoding.
2609 vkd.cmdCopyBufferToImage(commandBuffer, context.getBuffer(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
// Verifies the image contents: transitions the image to TRANSFER_SRC_OPTIMAL, copies it into a
// host-visible buffer, barriers transfer-write -> host-read, then byte-compares against ReferenceMemory.
// NOTE(review): barrier/region field values and braces are elided throughout this listing.
2612 void BufferCopyToImage::verify (VerifyContext& context, size_t commandIndex)
2614 tcu::ResultCollector& resultCollector (context.getResultCollector());
2615 ReferenceMemory& reference (context.getReference());
2616 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2617 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2618 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2619 const vk::VkDevice device = context.getContext().getDevice();
2620 const vk::VkQueue queue = context.getContext().getQueue();
2621 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2622 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2623 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2624 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2625 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// TRANSFER_DST_OPTIMAL -> TRANSFER_SRC_OPTIMAL so the image can be read back.
2627 const vk::VkImageMemoryBarrier imageBarrier =
2629 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2632 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2633 vk::VK_ACCESS_TRANSFER_READ_BIT,
2635 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2636 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2638 vk::VK_QUEUE_FAMILY_IGNORED,
2639 vk::VK_QUEUE_FAMILY_IGNORED,
2643 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2645 1, // Mip level count
// Make the readback buffer's transfer write visible to host reads before mapping.
2650 const vk::VkBufferMemoryBarrier bufferBarrier =
2652 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2655 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2656 vk::VK_ACCESS_HOST_READ_BIT,
2658 vk::VK_QUEUE_FAMILY_IGNORED,
2659 vk::VK_QUEUE_FAMILY_IGNORED,
2665 const vk::VkBufferImageCopy region =
2670 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2677 (deUint32)m_imageWidth,
2678 (deUint32)m_imageHeight,
2683 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
// NOTE(review): "®ion" is mojibake for "&region" — restore when fixing the file's encoding.
2684 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
2685 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2688 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2689 queueRun(vkd, queue, *commandBuffer);
// Host-side comparison of every defined byte.
2692 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2694 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2697 const deUint8* const data = (const deUint8*)ptr;
2699 for (size_t pos = 0; pos < (size_t)( 4 * m_imageWidth * m_imageHeight); pos++)
2701 if (reference.isDefined(pos))
2703 if (data[pos] != reference.get(pos))
2705 resultCollector.fail(
2706 de::toString(commandIndex) + ":" + getName()
2707 + " Result differs from reference, Expected: "
2708 + de::toString(tcu::toHex<8>(reference.get(pos)))
2710 + de::toString(tcu::toHex<8>(data[pos]))
2712 + de::toString(pos));
2719 vkd.unmapMemory(device, *memory);
// Command: prepares a source image filled with seeded random data (uploaded via a staging buffer)
// and copies it into the context buffer; verify replays the seed into ReferenceMemory.
2723 class BufferCopyFromImage : public CmdCommand
2726 BufferCopyFromImage (deUint32 seed) : m_seed(seed) {}
2727 ~BufferCopyFromImage (void) {}
2728 const char* getName (void) const { return "BufferCopyFromImage"; }
2730 void logPrepare (TestLog& log, size_t commandIndex) const;
2731 void prepare (PrepareContext& context);
2732 void logSubmit (TestLog& log, size_t commandIndex) const;
2733 void submit (SubmitContext& context);
2734 void verify (VerifyContext& context, size_t commandIndex);
// m_seed drives the deterministic fill shared by prepare() and verify().
2737 const deUint32 m_seed;
2738 deInt32 m_imageWidth;
2739 deInt32 m_imageHeight;
2740 vk::Move<vk::VkImage> m_srcImage;
2741 vk::Move<vk::VkDeviceMemory> m_memory;
// Logs the prepare step (source-image allocation).
2744 void BufferCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
2746 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to buffer copy." << TestLog::EndMessage;
// Creates the RGBA8 source image, fills a host-visible staging buffer with seeded random bytes,
// and records: UNDEFINED -> TRANSFER_DST barrier, staging-buffer -> image copy,
// TRANSFER_DST -> TRANSFER_SRC barrier; then runs the command buffer on the queue.
// NOTE(review): struct-initializer fields and braces are elided throughout this listing.
2749 void BufferCopyFromImage::prepare (PrepareContext& context)
2751 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2752 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2753 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2754 const vk::VkDevice device = context.getContext().getDevice();
2755 const vk::VkQueue queue = context.getContext().getQueue();
2756 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2757 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2758 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2760 m_imageWidth = imageSize[0];
2761 m_imageHeight = imageSize[1];
2764 const vk::VkImageCreateInfo createInfo =
2766 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2770 vk::VK_IMAGE_TYPE_2D,
2771 vk::VK_FORMAT_R8G8B8A8_UNORM,
2773 (deUint32)m_imageWidth,
2774 (deUint32)m_imageHeight,
2777 1, 1, // mipLevels, arrayLayers
2778 vk::VK_SAMPLE_COUNT_1_BIT,
2780 vk::VK_IMAGE_TILING_OPTIMAL,
2781 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2782 vk::VK_SHARING_MODE_EXCLUSIVE,
2784 (deUint32)queueFamilies.size(),
2786 vk::VK_IMAGE_LAYOUT_UNDEFINED
2789 m_srcImage = vk::createImage(vkd, device, &createInfo);
2792 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
// Temporary staging buffer used only during prepare; freed when this scope ends.
2795 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2796 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2797 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2798 const vk::VkImageMemoryBarrier preImageBarrier =
2800 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2804 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2806 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2807 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2809 vk::VK_QUEUE_FAMILY_IGNORED,
2810 vk::VK_QUEUE_FAMILY_IGNORED,
2814 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2816 1, // Mip level count
2821 const vk::VkImageMemoryBarrier postImageBarrier =
2823 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2826 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2829 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2830 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2832 vk::VK_QUEUE_FAMILY_IGNORED,
2833 vk::VK_QUEUE_FAMILY_IGNORED,
2837 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2839 1, // Mip level count
2844 const vk::VkBufferImageCopy region =
2849 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2856 (deUint32)m_imageWidth,
2857 (deUint32)m_imageHeight,
// Fill the staging buffer with the deterministic random sequence verify() replays.
2863 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2864 de::Random rng (m_seed);
2867 deUint8* const data = (deUint8*)ptr;
2869 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2870 data[ndx] = rng.getUint8();
2873 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2874 vkd.unmapMemory(device, *memory);
2877 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
// NOTE(review): "®ion" is mojibake for "&region" — restore when fixing the file's encoding.
2878 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
2879 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
2881 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2882 queueRun(vkd, queue, *commandBuffer);
// Logs the submit step (image-to-buffer copy).
2886 void BufferCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
2888 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from image" << TestLog::EndMessage;
// Records the copy from m_srcImage (TRANSFER_SRC_OPTIMAL) into the context buffer.
// NOTE(review): VkBufferImageCopy fields and braces are elided in this listing.
2891 void BufferCopyFromImage::submit (SubmitContext& context)
2893 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2894 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2895 const vk::VkBufferImageCopy region =
2900 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2907 (deUint32)m_imageWidth,
2908 (deUint32)m_imageHeight,
// NOTE(review): "®ion" is mojibake for "&region" — restore when fixing the file's encoding.
2913 vkd.cmdCopyImageToBuffer(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getBuffer(), 1, ®ion);
// Updates the reference model by replaying the seeded RNG sequence written into the source image.
2916 void BufferCopyFromImage::verify (VerifyContext& context, size_t)
2918 ReferenceMemory& reference (context.getReference());
2919 de::Random rng (m_seed);
2921 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2922 reference.set(ndx, rng.getUint8());
2925 class ImageCopyToBuffer : public CmdCommand
2928 ImageCopyToBuffer (void) {}
2929 ~ImageCopyToBuffer (void) {}
2930 const char* getName (void) const { return "BufferCopyToImage"; }
2932 void logPrepare (TestLog& log, size_t commandIndex) const;
2933 void prepare (PrepareContext& context);
2934 void logSubmit (TestLog& log, size_t commandIndex) const;
2935 void submit (SubmitContext& context);
2936 void verify (VerifyContext& context, size_t commandIndex);
2939 vk::VkDeviceSize m_bufferSize;
2940 vk::Move<vk::VkBuffer> m_dstBuffer;
2941 vk::Move<vk::VkDeviceMemory> m_memory;
2942 vk::VkDeviceSize m_imageMemorySize;
2943 deInt32 m_imageWidth;
2944 deInt32 m_imageHeight;
// Logs the prepare step (destination-buffer allocation).
2947 void ImageCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2949 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for image to buffer copy." << TestLog::EndMessage;
// Allocates a host-visible TRANSFER_DST buffer sized to hold the context image (RGBA8: W*H*4).
2952 void ImageCopyToBuffer::prepare (PrepareContext& context)
2954 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2955 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2956 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2957 const vk::VkDevice device = context.getContext().getDevice();
2958 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2960 m_imageWidth = context.getImageWidth();
2961 m_imageHeight = context.getImageHeight();
2962 m_bufferSize = 4 * m_imageWidth * m_imageHeight;
2963 m_imageMemorySize = context.getImageMemorySize();
2964 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2965 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
// Logs the submit step (image-to-buffer copy).
2968 void ImageCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2970 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to buffer" << TestLog::EndMessage;
// Records the copy of the context image (in its current layout) into m_dstBuffer.
// NOTE(review): VkBufferImageCopy fields and braces are elided in this listing.
2973 void ImageCopyToBuffer::submit (SubmitContext& context)
2975 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2976 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2977 const vk::VkBufferImageCopy region =
2982 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2989 (deUint32)m_imageWidth,
2990 (deUint32)m_imageHeight,
// NOTE(review): "®ion" is mojibake for "&region" — restore when fixing the file's encoding.
2995 vkd.cmdCopyImageToBuffer(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstBuffer, 1, ®ion);
// Verifies the readback buffer as an RGBA8 image: transfer-write -> host-read barrier, queue run,
// then tcu::intThresholdCompare against the reference image with zero tolerance.
// The context buffer's byte-level reference is marked undefined since the image copy redefined it.
2998 void ImageCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
3000 tcu::ResultCollector& resultCollector (context.getResultCollector());
3001 ReferenceMemory& reference (context.getReference());
3002 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3003 const vk::VkDevice device = context.getContext().getDevice();
3004 const vk::VkQueue queue = context.getContext().getQueue();
3005 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3006 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3007 const vk::VkBufferMemoryBarrier barrier =
3009 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3012 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3013 vk::VK_ACCESS_HOST_READ_BIT,
3015 vk::VK_QUEUE_FAMILY_IGNORED,
3016 vk::VK_QUEUE_FAMILY_IGNORED,
3022 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3024 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3025 queueRun(vkd, queue, *commandBuffer);
3027 reference.setUndefined(0, (size_t)m_imageMemorySize);
// Wrap the mapped bytes as a tightly-packed RGBA8 pixel buffer for image comparison.
3029 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3030 const ConstPixelBufferAccess referenceImage (context.getReferenceImage().getAccess());
3031 const ConstPixelBufferAccess resultImage (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, ptr);
3033 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3035 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), referenceImage, resultImage, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3036 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3038 vkd.unmapMemory(device, *m_memory);
// Command: fills a host-visible staging buffer with seeded random bytes (prepare)
// and copies it into the context image (submit); verify replays the seed into the reference image.
3042 class ImageCopyFromBuffer : public CmdCommand
3045 ImageCopyFromBuffer (deUint32 seed) : m_seed(seed) {}
3046 ~ImageCopyFromBuffer (void) {}
3047 const char* getName (void) const { return "ImageCopyFromBuffer"; }
3049 void logPrepare (TestLog& log, size_t commandIndex) const;
3050 void prepare (PrepareContext& context);
3051 void logSubmit (TestLog& log, size_t commandIndex) const;
3052 void submit (SubmitContext& context);
3053 void verify (VerifyContext& context, size_t commandIndex);
// m_seed drives the deterministic fill shared by prepare() and verify().
3056 const deUint32 m_seed;
3057 deInt32 m_imageWidth;
3058 deInt32 m_imageHeight;
3059 vk::VkDeviceSize m_imageMemorySize;
3060 vk::VkDeviceSize m_bufferSize;
3061 vk::Move<vk::VkBuffer> m_srcBuffer;
3062 vk::Move<vk::VkDeviceMemory> m_memory;
// Logs the prepare step (source-buffer allocation) including the RNG seed.
3065 void ImageCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
3067 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to image copy. Seed: " << m_seed << TestLog::EndMessage;
3070 void ImageCopyFromBuffer::prepare (PrepareContext& context)
3072 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3073 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3074 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3075 const vk::VkDevice device = context.getContext().getDevice();
3076 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3078 m_imageWidth = context.getImageHeight();
3079 m_imageHeight = context.getImageWidth();
3080 m_imageMemorySize = context.getImageMemorySize();
3081 m_bufferSize = m_imageWidth * m_imageHeight * 4;
3082 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
3083 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
3086 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3087 de::Random rng (m_seed);
3090 deUint8* const data = (deUint8*)ptr;
3092 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
3093 data[ndx] = rng.getUint8();
3096 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3097 vkd.unmapMemory(device, *m_memory);
// Logs the submit step (buffer-to-image copy).
3101 void ImageCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
3103 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from buffer" << TestLog::EndMessage;
// Records the copy of m_srcBuffer into the context image (in its current layout).
// NOTE(review): VkBufferImageCopy fields and braces are elided in this listing.
3106 void ImageCopyFromBuffer::submit (SubmitContext& context)
3108 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3109 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3110 const vk::VkBufferImageCopy region =
3115 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3122 (deUint32)m_imageWidth,
3123 (deUint32)m_imageHeight,
// NOTE(review): "®ion" is mojibake for "&region" — restore when fixing the file's encoding.
3128 vkd.cmdCopyBufferToImage(commandBuffer, *m_srcBuffer, context.getImage(), context.getImageLayout(), 1, ®ion);
// Updates the reference model: invalidates the byte-level reference for the image memory, then
// replays the seeded RNG into the reference image pixel-by-pixel (4 bytes per pixel, RGBA order).
3131 void ImageCopyFromBuffer::verify (VerifyContext& context, size_t)
3133 ReferenceMemory& reference (context.getReference());
3134 de::Random rng (m_seed);
3136 reference.setUndefined(0, (size_t)m_imageMemorySize);
3139 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3141 for (deInt32 y = 0; y < m_imageHeight; y++)
3142 for (deInt32 x = 0; x < m_imageWidth; x++)
3144 const deUint8 r8 = rng.getUint8();
3145 const deUint8 g8 = rng.getUint8();
3146 const deUint8 b8 = rng.getUint8();
3147 const deUint8 a8 = rng.getUint8();
3149 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
// Command: prepares a seeded-random source image (uploaded via a staging buffer)
// and copies it into the context image; verify replays the seed into the reference image.
3154 class ImageCopyFromImage : public CmdCommand
3157 ImageCopyFromImage (deUint32 seed) : m_seed(seed) {}
3158 ~ImageCopyFromImage (void) {}
3159 const char* getName (void) const { return "ImageCopyFromImage"; }
3161 void logPrepare (TestLog& log, size_t commandIndex) const;
3162 void prepare (PrepareContext& context);
3163 void logSubmit (TestLog& log, size_t commandIndex) const;
3164 void submit (SubmitContext& context);
3165 void verify (VerifyContext& context, size_t commandIndex);
// m_seed drives the deterministic fill shared by prepare() and verify().
3168 const deUint32 m_seed;
3169 deInt32 m_imageWidth;
3170 deInt32 m_imageHeight;
3171 vk::VkDeviceSize m_imageMemorySize;
3172 vk::Move<vk::VkImage> m_srcImage;
3173 vk::Move<vk::VkDeviceMemory> m_memory;
// Logs the prepare step (source-image allocation).
3176 void ImageCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3178 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image copy." << TestLog::EndMessage;
// Creates an RGBA8 source image matching the context image dimensions, uploads seeded random
// data through a temporary staging buffer, and transitions the image UNDEFINED -> TRANSFER_DST
// -> TRANSFER_SRC via a one-off command buffer.
// NOTE(review): struct-initializer fields and braces are elided throughout this listing.
3181 void ImageCopyFromImage::prepare (PrepareContext& context)
3183 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3184 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3185 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3186 const vk::VkDevice device = context.getContext().getDevice();
3187 const vk::VkQueue queue = context.getContext().getQueue();
3188 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3189 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3191 m_imageWidth = context.getImageWidth();
3192 m_imageHeight = context.getImageHeight();
3193 m_imageMemorySize = context.getImageMemorySize();
3196 const vk::VkImageCreateInfo createInfo =
3198 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3202 vk::VK_IMAGE_TYPE_2D,
3203 vk::VK_FORMAT_R8G8B8A8_UNORM,
3205 (deUint32)m_imageWidth,
3206 (deUint32)m_imageHeight,
3209 1, 1, // mipLevels, arrayLayers
3210 vk::VK_SAMPLE_COUNT_1_BIT,
3212 vk::VK_IMAGE_TILING_OPTIMAL,
3213 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3214 vk::VK_SHARING_MODE_EXCLUSIVE,
3216 (deUint32)queueFamilies.size(),
3218 vk::VK_IMAGE_LAYOUT_UNDEFINED
3221 m_srcImage = vk::createImage(vkd, device, &createInfo);
3224 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
// Temporary staging buffer used only during prepare; freed when this scope ends.
3227 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3228 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3229 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3230 const vk::VkImageMemoryBarrier preImageBarrier =
3232 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3236 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3238 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3239 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3241 vk::VK_QUEUE_FAMILY_IGNORED,
3242 vk::VK_QUEUE_FAMILY_IGNORED,
3246 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3248 1, // Mip level count
3253 const vk::VkImageMemoryBarrier postImageBarrier =
3255 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3258 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3261 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3262 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3264 vk::VK_QUEUE_FAMILY_IGNORED,
3265 vk::VK_QUEUE_FAMILY_IGNORED,
3269 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3271 1, // Mip level count
3276 const vk::VkBufferImageCopy region =
3281 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3288 (deUint32)m_imageWidth,
3289 (deUint32)m_imageHeight,
// Fill the staging buffer with the deterministic random sequence verify() replays.
3295 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3296 de::Random rng (m_seed);
3299 deUint8* const data = (deUint8*)ptr;
3301 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
3302 data[ndx] = rng.getUint8();
3305 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3306 vkd.unmapMemory(device, *memory);
3309 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
// NOTE(review): "®ion" is mojibake for "&region" — restore when fixing the file's encoding.
3310 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
3311 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3313 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3314 queueRun(vkd, queue, *commandBuffer);
// Logs the submit step (image-to-image copy).
3318 void ImageCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3320 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from another image" << TestLog::EndMessage;
// Records the copy from m_srcImage (TRANSFER_SRC_OPTIMAL) into the context image.
// NOTE(review): VkImageCopy fields (subresource layers, offsets, depth) and braces are elided.
3323 void ImageCopyFromImage::submit (SubmitContext& context)
3325 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3326 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3327 const vk::VkImageCopy region =
3330 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3338 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3345 (deUint32)m_imageWidth,
3346 (deUint32)m_imageHeight,
// NOTE(review): "®ion" is mojibake for "&region" — restore when fixing the file's encoding.
3351 vkd.cmdCopyImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), context.getImageLayout(), 1, ®ion);
// Updates the reference model: invalidates the byte-level reference for the image memory, then
// replays the seeded RNG into the reference image pixel-by-pixel (same order as prepare()'s fill).
3354 void ImageCopyFromImage::verify (VerifyContext& context, size_t)
3356 ReferenceMemory& reference (context.getReference());
3357 de::Random rng (m_seed);
3359 reference.setUndefined(0, (size_t)m_imageMemorySize);
3362 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3364 for (deInt32 y = 0; y < m_imageHeight; y++)
3365 for (deInt32 x = 0; x < m_imageWidth; x++)
3367 const deUint8 r8 = rng.getUint8();
3368 const deUint8 g8 = rng.getUint8();
3369 const deUint8 b8 = rng.getUint8();
3370 const deUint8 a8 = rng.getUint8();
3372 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
// Command: creates a destination image matching the context image and copies the
// context image into it; verification reads the destination back (defined below).
3377 class ImageCopyToImage : public CmdCommand
3380 ImageCopyToImage (void) {}
3381 ~ImageCopyToImage (void) {}
3382 const char* getName (void) const { return "ImageCopyToImage"; }
3384 void logPrepare (TestLog& log, size_t commandIndex) const;
3385 void prepare (PrepareContext& context);
3386 void logSubmit (TestLog& log, size_t commandIndex) const;
3387 void submit (SubmitContext& context);
3388 void verify (VerifyContext& context, size_t commandIndex);
3391 deInt32 m_imageWidth;
3392 deInt32 m_imageHeight;
3393 vk::VkDeviceSize m_imageMemorySize;
3394 vk::Move<vk::VkImage> m_dstImage;
3395 vk::Move<vk::VkDeviceMemory> m_memory;
// Logs the prepare step (destination-image allocation).
3398 void ImageCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
3400 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image copy." << TestLog::EndMessage;
// Creates an RGBA8 destination image matching the context image dimensions, binds memory,
// and transitions it UNDEFINED -> TRANSFER_DST_OPTIMAL with a one-off command buffer.
// NOTE(review): VkImageCreateInfo / VkImageMemoryBarrier field values and braces are elided.
3403 void ImageCopyToImage::prepare (PrepareContext& context)
3405 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3406 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3407 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3408 const vk::VkDevice device = context.getContext().getDevice();
3409 const vk::VkQueue queue = context.getContext().getQueue();
3410 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3411 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3413 m_imageWidth = context.getImageWidth();
3414 m_imageHeight = context.getImageHeight();
3415 m_imageMemorySize = context.getImageMemorySize();
3418 const vk::VkImageCreateInfo createInfo =
3420 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3424 vk::VK_IMAGE_TYPE_2D,
3425 vk::VK_FORMAT_R8G8B8A8_UNORM,
3427 (deUint32)m_imageWidth,
3428 (deUint32)m_imageHeight,
3431 1, 1, // mipLevels, arrayLayers
3432 vk::VK_SAMPLE_COUNT_1_BIT,
3434 vk::VK_IMAGE_TILING_OPTIMAL,
3435 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3436 vk::VK_SHARING_MODE_EXCLUSIVE,
3438 (deUint32)queueFamilies.size(),
3440 vk::VK_IMAGE_LAYOUT_UNDEFINED
3443 m_dstImage = vk::createImage(vkd, device, &createInfo);
3446 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
// One-shot layout transition so submit() can copy into the image.
3449 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3450 const vk::VkImageMemoryBarrier barrier =
3452 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3456 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3458 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3459 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3461 vk::VK_QUEUE_FAMILY_IGNORED,
3462 vk::VK_QUEUE_FAMILY_IGNORED,
3466 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3468 1, // Mip level count
3474 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
3476 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3477 queueRun(vkd, queue, *commandBuffer);
// Log the copy performed by submit() for this command index.
3481 void ImageCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
3483 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to another image" << TestLog::EndMessage;
// Record a full-size image-to-image copy from the test image (in its current
// layout) into m_dstImage (TRANSFER_DST_OPTIMAL).
3486 void ImageCopyToImage::submit (SubmitContext& context)
3488 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
3489 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3490 	const vk::VkImageCopy	region	=
3493 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
3501 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
3508 	(deUint32)m_imageWidth,
3509 	(deUint32)m_imageHeight,
	// NOTE(review): "®ion" below looks like a mis-encoded "&region" — confirm against upstream source.
3514 	vkd.cmdCopyImage(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
// Read m_dstImage back into a host-visible buffer and compare it pixel-exactly
// (zero threshold) against the reference image; failures are reported through
// the result collector.
3517 void ImageCopyToImage::verify (VerifyContext& context, size_t commandIndex)
3519 	tcu::ResultCollector&	resultCollector	(context.getResultCollector());
3520 	const vk::InstanceInterface&	vki	= context.getContext().getInstanceInterface();
3521 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
3522 	const vk::VkPhysicalDevice	physicalDevice	= context.getContext().getPhysicalDevice();
3523 	const vk::VkDevice	device	= context.getContext().getDevice();
3524 	const vk::VkQueue	queue	= context.getContext().getQueue();
3525 	const vk::VkCommandPool	commandPool	= context.getContext().getCommandPool();
3526 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3527 	const vector<deUint32>&	queueFamilies	= context.getContext().getQueueFamilies();
	// 4 bytes per pixel (RGBA8) readback buffer.
3528 	const vk::Unique<vk::VkBuffer>	dstBuffer	(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3529 	const vk::Unique<vk::VkDeviceMemory>	memory	(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
	// Transition dst image TRANSFER_DST -> TRANSFER_SRC for the readback copy.
3531 	const vk::VkImageMemoryBarrier	imageBarrier	=
3533 	vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3536 	vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3537 	vk::VK_ACCESS_TRANSFER_READ_BIT,
3539 	vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3540 	vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3542 	vk::VK_QUEUE_FAMILY_IGNORED,
3543 	vk::VK_QUEUE_FAMILY_IGNORED,
3547 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
3549 	1,	// Mip level count
	// Make the buffer write visible to host reads after the copy.
3554 	const vk::VkBufferMemoryBarrier	bufferBarrier	=
3556 	vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3559 	vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3560 	vk::VK_ACCESS_HOST_READ_BIT,
3562 	vk::VK_QUEUE_FAMILY_IGNORED,
3563 	vk::VK_QUEUE_FAMILY_IGNORED,
3568 	const vk::VkBufferImageCopy	region	=
3573 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
3580 	(deUint32)m_imageWidth,
3581 	(deUint32)m_imageHeight,
	// NOTE(review): "®ion" below looks like a mis-encoded "&region" — confirm against upstream source.
3586 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
3587 	vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
3588 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3591 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3592 	queueRun(vkd, queue, *commandBuffer);
	// Map, invalidate, and compare against the reference image with zero threshold.
3595 	void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3597 	vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3600 	const deUint8* const	data	= (const deUint8*)ptr;
3601 	const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, data);
3602 	const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
3604 	if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3605 	resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3608 	vkd.unmapMemory(device, *memory);
// Command that fills a source image with seeded random pixels and blits it onto
// the test image. m_scale selects 1:1 (BLIT_SCALE_10) or 2x-upscaling
// (BLIT_SCALE_20, source is half-size) as seen in prepare()/verify().
3618 class ImageBlitFromImage : public CmdCommand
3621 	ImageBlitFromImage	(deUint32 seed, BlitScale scale) : m_seed(seed), m_scale(scale) {}
3622 	~ImageBlitFromImage	(void) {}
3623 	const char*	getName	(void) const { return "ImageBlitFromImage"; }
3625 	void	logPrepare	(TestLog& log, size_t commandIndex) const;
3626 	void	prepare	(PrepareContext& context);
3627 	void	logSubmit	(TestLog& log, size_t commandIndex) const;
3628 	void	submit	(SubmitContext& context);
3629 	void	verify	(VerifyContext& context, size_t commandIndex);
	// Seed drives the deterministic random fill; verify() replays the same stream.
3632 	const deUint32	m_seed;
3633 	const BlitScale	m_scale;
3634 	deInt32	m_imageWidth;
3635 	deInt32	m_imageHeight;
3636 	vk::VkDeviceSize	m_imageMemorySize;
3637 	deInt32	m_srcImageWidth;
3638 	deInt32	m_srcImageHeight;
3639 	vk::Move<vk::VkImage>	m_srcImage;
3640 	vk::Move<vk::VkDeviceMemory>	m_memory;
// Log what prepare() will do for this command index.
3643 void ImageBlitFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3645 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image blit." << TestLog::EndMessage;
// Create the blit source image (full- or half-size depending on m_scale), fill
// it with seeded random RGBA8 data via a staging buffer, and leave it in
// TRANSFER_SRC_OPTIMAL ready for submit()'s blit.
3648 void ImageBlitFromImage::prepare (PrepareContext& context)
3650 	const vk::InstanceInterface&	vki	= context.getContext().getInstanceInterface();
3651 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
3652 	const vk::VkPhysicalDevice	physicalDevice	= context.getContext().getPhysicalDevice();
3653 	const vk::VkDevice	device	= context.getContext().getDevice();
3654 	const vk::VkQueue	queue	= context.getContext().getQueue();
3655 	const vk::VkCommandPool	commandPool	= context.getContext().getCommandPool();
3656 	const vector<deUint32>&	queueFamilies	= context.getContext().getQueueFamilies();
3658 	m_imageWidth = context.getImageWidth();
3659 	m_imageHeight = context.getImageHeight();
3660 	m_imageMemorySize = context.getImageMemorySize();
	// Source extent: equal to target for 1:1, half for the 2x-upscale blit.
3662 	if (m_scale == BLIT_SCALE_10)
3664 	m_srcImageWidth = m_imageWidth;
3665 	m_srcImageHeight = m_imageHeight;
3667 	else if (m_scale == BLIT_SCALE_20)
3669 	m_srcImageWidth = m_imageWidth / 2;
3670 	m_srcImageHeight = m_imageHeight / 2;
3673 	DE_FATAL("Unsupported scale");
3676 	const vk::VkImageCreateInfo	createInfo =
3678 	vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3682 	vk::VK_IMAGE_TYPE_2D,
3683 	vk::VK_FORMAT_R8G8B8A8_UNORM,
3685 	(deUint32)m_srcImageWidth,
3686 	(deUint32)m_srcImageHeight,
3689 	1, 1, // mipLevels, arrayLayers
3690 	vk::VK_SAMPLE_COUNT_1_BIT,
3692 	vk::VK_IMAGE_TILING_OPTIMAL,
3693 	vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3694 	vk::VK_SHARING_MODE_EXCLUSIVE,
3696 	(deUint32)queueFamilies.size(),
3698 	vk::VK_IMAGE_LAYOUT_UNDEFINED
3701 	m_srcImage = vk::createImage(vkd, device, &createInfo);
3704 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
	// Staging buffer used to upload the random pixel data to the image.
3707 	const vk::Unique<vk::VkBuffer>	srcBuffer	(createBuffer(vkd, device, 4 * m_srcImageWidth * m_srcImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3708 	const vk::Unique<vk::VkDeviceMemory>	memory	(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3709 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	// UNDEFINED -> TRANSFER_DST for the upload copy...
3710 	const vk::VkImageMemoryBarrier	preImageBarrier	=
3712 	vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3716 	vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3718 	vk::VK_IMAGE_LAYOUT_UNDEFINED,
3719 	vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3721 	vk::VK_QUEUE_FAMILY_IGNORED,
3722 	vk::VK_QUEUE_FAMILY_IGNORED,
3726 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
3728 	1,	// Mip level count
	// ...then TRANSFER_DST -> TRANSFER_SRC so the image is ready to be blitted from.
3733 	const vk::VkImageMemoryBarrier	postImageBarrier =
3735 	vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3738 	vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3741 	vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3742 	vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3744 	vk::VK_QUEUE_FAMILY_IGNORED,
3745 	vk::VK_QUEUE_FAMILY_IGNORED,
3749 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
3751 	1,	// Mip level count
3756 	const vk::VkBufferImageCopy	region =
3761 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
3768 	(deUint32)m_srcImageWidth,
3769 	(deUint32)m_srcImageHeight,
	// Fill the staging buffer with deterministic random bytes from m_seed.
3775 	void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_srcImageWidth * m_srcImageHeight);
3776 	de::Random	rng	(m_seed);
3779 	deUint8* const	data = (deUint8*)ptr;
3781 	for (size_t ndx = 0; ndx < (size_t)(4 * m_srcImageWidth * m_srcImageHeight); ndx++)
3782 	data[ndx] = rng.getUint8();
3785 	vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_srcImageWidth * m_srcImageHeight);
3786 	vkd.unmapMemory(device, *memory);
	// NOTE(review): "®ion" below looks like a mis-encoded "&region" — confirm against upstream source.
3789 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
3790 	vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
3791 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3793 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3794 	queueRun(vkd, queue, *commandBuffer);
// Log the blit performed by submit(), noting the 2x case.
3798 void ImageBlitFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3800 	log << TestLog::Message << commandIndex << ":" << getName() << " Blit from another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
// Record a nearest-filtered blit from m_srcImage (TRANSFER_SRC_OPTIMAL) onto
// the test image in its current layout.
3803 void ImageBlitFromImage::submit (SubmitContext& context)
3805 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
3806 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3807 	const vk::VkImageBlit	region	=
3811 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
3827 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
	// NOTE(review): "®ion" below looks like a mis-encoded "&region" — confirm against upstream source.
3841 	vkd.cmdBlitImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), context.getImageLayout(), 1, &region, vk::VK_FILTER_NEAREST);
// Update the reference image to what the blit produced: replay the seeded
// random stream; for 1:1 write pixels directly, for 2x build the half-size
// source and nearest-sample it up into the reference. Raw memory contents
// become undefined after the blit, so the reference memory is invalidated.
3844 void ImageBlitFromImage::verify (VerifyContext& context, size_t)
3846 	ReferenceMemory&	reference	(context.getReference());
3847 	de::Random	rng	(m_seed);
	// Blit makes the backing memory contents unpredictable byte-wise.
3849 	reference.setUndefined(0, (size_t)m_imageMemorySize);
3852 	const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
3854 	if (m_scale == BLIT_SCALE_10)
3856 	for (deInt32 y = 0; y < m_imageHeight; y++)
3857 	for (deInt32 x = 0; x < m_imageWidth; x++)
3859 	const deUint8 r8 = rng.getUint8();
3860 	const deUint8 g8 = rng.getUint8();
3861 	const deUint8 b8 = rng.getUint8();
3862 	const deUint8 a8 = rng.getUint8();
3864 	refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3867 	else if (m_scale == BLIT_SCALE_20)
	// Reconstruct the half-size source, then upscale with nearest sampling.
3869 	tcu::TextureLevel	source	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_srcImageWidth, m_srcImageHeight);
3870 	const float		xscale	= ((float)m_srcImageWidth) / (float)m_imageWidth;
3871 	const float		yscale	= ((float)m_srcImageHeight) / (float)m_imageHeight;
3873 	for (deInt32 y = 0; y < m_srcImageHeight; y++)
3874 	for (deInt32 x = 0; x < m_srcImageWidth; x++)
3876 	const deUint8 r8 = rng.getUint8();
3877 	const deUint8 g8 = rng.getUint8();
3878 	const deUint8 b8 = rng.getUint8();
3879 	const deUint8 a8 = rng.getUint8();
3881 	source.getAccess().setPixel(UVec4(r8, g8, b8, a8), x, y);
3884 	for (deInt32 y = 0; y < m_imageHeight; y++)
3885 	for (deInt32 x = 0; x < m_imageWidth; x++)
3886 	refAccess.setPixel(source.getAccess().getPixelUint(int(float(x) * xscale), int(float(y) * yscale)), x, y);
3889 	DE_FATAL("Unsupported scale");
// Command that allocates a destination image (same size, or 2x for
// BLIT_SCALE_20) and blits the test image into it; verify() reads the result
// back and compares it against the (possibly upscaled) reference image.
3893 class ImageBlitToImage : public CmdCommand
3896 	ImageBlitToImage	(BlitScale scale) : m_scale(scale) {}
3897 	~ImageBlitToImage	(void) {}
3898 	const char*	getName	(void) const { return "ImageBlitToImage"; }
3900 	void	logPrepare	(TestLog& log, size_t commandIndex) const;
3901 	void	prepare	(PrepareContext& context);
3902 	void	logSubmit	(TestLog& log, size_t commandIndex) const;
3903 	void	submit	(SubmitContext& context);
3904 	void	verify	(VerifyContext& context, size_t commandIndex);
3907 	const BlitScale	m_scale;
3908 	deInt32	m_imageWidth;
3909 	deInt32	m_imageHeight;
3910 	vk::VkDeviceSize	m_imageMemorySize;
3911 	deInt32	m_dstImageWidth;
3912 	deInt32	m_dstImageHeight;
	// Destination image and its backing memory, owned by this command.
3913 	vk::Move<vk::VkImage>	m_dstImage;
3914 	vk::Move<vk::VkDeviceMemory>	m_memory;
// Log what prepare() will do for this command index.
3917 void ImageBlitToImage::logPrepare (TestLog& log, size_t commandIndex) const
3919 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image blit." << TestLog::EndMessage;
// Create the blit destination image (1:1 or 2x the test-image extent), bind
// its memory, and transition it to TRANSFER_DST_OPTIMAL with a one-off command
// buffer so submit() can blit into it.
3922 void ImageBlitToImage::prepare (PrepareContext& context)
3924 	const vk::InstanceInterface&	vki	= context.getContext().getInstanceInterface();
3925 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
3926 	const vk::VkPhysicalDevice	physicalDevice	= context.getContext().getPhysicalDevice();
3927 	const vk::VkDevice	device	= context.getContext().getDevice();
3928 	const vk::VkQueue	queue	= context.getContext().getQueue();
3929 	const vk::VkCommandPool	commandPool	= context.getContext().getCommandPool();
3930 	const vector<deUint32>&	queueFamilies	= context.getContext().getQueueFamilies();
3932 	m_imageWidth = context.getImageWidth();
3933 	m_imageHeight = context.getImageHeight();
3934 	m_imageMemorySize = context.getImageMemorySize();
	// Destination extent: equal for 1:1, doubled for the 2x-upscale blit.
3936 	if (m_scale == BLIT_SCALE_10)
3938 	m_dstImageWidth = context.getImageWidth();
3939 	m_dstImageHeight = context.getImageHeight();
3941 	else if (m_scale == BLIT_SCALE_20)
3943 	m_dstImageWidth = context.getImageWidth() * 2;
3944 	m_dstImageHeight = context.getImageHeight() * 2;
3947 	DE_FATAL("Unsupportd blit scale");
3950 	const vk::VkImageCreateInfo	createInfo =
3952 	vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3956 	vk::VK_IMAGE_TYPE_2D,
3957 	vk::VK_FORMAT_R8G8B8A8_UNORM,
3959 	(deUint32)m_dstImageWidth,
3960 	(deUint32)m_dstImageHeight,
3963 	1, 1, // mipLevels, arrayLayers
3964 	vk::VK_SAMPLE_COUNT_1_BIT,
3966 	vk::VK_IMAGE_TILING_OPTIMAL,
3967 	vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3968 	vk::VK_SHARING_MODE_EXCLUSIVE,
3970 	(deUint32)queueFamilies.size(),
3972 	vk::VK_IMAGE_LAYOUT_UNDEFINED
3975 	m_dstImage = vk::createImage(vkd, device, &createInfo);
3978 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
	// Transition UNDEFINED -> TRANSFER_DST_OPTIMAL before the blit writes it.
3981 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3982 	const vk::VkImageMemoryBarrier	barrier	=
3984 	vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3988 	vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3990 	vk::VK_IMAGE_LAYOUT_UNDEFINED,
3991 	vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3993 	vk::VK_QUEUE_FAMILY_IGNORED,
3994 	vk::VK_QUEUE_FAMILY_IGNORED,
3998 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
4000 	1,	// Mip level count
4006 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
4008 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4009 	queueRun(vkd, queue, *commandBuffer);
// Log the blit performed by submit(), noting the 2x case.
4013 void ImageBlitToImage::logSubmit (TestLog& log, size_t commandIndex) const
4015 	log << TestLog::Message << commandIndex << ":" << getName() << " Blit image to another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
// Record a nearest-filtered blit from the test image (current layout) into
// m_dstImage (TRANSFER_DST_OPTIMAL).
4018 void ImageBlitToImage::submit (SubmitContext& context)
4020 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
4021 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
4022 	const vk::VkImageBlit	region	=
4026 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
4042 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
	// NOTE(review): "®ion" below looks like a mis-encoded "&region" — confirm against upstream source.
4056 	vkd.cmdBlitImage(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region, vk::VK_FILTER_NEAREST);
// Read m_dstImage back into a host-visible buffer and compare against the
// reference image: directly for 1:1, or against a nearest-upscaled (x/2, y/2
// sampling) copy of the reference for the 2x case.
4059 void ImageBlitToImage::verify (VerifyContext& context, size_t commandIndex)
4061 	tcu::ResultCollector&	resultCollector	(context.getResultCollector());
4062 	const vk::InstanceInterface&	vki	= context.getContext().getInstanceInterface();
4063 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
4064 	const vk::VkPhysicalDevice	physicalDevice	= context.getContext().getPhysicalDevice();
4065 	const vk::VkDevice	device	= context.getContext().getDevice();
4066 	const vk::VkQueue	queue	= context.getContext().getQueue();
4067 	const vk::VkCommandPool	commandPool	= context.getContext().getCommandPool();
4068 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4069 	const vector<deUint32>&	queueFamilies	= context.getContext().getQueueFamilies();
	// RGBA8 readback buffer sized to the destination image.
4070 	const vk::Unique<vk::VkBuffer>	dstBuffer	(createBuffer(vkd, device, 4 * m_dstImageWidth * m_dstImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4071 	const vk::Unique<vk::VkDeviceMemory>	memory	(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
	// TRANSFER_DST -> TRANSFER_SRC for the readback copy.
4073 	const vk::VkImageMemoryBarrier	imageBarrier	=
4075 	vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4078 	vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4079 	vk::VK_ACCESS_TRANSFER_READ_BIT,
4081 	vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4082 	vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4084 	vk::VK_QUEUE_FAMILY_IGNORED,
4085 	vk::VK_QUEUE_FAMILY_IGNORED,
4089 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
4091 	1,	// Mip level count
	// Make the buffer write visible to host reads.
4096 	const vk::VkBufferMemoryBarrier	bufferBarrier =
4098 	vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4101 	vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4102 	vk::VK_ACCESS_HOST_READ_BIT,
4104 	vk::VK_QUEUE_FAMILY_IGNORED,
4105 	vk::VK_QUEUE_FAMILY_IGNORED,
4110 	const vk::VkBufferImageCopy	region =
4115 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
4122 	(deUint32)m_dstImageWidth,
4123 	(deUint32)m_dstImageHeight,
	// NOTE(review): "®ion" below looks like a mis-encoded "&region" — confirm against upstream source.
4128 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
4129 	vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
4130 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4133 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4134 	queueRun(vkd, queue, *commandBuffer);
4137 	void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_dstImageWidth * m_dstImageHeight);
4139 	vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_dstImageWidth * m_dstImageHeight);
4141 	if (m_scale == BLIT_SCALE_10)
	// 1:1 blit: pixel-exact match against the reference image.
4143 	const deUint8* const	data	= (const deUint8*)ptr;
4144 	const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4145 	const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
4147 	if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4148 	resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4150 	else if (m_scale == BLIT_SCALE_20)
	// 2x blit: build the expected upscaled reference by nearest sampling.
4152 	const deUint8* const	data	= (const deUint8*)ptr;
4153 	const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4154 	tcu::TextureLevel	reference	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1);
4157 	const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
4159 	for (deInt32 y = 0; y < m_dstImageHeight; y++)
4160 	for (deInt32 x = 0; x < m_dstImageWidth; x++)
4162 	reference.getAccess().setPixel(refAccess.getPixel(x/2, y/2), x, y);
4166 	if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), reference.getAccess(), resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4167 	resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4170 	DE_FATAL("Unknown scale");
4172 	vkd.unmapMemory(device, *memory);
// Thin wrapper around PrepareContext that adds the render pass, framebuffer
// and render-target dimensions needed while preparing RenderPassCommands.
// Non-owning: all handles are borrowed from the caller.
4176 class PrepareRenderPassContext
4179 	PrepareRenderPassContext	(PrepareContext&	context,
4180 	vk::VkRenderPass	renderPass,
4181 	vk::VkFramebuffer	framebuffer,
4182 	deInt32	targetWidth,
4183 	deInt32	targetHeight)
4184 	: m_context	(context)
4185 	, m_renderPass	(renderPass)
4186 	, m_framebuffer	(framebuffer)
4187 	, m_targetWidth	(targetWidth)
4188 	, m_targetHeight	(targetHeight)
	// Pass-through accessors forwarding to the wrapped PrepareContext.
4192 	const Memory&	getMemory	(void) const { return m_context.getMemory(); }
4193 	const Context&	getContext	(void) const { return m_context.getContext(); }
4194 	const vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection	(void) const { return m_context.getBinaryCollection(); }
4196 	vk::VkBuffer	getBuffer	(void) const { return m_context.getBuffer(); }
4197 	vk::VkDeviceSize	getBufferSize	(void) const { return m_context.getBufferSize(); }
4199 	vk::VkImage	getImage	(void) const { return m_context.getImage(); }
4200 	deInt32	getImageWidth	(void) const { return m_context.getImageWidth(); }
4201 	deInt32	getImageHeight	(void) const { return m_context.getImageHeight(); }
4202 	vk::VkImageLayout	getImageLayout	(void) const { return m_context.getImageLayout(); }
	// Render-target specific accessors.
4204 	deInt32	getTargetWidth	(void) const { return m_targetWidth; }
4205 	deInt32	getTargetHeight	(void) const { return m_targetHeight; }
4207 	vk::VkRenderPass	getRenderPass	(void) const { return m_renderPass; }
4210 	PrepareContext&	m_context;
4211 	const vk::VkRenderPass	m_renderPass;
4212 	const vk::VkFramebuffer	m_framebuffer;
4213 	const deInt32	m_targetWidth;
4214 	const deInt32	m_targetHeight;
// Wrapper around VerifyContext that additionally owns an RGBA8 reference
// render target of the given size, which RenderPassCommand::verify()
// implementations draw into.
4217 class VerifyRenderPassContext
4220 	VerifyRenderPassContext	(VerifyContext&	context,
4221 	deInt32	targetWidth,
4222 	deInt32	targetHeight)
4223 	: m_context	(context)
4224 	, m_referenceTarget	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), targetWidth, targetHeight)
4228 	const Context&	getContext	(void) const { return m_context.getContext(); }
4229 	TestLog&	getLog	(void) const { return m_context.getLog(); }
4230 	tcu::ResultCollector&	getResultCollector	(void) const { return m_context.getResultCollector(); }
	// Reference image of the render target for comparison after the pass.
4232 	TextureLevel&	getReferenceTarget	(void) { return m_referenceTarget; }
4234 	ReferenceMemory&	getReference	(void) { return m_context.getReference(); }
4235 	TextureLevel&	getReferenceImage	(void) { return m_context.getReferenceImage();}
4238 	VerifyContext&	m_context;
4239 	TextureLevel	m_referenceTarget;
// Interface for commands recorded inside a render pass instance. Mirrors
// CmdCommand's prepare/submit/verify/log lifecycle, but prepare() receives a
// PrepareRenderPassContext and verify() a VerifyRenderPassContext. All hooks
// default to no-ops so subclasses override only what they need.
4243 class RenderPassCommand
4246 	virtual	~RenderPassCommand	(void) {}
4247 	virtual const char*	getName	(void) const = 0;
4249 	// Log things that are done during prepare
4250 	virtual void	logPrepare	(TestLog&, size_t) const {}
4251 	// Log submitted calls etc.
4252 	virtual void	logSubmit	(TestLog&, size_t) const {}
4254 	// Allocate vulkan resources and prepare for submit.
4255 	virtual void	prepare	(PrepareRenderPassContext&) {}
4257 	// Submit commands to command buffer.
4258 	virtual void	submit	(SubmitContext&) {}
4261 	virtual void	verify	(VerifyRenderPassContext&, size_t) {}
// CmdCommand that wraps a sequence of RenderPassCommands in a render pass
// instance targeting a fixed-size color attachment, then verifies the
// rendered target against the reference built by the child commands.
4264 class SubmitRenderPass : public CmdCommand
4267 	SubmitRenderPass	(const vector<RenderPassCommand*>& commands);
4268 	~SubmitRenderPass	(void) {}
4269 	const char*	getName	(void) const { return "SubmitRenderPass"; }
4271 	void	logPrepare	(TestLog&, size_t) const;
4272 	void	logSubmit	(TestLog&, size_t) const;
4274 	void	prepare	(PrepareContext&);
4275 	void	submit	(SubmitContext&);
4277 	void	verify	(VerifyContext&, size_t);
	// Color target dimensions are fixed in the constructor.
4280 	const deInt32	m_targetWidth;
4281 	const deInt32	m_targetHeight;
	// Render pass, color target image/view/memory and framebuffer created in prepare().
4282 	vk::Move<vk::VkRenderPass>	m_renderPass;
4283 	vk::Move<vk::VkDeviceMemory>	m_colorTargetMemory;
4284 	de::MovePtr<vk::Allocation>	m_colorTargetMemory2;
4285 	vk::Move<vk::VkImage>	m_colorTarget;
4286 	vk::Move<vk::VkImageView>	m_colorTargetView;
4287 	vk::Move<vk::VkFramebuffer>	m_framebuffer;
4288 	vector<RenderPassCommand*>	m_commands;
// Take (non-owning copies of) the child commands; the render target is a
// fixed 256x256 color attachment.
4291 SubmitRenderPass::SubmitRenderPass (const vector<RenderPassCommand*>& commands)
4292 	: m_targetWidth	(256)
4293 	, m_targetHeight	(256)
4294 	, m_commands	(commands)
// Open a log section for this command and forward logPrepare to each child.
4298 void SubmitRenderPass::logPrepare (TestLog& log, size_t commandIndex) const
4300 	const string	sectionName	(de::toString(commandIndex) + ":" + getName());
4301 	const tcu::ScopedLogSection	section	(log, sectionName, sectionName);
4303 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4305 	RenderPassCommand& command = *m_commands[cmdNdx];
4306 	command.logPrepare(log, cmdNdx);
// Open a log section for this command and forward logSubmit to each child.
4310 void SubmitRenderPass::logSubmit (TestLog& log, size_t commandIndex) const
4312 	const string	sectionName	(de::toString(commandIndex) + ":" + getName());
4313 	const tcu::ScopedLogSection	section	(log, sectionName, sectionName);
4315 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4317 	RenderPassCommand& command = *m_commands[cmdNdx];
4318 	command.logSubmit(log, cmdNdx);
// Build everything the render pass needs: a single-subpass render pass with
// one RGBA8 color attachment (cleared on load, stored, final layout
// TRANSFER_SRC), the color target image + memory + view, and the framebuffer.
// Finally forward prepare() to each child command with a
// PrepareRenderPassContext.
4322 void SubmitRenderPass::prepare (PrepareContext& context)
4324 	const vk::InstanceInterface&	vki	= context.getContext().getInstanceInterface();
4325 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
4326 	const vk::VkPhysicalDevice	physicalDevice	= context.getContext().getPhysicalDevice();
4327 	const vk::VkDevice	device	= context.getContext().getDevice();
4328 	const vector<deUint32>&	queueFamilies	= context.getContext().getQueueFamilies();
	// Single subpass with one color attachment in COLOR_ATTACHMENT_OPTIMAL.
4330 	const vk::VkAttachmentReference	colorAttachments[] =
4332 	{ 0, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }
4334 	const vk::VkSubpassDescription	subpass =
4337 	vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
4342 	DE_LENGTH_OF_ARRAY(colorAttachments),
	// Attachment: cleared at load, stored, ends in TRANSFER_SRC for readback.
4349 	const vk::VkAttachmentDescription attachment =
4352 	vk::VK_FORMAT_R8G8B8A8_UNORM,
4353 	vk::VK_SAMPLE_COUNT_1_BIT,
4355 	vk::VK_ATTACHMENT_LOAD_OP_CLEAR,
4356 	vk::VK_ATTACHMENT_STORE_OP_STORE,
4358 	vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
4359 	vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,
4361 	vk::VK_IMAGE_LAYOUT_UNDEFINED,
4362 	vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
	// Color target image creation and memory binding.
4365 	const vk::VkImageCreateInfo createInfo =
4367 	vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
4371 	vk::VK_IMAGE_TYPE_2D,
4372 	vk::VK_FORMAT_R8G8B8A8_UNORM,
4373 	{ (deUint32)m_targetWidth, (deUint32)m_targetHeight, 1u },
4376 	vk::VK_SAMPLE_COUNT_1_BIT,
4377 	vk::VK_IMAGE_TILING_OPTIMAL,
4378 	vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
4379 	vk::VK_SHARING_MODE_EXCLUSIVE,
4380 	(deUint32)queueFamilies.size(),
4382 	vk::VK_IMAGE_LAYOUT_UNDEFINED
4385 	m_colorTarget = vk::createImage(vkd, device, &createInfo);
4388 	m_colorTargetMemory = bindImageMemory(vki, vkd, physicalDevice, device, *m_colorTarget, 0);
	// View over the color target (identity swizzle, color aspect).
4391 	const vk::VkImageViewCreateInfo createInfo =
4393 	vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
4398 	vk::VK_IMAGE_VIEW_TYPE_2D,
4399 	vk::VK_FORMAT_R8G8B8A8_UNORM,
4401 	vk::VK_COMPONENT_SWIZZLE_R,
4402 	vk::VK_COMPONENT_SWIZZLE_G,
4403 	vk::VK_COMPONENT_SWIZZLE_B,
4404 	vk::VK_COMPONENT_SWIZZLE_A
4407 	vk::VK_IMAGE_ASPECT_COLOR_BIT,
4415 	m_colorTargetView = vk::createImageView(vkd, device, &createInfo);
4418 	const vk::VkRenderPassCreateInfo createInfo =
4420 	vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
4434 	m_renderPass = vk::createRenderPass(vkd, device, &createInfo);
	// Framebuffer tying the color target view to the render pass.
4438 	const vk::VkImageView imageViews[] =
4442 	const vk::VkFramebufferCreateInfo createInfo =
4444 	vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
4449 	DE_LENGTH_OF_ARRAY(imageViews),
4451 	(deUint32)m_targetWidth,
4452 	(deUint32)m_targetHeight,
4456 	m_framebuffer = vk::createFramebuffer(vkd, device, &createInfo);
	// Let each child command allocate its own resources.
4460 	PrepareRenderPassContext renderpassContext (context, *m_renderPass, *m_framebuffer, m_targetWidth, m_targetHeight);
4462 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4464 	RenderPassCommand& command = *m_commands[cmdNdx];
4465 	command.prepare(renderpassContext);
// Begin the render pass (clearing the target to opaque black), record each
// child command inline, then end the pass.
4470 void SubmitRenderPass::submit (SubmitContext& context)
4472 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
4473 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
4474 	const vk::VkClearValue	clearValue	= vk::makeClearValueColorF32(0.0f, 0.0f, 0.0f, 1.0f);
4476 	const vk::VkRenderPassBeginInfo	beginInfo	=
4478 	vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
4484 	{ { 0, 0 },  { (deUint32)m_targetWidth, (deUint32)m_targetHeight } },
4489 	vkd.cmdBeginRenderPass(commandBuffer, &beginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
4491 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4493 	RenderPassCommand& command = *m_commands[cmdNdx];
4495 	command.submit(context);
4498 	vkd.cmdEndRenderPass(commandBuffer);
// Verify the rendered result: replay the render-pass commands against a CPU
// reference image, read the GPU color target back through a staging buffer,
// and compare the two with a zero-tolerance integer threshold.
// Fix: the vkCmdCopyImageToBuffer call contained a mis-encoded "&region"
// (mojibake "®ion"), which is restored here.
4501 void SubmitRenderPass::verify (VerifyContext& context, size_t commandIndex)
4503 	TestLog&	log	(context.getLog());
4504 	tcu::ResultCollector&	resultCollector	(context.getResultCollector());
4505 	const string	sectionName	(de::toString(commandIndex) + ":" + getName());
4506 	const tcu::ScopedLogSection	section	(log, sectionName, sectionName);
4507 	VerifyRenderPassContext	verifyContext	(context, m_targetWidth, m_targetHeight);
// Reference target starts from the same clear color the render pass uses.
4509 	tcu::clear(verifyContext.getReferenceTarget().getAccess(), Vec4(0.0f, 0.0f, 0.0f, 1.0f));
// Let each sub-command update the reference image the same way it updated the GPU target.
4511 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4513 		RenderPassCommand& command = *m_commands[cmdNdx];
4514 		command.verify(verifyContext, cmdNdx);
4518 	const vk::InstanceInterface&	vki	= context.getContext().getInstanceInterface();
4519 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
4520 	const vk::VkPhysicalDevice	physicalDevice	= context.getContext().getPhysicalDevice();
4521 	const vk::VkDevice	device	= context.getContext().getDevice();
4522 	const vk::VkQueue	queue	= context.getContext().getQueue();
4523 	const vk::VkCommandPool	commandPool	= context.getContext().getCommandPool();
4524 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4525 	const vector<deUint32>&	queueFamilies	= context.getContext().getQueueFamilies();
// 4 bytes per pixel (RGBA8) readback buffer, host visible for CPU comparison.
4526 	const vk::Unique<vk::VkBuffer>	dstBuffer	(createBuffer(vkd, device, 4 * m_targetWidth * m_targetHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4527 	const vk::Unique<vk::VkDeviceMemory>	memory	(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Make color-attachment writes available to the transfer read of the copy below.
4529 	const vk::VkImageMemoryBarrier	imageBarrier	=
4531 		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4534 		vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
4535 		vk::VK_ACCESS_TRANSFER_READ_BIT,
// NOTE(review): old and new layout are both TRANSFER_SRC_OPTIMAL here; this
// presumes the render pass's finalLayout already left the image in
// TRANSFER_SRC_OPTIMAL — confirm against the render-pass creation code.
4537 		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4538 		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4540 		vk::VK_QUEUE_FAMILY_IGNORED,
4541 		vk::VK_QUEUE_FAMILY_IGNORED,
4545 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
4547 			1,	// Mip level count
// Make the transfer write to dstBuffer visible to host reads after submission.
4552 	const vk::VkBufferMemoryBarrier	bufferBarrier	=
4554 		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4557 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4558 		vk::VK_ACCESS_HOST_READ_BIT,
4560 		vk::VK_QUEUE_FAMILY_IGNORED,
4561 		vk::VK_QUEUE_FAMILY_IGNORED,
// Full-target copy of the color attachment into the readback buffer.
4566 	const vk::VkBufferImageCopy	region	=
4571 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
4578 			(deUint32)m_targetWidth,
4579 			(deUint32)m_targetHeight,
4584 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
4585 	vkd.cmdCopyImageToBuffer(*commandBuffer, *m_colorTarget, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
4586 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4589 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4590 	queueRun(vkd, queue, *commandBuffer);
// Map and invalidate before reading from the host (memory may be non-coherent).
4593 	void* const ptr = mapMemory(vkd, device, *memory, 4 * m_targetWidth * m_targetHeight);
4595 	vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_targetWidth * m_targetHeight);
4598 	const deUint8* const	data	= (const deUint8*)ptr;
4599 	const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_targetWidth, m_targetHeight, 1, data);
4600 	const ConstPixelBufferAccess&	refAccess	(verifyContext.getReferenceTarget().getAccess());
// Exact (threshold 0) comparison: the reference replay must match GPU output bit-for-bit.
4602 	if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4603 		resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4606 	vkd.unmapMemory(device, *memory);
// Render-pass command that draws using the test buffer either as a vertex
// buffer or as an index buffer, covering one pixel per 2-byte element.
4611 class RenderBuffer : public RenderPassCommand
4616 		RENDERAS_VERTEX_BUFFER,
4617 		RENDERAS_INDEX_BUFFER,
4619 	RenderBuffer	(RenderAs renderAs) : m_renderAs(renderAs) {}
4620 	~RenderBuffer	(void) {}
4622 	const char*	getName	(void) const { return "RenderBuffer"; }
4623 	void	logPrepare	(TestLog&, size_t) const;
4624 	void	logSubmit	(TestLog&, size_t) const;
4625 	void	prepare	(PrepareRenderPassContext&);
4626 	void	submit	(SubmitContext& context);
4627 	void	verify	(VerifyRenderPassContext&, size_t);
// How the buffer is consumed by the draw; fixed at construction.
4630 	const RenderAs	m_renderAs;
4631 	vk::Move<vk::VkPipeline>	m_pipeline;
4632 	vk::Move<vk::VkPipelineLayout>	m_pipelineLayout;
// Captured at prepare() time so verify() knows how many elements were drawn.
4633 	vk::VkDeviceSize	m_bufferSize;
// Select the pre-built vertex shader binary matching how the buffer is used.
4635 static const vk::ProgramBinary& getVertexShader (const vk::ProgramCollection<vk::ProgramBinary>& collections, RenderAs renderAs)
4639 		case RENDERAS_VERTEX_BUFFER:
4640 			return collections.get("vertex-buffer.vert");
4642 		case RENDERAS_INDEX_BUFFER:
4643 			return collections.get("index-buffer.vert");
// Unreachable for valid RenderAs values; keeps the compiler happy.
4646 			DE_FATAL("Unknown renderAs");
4647 			return collections.get("");
// Log what pipeline will be created for this command (prepare phase).
4652 void RenderBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4654 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
// Log what this command does at submit time.
4657 void RenderBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4659 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
// Build the graphics pipeline used by submit(): point-list topology, a
// pass-through vertex shader selected by m_renderAs, and a white fragment
// shader ("render-white.frag"). Vertex-input state is only populated when the
// buffer is consumed as a vertex buffer.
4662 void RenderBuffer::prepare (PrepareRenderPassContext& context)
4664 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
4665 	const vk::VkDevice	device	= context.getContext().getDevice();
4666 	const vk::VkRenderPass	renderPass	= context.getRenderPass();
4667 	const deUint32	subpass	= 0;
4668 	const vk::Unique<vk::VkShaderModule>	vertexShaderModule	(vk::createShaderModule(vkd, device, getVertexShader(context.getBinaryCollection(), m_renderAs), 0));
4669 	const vk::Unique<vk::VkShaderModule>	fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
// Remember buffer size; verify() derives the drawn element count from it.
4671 	m_bufferSize = context.getBufferSize();
// Empty pipeline layout: the shaders use no descriptors or push constants.
4674 		const vk::VkPipelineLayoutCreateInfo	createInfo	=
4676 			vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
4685 		m_pipelineLayout = vk::createPipelineLayout(vkd, device, &createInfo);
4689 		const vk::VkPipelineShaderStageCreateInfo			shaderStages[]		=
4692 				vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4695 				vk::VK_SHADER_STAGE_VERTEX_BIT,
4696 				*vertexShaderModule,
4701 				vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4704 				vk::VK_SHADER_STAGE_FRAGMENT_BIT,
4705 				*fragmentShaderModule,
// Depth/stencil effectively disabled: compare ops ALWAYS, KEEP on all stencil ops.
4710 		const vk::VkPipelineDepthStencilStateCreateInfo	depthStencilState	=
4712 			vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
4717 			vk::VK_COMPARE_OP_ALWAYS,
4721 				vk::VK_STENCIL_OP_KEEP,
4722 				vk::VK_STENCIL_OP_KEEP,
4723 				vk::VK_STENCIL_OP_KEEP,
4724 				vk::VK_COMPARE_OP_ALWAYS,
4730 				vk::VK_STENCIL_OP_KEEP,
4731 				vk::VK_STENCIL_OP_KEEP,
4732 				vk::VK_STENCIL_OP_KEEP,
4733 				vk::VK_COMPARE_OP_ALWAYS,
// One binding of two 8-bit unorm components: each pair of bytes is one point's x,y.
4741 		const vk::VkVertexInputBindingDescription	vertexBindingDescriptions[]	=
4746 				vk::VK_VERTEX_INPUT_RATE_VERTEX
4749 		const vk::VkVertexInputAttributeDescription	vertexAttributeDescriptions[]	=
4754 				vk::VK_FORMAT_R8G8_UNORM,
// Vertex input only applies when rendering as a vertex buffer; the index-buffer
// variant generates positions in the shader and binds no vertex data.
4758 		const vk::VkPipelineVertexInputStateCreateInfo	vertexInputState	=
4760 			vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
4764 			m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexBindingDescriptions) : 0u,
4765 			m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexBindingDescriptions : DE_NULL,
4767 			m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexAttributeDescriptions) : 0u,
4768 			m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexAttributeDescriptions : DE_NULL,
4770 		const vk::VkPipelineInputAssemblyStateCreateInfo	inputAssemblyState	=
4772 			vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
4775 			vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
// Static viewport/scissor covering the whole render target.
4778 		const vk::VkViewport	viewports[]	=
4780 			{ 0.0f, 0.0f, (float)context.getTargetWidth(), (float)context.getTargetHeight(), 0.0f, 1.0f }
4782 		const vk::VkRect2D	scissors[]	=
4784 			{ { 0, 0 }, { (deUint32)context.getTargetWidth(), (deUint32)context.getTargetHeight() } }
4786 		const vk::VkPipelineViewportStateCreateInfo	viewportState	=
4788 			vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
4791 			DE_LENGTH_OF_ARRAY(viewports),
4793 			DE_LENGTH_OF_ARRAY(scissors),
4796 		const vk::VkPipelineRasterizationStateCreateInfo	rasterState	=
4798 			vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
4804 			vk::VK_POLYGON_MODE_FILL,
4805 			vk::VK_CULL_MODE_NONE,
4806 			vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,
// Single-sample, full sample mask.
4813 		const vk::VkSampleMask							sampleMask			= ~0u;
4814 		const vk::VkPipelineMultisampleStateCreateInfo	multisampleState	=
4816 			vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
4820 			vk::VK_SAMPLE_COUNT_1_BIT,
// Pass-through blend state (ONE/ZERO, ADD) writing all color channels.
4827 		const vk::VkPipelineColorBlendAttachmentState	attachments[]	=
4831 				vk::VK_BLEND_FACTOR_ONE,
4832 				vk::VK_BLEND_FACTOR_ZERO,
4833 				vk::VK_BLEND_OP_ADD,
4834 				vk::VK_BLEND_FACTOR_ONE,
4835 				vk::VK_BLEND_FACTOR_ZERO,
4836 				vk::VK_BLEND_OP_ADD,
4837 				(vk::VK_COLOR_COMPONENT_R_BIT|
4838 				 vk::VK_COLOR_COMPONENT_G_BIT|
4839 				 vk::VK_COLOR_COMPONENT_B_BIT|
4840 				 vk::VK_COLOR_COMPONENT_A_BIT)
4843 		const vk::VkPipelineColorBlendStateCreateInfo	colorBlendState	=
4845 			vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
4850 			vk::VK_LOGIC_OP_COPY,
4851 			DE_LENGTH_OF_ARRAY(attachments),
4853 			{ 0.0f, 0.0f, 0.0f, 0.0f }
4855 		const vk::VkGraphicsPipelineCreateInfo	createInfo	=
4857 			vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
4861 			DE_LENGTH_OF_ARRAY(shaderStages),
4865 			&inputAssemblyState,
4880 		m_pipeline = vk::createGraphicsPipeline(vkd, device, 0, &createInfo);
// Record the draw: bind the pipeline built in prepare() and either draw the
// buffer contents as vertices, or bind it as a 16-bit index buffer and draw
// indexed. Each 2-byte element yields one point, hence bufferSize / 2.
4884 void RenderBuffer::submit (SubmitContext& context)
4886 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
4887 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
4888 	const vk::VkDeviceSize	offset	= 0;
4889 	const vk::VkBuffer	buffer	= context.getBuffer();
4891 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4893 	if (m_renderAs == RENDERAS_VERTEX_BUFFER)
4895 		vkd.cmdBindVertexBuffers(commandBuffer, 0, 1, &buffer, &offset);
4896 		vkd.cmdDraw(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0);
4898 	else if (m_renderAs == RENDERAS_INDEX_BUFFER)
4900 		vkd.cmdBindIndexBuffer(commandBuffer, context.getBuffer(), 0, vk::VK_INDEX_TYPE_UINT16);
4901 		vkd.cmdDrawIndexed(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0, 0);
4904 		DE_FATAL("Unknown renderAs");
// Update the CPU reference image: each 2-byte element of the reference buffer
// is interpreted as an (x, y) pixel coordinate and painted white, mirroring
// what the point-list draw in submit() produced on the GPU.
4907 void RenderBuffer::verify (VerifyRenderPassContext& context, size_t)
4909 	for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
4911 		const deUint8 x  = context.getReference().get(pos * 2);
4912 		const deUint8 y  = context.getReference().get((pos * 2) + 1);
4914 		context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
// Operation identifiers for the randomized command sequence generator.
// (The enum header is outside the visible lines; values group into host-side
// ops, buffer/image transfer ops, barriers, and render-pass commands.)
4932 	OP_BUFFER_BINDMEMORY,
4934 	OP_QUEUE_WAIT_FOR_IDLE,
4935 	OP_DEVICE_WAIT_FOR_IDLE,
4937 	OP_COMMAND_BUFFER_BEGIN,
4938 	OP_COMMAND_BUFFER_END,
4940 	// Buffer transfer operations
4944 	OP_BUFFER_COPY_TO_BUFFER,
4945 	OP_BUFFER_COPY_FROM_BUFFER,
4947 	OP_BUFFER_COPY_TO_IMAGE,
4948 	OP_BUFFER_COPY_FROM_IMAGE,
4952 	OP_IMAGE_BINDMEMORY,
4954 	OP_IMAGE_TRANSITION_LAYOUT,
4956 	OP_IMAGE_COPY_TO_BUFFER,
4957 	OP_IMAGE_COPY_FROM_BUFFER,
4959 	OP_IMAGE_COPY_TO_IMAGE,
4960 	OP_IMAGE_COPY_FROM_IMAGE,
4962 	OP_IMAGE_BLIT_TO_IMAGE,
4963 	OP_IMAGE_BLIT_FROM_IMAGE,
// Pipeline barrier variants: global memory barrier, buffer barrier, image barrier.
4967 	OP_PIPELINE_BARRIER_GLOBAL,
4968 	OP_PIPELINE_BARRIER_BUFFER,
4969 	OP_PIPELINE_BARRIER_IMAGE,
4971 	// Renderpass operations
4972 	OP_RENDERPASS_BEGIN,
4975 	// Commands inside render pass
4976 	OP_RENDER_VERTEX_BUFFER,
4977 	OP_RENDER_INDEX_BUFFER
// Stage enum value (enum header in a gap): currently recording a command buffer.
4983 	STAGE_COMMAND_BUFFER,
// Mask of all access flags this test suite treats as writes; used to decide
// when caches must be flushed / made available.
4988 vk::VkAccessFlags getWriteAccessFlags (void)
4990 	return vk::VK_ACCESS_SHADER_WRITE_BIT
4991 		| vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
4992 		| vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
4993 		| vk::VK_ACCESS_TRANSFER_WRITE_BIT
4994 		| vk::VK_ACCESS_HOST_WRITE_BIT
4995 		| vk::VK_ACCESS_MEMORY_WRITE_BIT;
// True if 'access' is one of the write accesses listed in getWriteAccessFlags().
4998 bool isWriteAccess (vk::VkAccessFlagBits access)
5000 	return (getWriteAccessFlags() & access) != 0;
// CacheState models, per pipeline stage, which writes are available, which
// operations are visible, and which stages have completed — i.e. the memory
// dependency state that pipeline barriers manipulate. Used both to validate
// generated command sequences and to compute required barriers.
5006 	CacheState	(vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses);
// Is it valid to perform 'access' in 'stage' without further barriers?
5008 	bool	isValid	(vk::VkPipelineStageFlagBits	stage,
5009 				 vk::VkAccessFlagBits	access) const;
// Record that 'access' was performed in 'stage', dirtying dependent state.
5011 	void	perform	(vk::VkPipelineStageFlagBits	stage,
5012 				 vk::VkAccessFlagBits	access);
5014 	void	submitCommandBuffer	(void);
5015 	void	waitForIdle	(void);
// Compute the union barrier that would make everything available & visible.
5017 	void	getFullBarrier	(vk::VkPipelineStageFlags&	srcStages,
5018 						 vk::VkAccessFlags&	srcAccesses,
5019 						 vk::VkPipelineStageFlags&	dstStages,
5020 						 vk::VkAccessFlags&	dstAccesses) const;
// Apply the effects of a pipeline barrier with the given scopes.
5022 	void	barrier	(vk::VkPipelineStageFlags	srcStages,
5023 				 vk::VkAccessFlags	srcAccesses,
5024 				 vk::VkPipelineStageFlags	dstStages,
5025 				 vk::VkAccessFlags	dstAccesses);
// Apply an image layout transition barrier (stronger: makes all writes available).
5027 	void	imageLayoutBarrier	(vk::VkPipelineStageFlags	srcStages,
5028 							 vk::VkAccessFlags	srcAccesses,
5029 							 vk::VkPipelineStageFlags	dstStages,
5030 							 vk::VkAccessFlags	dstAccesses);
// Debug-check the preconditions required for a valid layout transition.
5032 	void	checkImageLayoutBarrier	(vk::VkPipelineStageFlags	srcStages,
5033 								 vk::VkAccessFlags	srcAccesses,
5034 								 vk::VkPipelineStageFlags	dstStages,
5035 								 vk::VkAccessFlags	dstAccesses);
5037 	// Everything is clean and there is no need for barriers
5038 	bool	isClean	(void) const;
5040 	vk::VkPipelineStageFlags	getAllowedStages	(void) const { return m_allowedStages; }
5041 	vk::VkAccessFlags	getAllowedAcceses	(void) const { return m_allowedAccesses; }
5043 	// Limit which stages and accesses are used by the CacheState tracker
5044 	const vk::VkPipelineStageFlags	m_allowedStages;
5045 	const vk::VkAccessFlags	m_allowedAccesses;
5047 	// [dstStage][srcStage] = srcAccesses
5048 	// In stage dstStage write srcAccesses from srcStage are not yet available
5049 	vk::VkAccessFlags	m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
5050 	// Latest pipeline transition is not available in stage
5051 	bool	m_unavailableLayoutTransition[PIPELINESTAGE_LAST];
5052 	// [dstStage] = dstAccesses
5053 	// In stage dstStage ops with dstAccesses are not yet visible
5054 	vk::VkAccessFlags	m_invisibleOperations[PIPELINESTAGE_LAST];
5056 	// [dstStage] = srcStage
5057 	// Memory operation in srcStage have not completed before dstStage
5058 	vk::VkPipelineStageFlags	m_incompleteOperations[PIPELINESTAGE_LAST];
// Initialize to the fully-clean state: all operations visible and complete,
// no pending writes, no pending layout transitions, for every allowed stage.
5061 CacheState::CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses)
5062 	: m_allowedStages		(allowedStages)
5063 	, m_allowedAccesses	(allowedAccesses)
// Iterate over each single-bit stage flag up to the highest allowed bit.
5065 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5067 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5069 		if ((dstStage_ & m_allowedStages) == 0)
5072 		// All operations are initially visible
5073 		m_invisibleOperations[dstStage] = 0;
5075 		// There are no incomplete read operations initially
5076 		m_incompleteOperations[dstStage] = 0;
5078 		// There are no incomplete layout transitions
5079 		m_unavailableLayoutTransition[dstStage] = false;
5081 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5083 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5085 			if ((srcStage_ & m_allowedStages) == 0)
5088 			// There are no write operations that are not yet available
5090 			m_unavailableWriteOperations[dstStage][srcStage] = 0;
// An access is valid in a stage when all prior operations are visible there,
// any layout transition has completed there, and — for writes — no operation
// from another stage is still outstanding.
5095 bool CacheState::isValid (vk::VkPipelineStageFlagBits	stage,
5096 						  vk::VkAccessFlagBits		access) const
5098 	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5099 	DE_ASSERT((stage & (~m_allowedStages)) == 0);
5101 	const PipelineStage	dstStage	= pipelineStageFlagToPipelineStage(stage);
5103 	// Previous operations are not visible to access on stage
5104 	if (m_unavailableLayoutTransition[dstStage] || (m_invisibleOperations[dstStage] & access) != 0)
5107 	if (isWriteAccess(access))
5109 		// Memory operations from other stages have not completed before
// Writes additionally require all earlier operations to have completed.
5111 		if (m_incompleteOperations[dstStage] != 0)
// Record that 'access' happened in 'stage': every other stage now sees the
// operation as incomplete, and a write additionally makes all accesses
// invisible everywhere and becomes an unavailable write from srcStage.
5118 void CacheState::perform (vk::VkPipelineStageFlagBits	stage,
5119 						  vk::VkAccessFlagBits		access)
5121 	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5122 	DE_ASSERT((stage & (~m_allowedStages)) == 0);
5124 	const PipelineStage srcStage = pipelineStageFlagToPipelineStage(stage);
5126 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5128 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5130 		if ((dstStage_ & m_allowedStages) == 0)
5133 		// Mark stage as incomplete for all stages
5134 		m_incompleteOperations[dstStage] |= stage;
5136 		if (isWriteAccess(access))
5138 			// Mark all accesses from all stages invisible
5139 			m_invisibleOperations[dstStage] |= m_allowedAccesses;
5141 			// Mark write access from srcStage unavailable to all stages
5142 			m_unavailableWriteOperations[dstStage][srcStage] |= access;
// Queue submission implicitly orders host accesses before device work;
// model that as a barrier from the host stage covering host read/write.
5147 void CacheState::submitCommandBuffer (void)
5149 	// Flush all host writes and reads
5150 	barrier(m_allowedStages & vk::VK_PIPELINE_STAGE_HOST_BIT,
5151 			m_allowedAccesses & (vk::VK_ACCESS_HOST_READ_BIT | vk::VK_ACCESS_HOST_WRITE_BIT),
// Queue/device idle: modeled as two barriers — first make every write
// available, then make everything visible to all device (non-host) stages.
5156 void CacheState::waitForIdle (void)
5158 	// Make all writes available
5159 	barrier(m_allowedStages,
5160 			m_allowedAccesses & getWriteAccessFlags(),
5164 	// Make all writes visible on device side
5165 	barrier(m_allowedStages,
5167 			m_allowedStages & (~vk::VK_PIPELINE_STAGE_HOST_BIT),
// Accumulate, over all stage pairs, the smallest barrier scopes that would
// complete every pending operation, make every write available, make all
// accesses visible, and finish any pending layout transition.
5171 void CacheState::getFullBarrier (vk::VkPipelineStageFlags&	srcStages,
5172 								 vk::VkAccessFlags&			srcAccesses,
5173 								 vk::VkPipelineStageFlags&	dstStages,
5174 								 vk::VkAccessFlags&			dstAccesses) const
5181 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5183 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5185 		if ((dstStage_ & m_allowedStages) == 0)
5188 		// Make sure all previous operation are complete in all stages
5189 		if (m_incompleteOperations[dstStage])
5191 			dstStages |= dstStage_;
5192 			srcStages |= m_incompleteOperations[dstStage];
5195 		// Make sure all read operations are visible in dstStage
5196 		if (m_invisibleOperations[dstStage])
5198 			dstStages |= dstStage_;
5199 			dstAccesses |= m_invisibleOperations[dstStage];
5202 		// Make sure all write operations from all stages are available
5203 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5205 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5207 			if ((srcStage_ & m_allowedStages) == 0)
5210 			if (m_unavailableWriteOperations[dstStage][srcStage])
5212 				dstStages |= dstStage_;
// NOTE(review): this adds dstStage_ (not srcStage_) to srcStages; presumably
// intentional (self-dependency forces availability in dstStage) — confirm
// against the barrier() update rules.
5213 				srcStages |= dstStage_;
5214 				srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage];
5217 			if (m_unavailableLayoutTransition[dstStage] && !m_unavailableLayoutTransition[srcStage])
5219 				// Add dependency between srcStage and dstStage if layout transition has not completed in dstStage,
5220 				// but has completed in srcStage.
5221 				dstStages |= dstStage_;
5222 				srcStages |= dstStage_;
// Sanity: computed scopes never exceed the configured allowed sets.
5227 	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5228 	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5229 	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5230 	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
// Debug-only validation that a layout transition with the given scopes is
// legal in the current state: all stages must have completed (or be in
// srcStages), and at least one prior write must already be available
// somewhere if srcAccesses does not flush writes itself.
5233 void CacheState::checkImageLayoutBarrier (vk::VkPipelineStageFlags	srcStages,
5234 										  vk::VkAccessFlags			srcAccesses,
5235 										  vk::VkPipelineStageFlags	dstStages,
5236 										  vk::VkAccessFlags			dstAccesses)
5238 	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5239 	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5240 	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5241 	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
// Parameters are only read inside DE_DEBUG; silence unused warnings in release.
5243 	DE_UNREF(srcStages);
5244 	DE_UNREF(srcAccesses);
5246 	DE_UNREF(dstStages);
5247 	DE_UNREF(dstAccesses);
5249 #if defined(DE_DEBUG)
5250 	// Check that all stages have completed before srcStages or are in srcStages.
5252 		vk::VkPipelineStageFlags completedStages = srcStages;
5254 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
5256 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5258 			if ((srcStage_ & srcStages) == 0)
// A stage counts as completed if no src stage still sees it as incomplete.
5261 			completedStages |= (~m_incompleteOperations[srcStage]);
5264 		DE_ASSERT((completedStages & m_allowedStages) == m_allowedStages);
5267 	// Check that any write is available at least in one stage. Since all stages are complete even single flush is enough.
5268 	if ((getWriteAccessFlags() & m_allowedAccesses) != 0 && (srcAccesses & getWriteAccessFlags()) == 0)
5270 		bool anyWriteAvailable = false;
5272 		for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5274 			const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5276 			if ((dstStage_ & m_allowedStages) == 0)
5279 			for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5281 				const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5283 				if ((srcStage_ & m_allowedStages) == 0)
// If not every write bit is still pending for this pair, something was flushed.
5286 				if (m_unavailableWriteOperations[dstStage][srcStage] != (getWriteAccessFlags() & m_allowedAccesses))
5288 					anyWriteAvailable = true;
5294 		DE_ASSERT(anyWriteAvailable);
// Apply a layout-transition barrier: validates preconditions, then resets the
// state so that only dstStages/dstAccesses are usable without further barriers.
// The transition itself makes every prior write available.
5299 void CacheState::imageLayoutBarrier (vk::VkPipelineStageFlags	srcStages,
5300 									 vk::VkAccessFlags			srcAccesses,
5301 									 vk::VkPipelineStageFlags	dstStages,
5302 									 vk::VkAccessFlags			dstAccesses)
5304 	checkImageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
5306 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5308 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5310 		if ((dstStage_ & m_allowedStages) == 0)
5313 		// All stages are incomplete after the barrier except each dstStage in itself.
5314 		m_incompleteOperations[dstStage] = m_allowedStages & (~dstStage_);
5316 		// All memory operations are invisible unless they are listed in dstAccess
5317 		m_invisibleOperations[dstStage] = m_allowedAccesses & (~dstAccesses);
5319 		// Layout transition is unavailable in stage unless it was listed in dstStages
5320 		m_unavailableLayoutTransition[dstStage]= (dstStage_ & dstStages) == 0;
5322 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5324 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5326 			if ((srcStage_ & m_allowedStages) == 0)
5329 			// All write operations are available after layout transition
5330 			m_unavailableWriteOperations[dstStage][srcStage] = 0;
// Apply a plain pipeline/memory barrier. Two passes over a snapshot of the
// previous state: first propagate completion, layout-transition availability
// and write availability from each srcStage to each dstStage; then make
// srcAccesses available and, where every write has become available in a
// dstStage, make dstAccesses visible there.
5335 void CacheState::barrier (vk::VkPipelineStageFlags	srcStages,
5336 						  vk::VkAccessFlags			srcAccesses,
5337 						  vk::VkPipelineStageFlags	dstStages,
5338 						  vk::VkAccessFlags			dstAccesses)
5340 	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5341 	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5342 	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5343 	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
// Snapshot the pre-barrier state so updates below read consistent old values.
5347 		vk::VkPipelineStageFlags	oldIncompleteOperations[PIPELINESTAGE_LAST];
5348 		vk::VkAccessFlags			oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
5349 		bool						oldUnavailableLayoutTransition[PIPELINESTAGE_LAST];
5351 		deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
5352 		deMemcpy(oldUnavailableWriteOperations, m_unavailableWriteOperations, sizeof(oldUnavailableWriteOperations));
5353 		deMemcpy(oldUnavailableLayoutTransition, m_unavailableLayoutTransition, sizeof(oldUnavailableLayoutTransition));
5355 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
5357 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5359 			if ((srcStage_ & srcStages) == 0)
5362 			for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
5364 				const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5366 				if ((dstStage_ & dstStages) == 0)
5369 				// Stages that have completed before srcStage have also completed before dstStage
5370 				m_incompleteOperations[dstStage] &= oldIncompleteOperations[srcStage];
5372 				// Image layout transition in srcStage are now available in dstStage
5373 				m_unavailableLayoutTransition[dstStage] &= oldUnavailableLayoutTransition[srcStage];
5375 				for (vk::VkPipelineStageFlags sharedStage_ = 1; sharedStage_ <= m_allowedStages; sharedStage_ <<= 1)
5377 					const PipelineStage sharedStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)sharedStage_);
5379 					if ((sharedStage_ & m_allowedStages) == 0)
5382 					// Writes that are available in srcStage are also available in dstStage
5383 					m_unavailableWriteOperations[dstStage][sharedStage] &= oldUnavailableWriteOperations[srcStage][sharedStage];
// Second pass: apply the barrier's own access scopes.
5390 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
5392 		const PipelineStage	dstStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5393 		bool				allWritesAvailable	= true;
5395 		if ((dstStage_ & dstStages) == 0)
5398 		// Operations in srcStages have completed before any stage in dstStages
5399 		m_incompleteOperations[dstStage] &= ~srcStages;
5401 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5403 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5405 			if ((srcStage_ & m_allowedStages) == 0)
5408 			// Make srcAccesses from srcStage available in dstStage
5409 			if ((srcStage_ & srcStages) != 0)
5410 				m_unavailableWriteOperations[dstStage][srcStage] &= ~srcAccesses;
5412 			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
5413 				allWritesAvailable = false;
5416 		// If all writes are available in dstStage make dstAccesses also visible
5417 		if (allWritesAvailable)
5418 			m_invisibleOperations[dstStage] &= ~dstAccesses;
// True when no barrier is needed: everything visible, everything complete,
// layout transitions finished, and no unavailable writes for any stage pair.
5422 bool CacheState::isClean (void) const
5424 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5426 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5428 		if ((dstStage_ & m_allowedStages) == 0)
5431 		// Some operations are not visible to some stages
5432 		if (m_invisibleOperations[dstStage] != 0)
5435 		// There are operation that have not completed yet
5436 		if (m_incompleteOperations[dstStage] != 0)
5439 		// Layout transition has not completed yet
5440 		if (m_unavailableLayoutTransition[dstStage])
5443 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5445 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5447 			if ((srcStage_ & m_allowedStages) == 0)
5450 			// Some write operations are not available yet
5451 			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
// Tracked test state for the random command-sequence generator: current stage
// (host / command buffer / render pass), cache model, host map/flush status,
// and which resources exist and have memory bound.
5461 	State (Usage usage, deUint32 seed)
5462 		: stage					(STAGE_HOST)
5463 		, cache					(usageToStageFlags(usage), usageToAccessFlags(usage))
5466 		, hostInvalidated		(true)
5467 		, hostFlushed			(true)
5468 		, memoryDefined			(false)
5470 		, hasBoundBufferMemory	(false)
5472 		, hasBoundImageMemory	(false)
5473 		, imageLayout			(vk::VK_IMAGE_LAYOUT_UNDEFINED)
5474 		, imageDefined			(false)
5477 		, commandBufferIsEmpty	(true)
// Host mapped range has been invalidated since the last device write.
5486 	bool				hostInvalidated;
5491 	bool				hasBoundBufferMemory;
5494 	bool				hasBoundImageMemory;
// Current layout of the test image; starts UNDEFINED until first transition.
5495 	vk::VkImageLayout	imageLayout;
5501 	bool				commandBufferIsEmpty;
// Enumerate which operations are legal next given the current state, the
// resource usage flags, and whether buffers/images are supported. The random
// walk picks from the returned 'ops'. Grouped by current stage: host-side,
// command-buffer recording, and inside a render pass.
5504 void getAvailableOps (const State& state, bool supportsBuffers, bool supportsImages, Usage usage, vector<Op>& ops)
5506 	if (state.stage == STAGE_HOST)
5508 		if (usage & (USAGE_HOST_READ | USAGE_HOST_WRITE))
5510 			// Host memory operations
5513 				ops.push_back(OP_UNMAP);
5515 				// Avoid flush and finish if they are not needed
5516 				if (!state.hostFlushed)
5517 					ops.push_back(OP_MAP_FLUSH);
// Only invalidate when needed and when the host caches are otherwise coherent
// for the usages that will follow.
5519 				if (!state.hostInvalidated
5521 					&& ((usage & USAGE_HOST_READ) == 0
5522 						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5523 					&& ((usage & USAGE_HOST_WRITE) == 0
5524 						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)))
5526 					ops.push_back(OP_MAP_INVALIDATE);
// Read-modify-write requires defined memory and both host accesses valid.
5529 				if (usage & USAGE_HOST_READ
5530 					&& usage & USAGE_HOST_WRITE
5531 					&& state.memoryDefined
5532 					&& state.hostInvalidated
5534 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)
5535 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5537 					ops.push_back(OP_MAP_MODIFY);
5540 				if (usage & USAGE_HOST_READ
5541 					&& state.memoryDefined
5542 					&& state.hostInvalidated
5544 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5546 					ops.push_back(OP_MAP_READ);
5549 				if (usage & USAGE_HOST_WRITE
5550 					&& state.hostInvalidated
5552 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT))
5554 					ops.push_back(OP_MAP_WRITE);
5558 				ops.push_back(OP_MAP);
5561 		if (state.hasBoundBufferMemory && state.queueIdle)
5563 			// \note Destroy only buffers after they have been bound
5564 			ops.push_back(OP_BUFFER_DESTROY);
5568 		if (state.hasBuffer)
5570 			if (!state.hasBoundBufferMemory)
5571 				ops.push_back(OP_BUFFER_BINDMEMORY);
5573 		else if (!state.hasImage && supportsBuffers) // Avoid creating buffer if there is already image
5574 			ops.push_back(OP_BUFFER_CREATE);
5577 		if (state.hasBoundImageMemory && state.queueIdle)
5579 			// \note Destroy only image after they have been bound
5580 			ops.push_back(OP_IMAGE_DESTROY);
5586 			if (!state.hasBoundImageMemory)
5587 				ops.push_back(OP_IMAGE_BINDMEMORY);
5589 		else if (!state.hasBuffer && supportsImages) // Avoid creating image if there is already buffer
5590 			ops.push_back(OP_IMAGE_CREATE);
5593 		// Host writes must be flushed before GPU commands and there must be
5594 		// buffer or image for GPU commands
5595 		if (state.hostFlushed
5596 			&& (state.memoryDefined || supportsDeviceBufferWrites(usage) || state.imageDefined || supportsDeviceImageWrites(usage))
5597 			&& (state.hasBoundBufferMemory || state.hasBoundImageMemory) // Avoid command buffers if there is no object to use
5598 			&& (usageToStageFlags(usage) & (~vk::VK_PIPELINE_STAGE_HOST_BIT)) != 0) // Don't start command buffer if there are no ways to use memory from gpu
5600 			ops.push_back(OP_COMMAND_BUFFER_BEGIN);
5603 		if (!state.deviceIdle)
5604 			ops.push_back(OP_DEVICE_WAIT_FOR_IDLE);
5606 		if (!state.queueIdle)
5607 			ops.push_back(OP_QUEUE_WAIT_FOR_IDLE);
5609 	else if (state.stage == STAGE_COMMAND_BUFFER)
// Barriers are only offered while the cache model says something is pending.
5611 		if (!state.cache.isClean())
5613 			ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
5616 				ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
5618 			if (state.hasBuffer)
5619 				ops.push_back(OP_PIPELINE_BARRIER_BUFFER);
5622 		if (state.hasBoundBufferMemory)
5624 			if (usage & USAGE_TRANSFER_DST
5625 				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
5627 				ops.push_back(OP_BUFFER_FILL);
5628 				ops.push_back(OP_BUFFER_UPDATE);
5629 				ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
5630 				ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);
// Reads from the buffer additionally require its contents to be defined.
5633 			if (usage & USAGE_TRANSFER_SRC
5634 				&& state.memoryDefined
5635 				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
5637 				ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
5638 				ops.push_back(OP_BUFFER_COPY_TO_IMAGE);
5642 		if (state.hasBoundImageMemory)
5644 			ops.push_back(OP_IMAGE_TRANSITION_LAYOUT);
// Image transfer ops also require a compatible current layout.
5647 				if (usage & USAGE_TRANSFER_DST
5648 					&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
5649 						|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
5650 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
5652 					ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
5653 					ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
5654 					ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);
5657 				if (usage & USAGE_TRANSFER_SRC
5658 					&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
5659 						|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
5660 					&& state.imageDefined
5661 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
5663 					ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
5664 					ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
5665 					ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);
5670 		// \todo [2016-03-09 mika] Add other usages?
5671 		if (((usage & USAGE_VERTEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
5672 			|| ((usage & USAGE_INDEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT)))
5673 			ops.push_back(OP_RENDERPASS_BEGIN);
5675 		// \note This depends on previous operations and has to be always the
5676 		// last command buffer operation check
5677 		if (ops.empty() || !state.commandBufferIsEmpty)
5678 			ops.push_back(OP_COMMAND_BUFFER_END);
5680 	else if (state.stage == STAGE_RENDER_PASS)
5682 		if (usage & USAGE_VERTEX_BUFFER
5683 			&& state.memoryDefined
5684 			&& state.hasBoundBufferMemory
5685 			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
5687 			ops.push_back(OP_RENDER_VERTEX_BUFFER);
5690 		if (usage & USAGE_INDEX_BUFFER
5691 			&& state.memoryDefined
5692 			&& state.hasBoundBufferMemory
5693 			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
5695 			ops.push_back(OP_RENDER_INDEX_BUFFER);
// Ending the render pass is always an option once inside one.
5698 		ops.push_back(OP_RENDERPASS_END);
5701 		DE_FATAL("Unknown stage");
// Returns true when images created with the given 'usage' flags may legally be
// in image layout 'layout'. Used when picking random layout transitions so the
// test never requests a layout the image's usage bits do not permit.
5704 bool layoutSupportedByUsage (Usage usage, vk::VkImageLayout layout)
5708 case vk::VK_IMAGE_LAYOUT_GENERAL:
// Attachment-optimal layouts require the matching attachment usage bit.
5711 case vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
5712 return (usage & USAGE_COLOR_ATTACHMENT) != 0;
5714 case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
5715 return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
// Read-only depth/stencil is gated on the same depth/stencil attachment usage.
5717 case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
5718 return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
5720 case vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
5721 // \todo [2016-03-09 mika] Should include input attachment
5722 return (usage & USAGE_TEXTURE_SAMPLED) != 0;
// Transfer layouts map directly onto the transfer src/dst usage bits.
5724 case vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
5725 return (usage & USAGE_TRANSFER_SRC) != 0;
5727 case vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
5728 return (usage & USAGE_TRANSFER_DST) != 0;
5730 case vk::VK_IMAGE_LAYOUT_PREINITIALIZED:
// Any other layout value is a programming error in the caller.
5734 DE_FATAL("Unknown layout");
5739 vk::VkImageLayout getRandomNextLayout (de::Random& rng,
5741 vk::VkImageLayout previousLayout)
5743 const vk::VkImageLayout layouts[] =
5745 vk::VK_IMAGE_LAYOUT_GENERAL,
5746 vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
5747 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
5748 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
5749 vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
5750 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
5751 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
5753 size_t possibleLayoutCount = 0;
5755 for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
5757 const vk::VkImageLayout layout = layouts[layoutNdx];
5759 if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
5760 possibleLayoutCount++;
5763 size_t nextLayoutNdx = ((size_t)rng.getUint64()) % possibleLayoutCount;
5765 for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
5767 const vk::VkImageLayout layout = layouts[layoutNdx];
5769 if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
5771 if (nextLayoutNdx == 0)
5778 DE_FATAL("Unreachable");
5779 return vk::VK_IMAGE_LAYOUT_UNDEFINED;
// Advance the reference state machine as if operation 'op' had been executed.
// Models the side effects of each command: host mapping state, cache/barrier
// visibility (state.cache), image layout, resource lifetimes and RNG usage.
// \note This function and the command factories (createHostCommand /
//       createCmdCommand) must consume random numbers in lockstep; the
//       create* drivers check this with DE_ASSERT(state.rng == rng).
5782 void applyOp (State& state, const Memory& memory, Op op, Usage usage)
// OP_MAP: map the memory object; only legal on the host, outside command buffers.
5787 DE_ASSERT(state.stage == STAGE_HOST);
5788 DE_ASSERT(!state.mapped);
5789 state.mapped = true;
// OP_UNMAP: release the host mapping.
5793 DE_ASSERT(state.stage == STAGE_HOST);
5794 DE_ASSERT(state.mapped);
5795 state.mapped = false;
// OP_MAP_FLUSH: make prior host writes visible to the device.
5799 DE_ASSERT(state.stage == STAGE_HOST);
5800 DE_ASSERT(!state.hostFlushed);
5801 state.hostFlushed = true;
// OP_MAP_INVALIDATE: make device writes visible to the host.
5804 case OP_MAP_INVALIDATE:
5805 DE_ASSERT(state.stage == STAGE_HOST);
5806 DE_ASSERT(!state.hostInvalidated);
5807 state.hostInvalidated = true;
// OP_MAP_READ: host read; requires prior invalidate. Consumes one random
// number to stay in sync with HostMemoryAccess construction.
5811 DE_ASSERT(state.stage == STAGE_HOST);
5812 DE_ASSERT(state.hostInvalidated);
5813 state.rng.getUint32();
// OP_MAP_WRITE: host write defines the raw memory contents and invalidates
// any image interpretation of them.
5817 DE_ASSERT(state.stage == STAGE_HOST);
// Non-coherent memory needs an explicit flush after a host write.
5818 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5819 state.hostFlushed = false;
5821 state.memoryDefined = true;
5822 state.imageDefined = false;
5823 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
5824 state.rng.getUint32();
// OP_MAP_MODIFY: read-modify-write on the host.
5828 DE_ASSERT(state.stage == STAGE_HOST);
5829 DE_ASSERT(state.hostInvalidated);
5831 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5832 state.hostFlushed = false;
5834 state.rng.getUint32();
// Buffer lifecycle: create, destroy, bind memory.
5837 case OP_BUFFER_CREATE:
5838 DE_ASSERT(state.stage == STAGE_HOST);
5839 DE_ASSERT(!state.hasBuffer);
5841 state.hasBuffer = true;
5844 case OP_BUFFER_DESTROY:
5845 DE_ASSERT(state.stage == STAGE_HOST);
5846 DE_ASSERT(state.hasBuffer);
5847 DE_ASSERT(state.hasBoundBufferMemory);
5849 state.hasBuffer = false;
5850 state.hasBoundBufferMemory = false;
5853 case OP_BUFFER_BINDMEMORY:
5854 DE_ASSERT(state.stage == STAGE_HOST);
5855 DE_ASSERT(state.hasBuffer);
5856 DE_ASSERT(!state.hasBoundBufferMemory);
5858 state.hasBoundBufferMemory = true;
// Image lifecycle. Note: buffer and image may not exist at the same time.
5861 case OP_IMAGE_CREATE:
5862 DE_ASSERT(state.stage == STAGE_HOST);
5863 DE_ASSERT(!state.hasImage);
5864 DE_ASSERT(!state.hasBuffer);
5866 state.hasImage = true;
5869 case OP_IMAGE_DESTROY:
5870 DE_ASSERT(state.stage == STAGE_HOST);
5871 DE_ASSERT(state.hasImage);
5872 DE_ASSERT(state.hasBoundImageMemory);
5874 state.hasImage = false;
5875 state.hasBoundImageMemory = false;
5876 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
5877 state.imageDefined = false;
5880 case OP_IMAGE_BINDMEMORY:
5881 DE_ASSERT(state.stage == STAGE_HOST);
5882 DE_ASSERT(state.hasImage);
5883 DE_ASSERT(!state.hasBoundImageMemory);
5885 state.hasBoundImageMemory = true;
// OP_IMAGE_TRANSITION_LAYOUT: model the image memory barrier that
// createCmdCommand() will record for the same op.
5888 case OP_IMAGE_TRANSITION_LAYOUT:
5890 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5891 DE_ASSERT(state.hasImage);
5892 DE_ASSERT(state.hasBoundImageMemory);
5894 // \todo [2016-03-09 mika] Support linear tiling and predefined data
// 10% of the time transition from UNDEFINED, which discards image contents.
5895 const vk::VkImageLayout srcLayout = state.rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
5896 const vk::VkImageLayout dstLayout = getRandomNextLayout(state.rng, usage, srcLayout);
5898 vk::VkPipelineStageFlags dirtySrcStages;
5899 vk::VkAccessFlags dirtySrcAccesses;
5900 vk::VkPipelineStageFlags dirtyDstStages;
5901 vk::VkAccessFlags dirtyDstAccesses;
5903 vk::VkPipelineStageFlags srcStages;
5904 vk::VkAccessFlags srcAccesses;
5905 vk::VkPipelineStageFlags dstStages;
5906 vk::VkAccessFlags dstAccesses;
5908 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
// Source side must cover everything dirty; destination side is a random
// subset of the stages/accesses the usage allows.
5910 // Try masking some random bits
5911 srcStages = dirtySrcStages;
5912 srcAccesses = dirtySrcAccesses;
5914 dstStages = state.cache.getAllowedStages() & state.rng.getUint32();
5915 dstAccesses = state.cache.getAllowedAcceses() & state.rng.getUint32();
5917 // If there are no bits in dst stage mask use all stages
5918 dstStages = dstStages ? dstStages : state.cache.getAllowedStages();
// NOTE(review): the guard for this assignment is not visible in this
// extract — presumably "if (srcStages == 0)"; confirm against full source.
5921 srcStages = dstStages;
// Transitioning from UNDEFINED discards the previously defined image data.
5923 if (srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
5924 state.imageDefined = false;
5926 state.commandBufferIsEmpty = false;
5927 state.imageLayout = dstLayout;
// A layout transition invalidates the raw-memory interpretation.
5928 state.memoryDefined = false;
5929 state.cache.imageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
// Waiting for idle makes all device writes available (full sync point).
5933 case OP_QUEUE_WAIT_FOR_IDLE:
5934 DE_ASSERT(state.stage == STAGE_HOST);
5935 DE_ASSERT(!state.queueIdle);
5937 state.queueIdle = true;
5939 state.cache.waitForIdle();
5942 case OP_DEVICE_WAIT_FOR_IDLE:
5943 DE_ASSERT(state.stage == STAGE_HOST);
5944 DE_ASSERT(!state.deviceIdle);
// Device idle implies queue idle as well.
5946 state.queueIdle = true;
5947 state.deviceIdle = true;
5949 state.cache.waitForIdle();
5952 case OP_COMMAND_BUFFER_BEGIN:
5953 DE_ASSERT(state.stage == STAGE_HOST);
5954 state.stage = STAGE_COMMAND_BUFFER;
5955 state.commandBufferIsEmpty = true;
5956 // Makes host writes visible to command buffer
5957 state.cache.submitCommandBuffer();
5960 case OP_COMMAND_BUFFER_END:
5961 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5962 state.stage = STAGE_HOST;
// Submission invalidates any previous idle guarantees.
5963 state.queueIdle = false;
5964 state.deviceIdle = false;
// Transfer writes INTO the buffer: define raw memory, invalidate image view
// of the data, and (for non-coherent memory) require a new host invalidate.
5967 case OP_BUFFER_COPY_FROM_BUFFER:
5968 case OP_BUFFER_COPY_FROM_IMAGE:
5969 case OP_BUFFER_UPDATE:
5970 case OP_BUFFER_FILL:
5971 state.rng.getUint32();
5972 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5974 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5975 state.hostInvalidated = false;
5977 state.commandBufferIsEmpty = false;
5978 state.memoryDefined = true;
5979 state.imageDefined = false;
5980 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
5981 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
// Transfer reads FROM the buffer: no state change besides the cache model.
5984 case OP_BUFFER_COPY_TO_BUFFER:
5985 case OP_BUFFER_COPY_TO_IMAGE:
5986 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5988 state.commandBufferIsEmpty = false;
5989 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
// Transfer writes INTO the image. Blit consumes an extra random bool
// (the scale choice in createCmdCommand) before falling through.
5992 case OP_IMAGE_BLIT_FROM_IMAGE:
5993 state.rng.getBool();
5995 case OP_IMAGE_COPY_FROM_BUFFER:
5996 case OP_IMAGE_COPY_FROM_IMAGE:
5997 state.rng.getUint32();
5998 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6000 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
6001 state.hostInvalidated = false;
6003 state.commandBufferIsEmpty = false;
6004 state.memoryDefined = false;
6005 state.imageDefined = true;
6006 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
// Transfer reads FROM the image.
6009 case OP_IMAGE_BLIT_TO_IMAGE:
6010 state.rng.getBool();
6012 case OP_IMAGE_COPY_TO_BUFFER:
6013 case OP_IMAGE_COPY_TO_IMAGE:
6014 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6016 state.commandBufferIsEmpty = false;
6017 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
// Pipeline barriers: mirror the random src/dst stage+access masking done in
// createCmdCommand so RNG state and the cache model stay in sync.
6020 case OP_PIPELINE_BARRIER_GLOBAL:
6021 case OP_PIPELINE_BARRIER_BUFFER:
6022 case OP_PIPELINE_BARRIER_IMAGE:
6024 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6026 vk::VkPipelineStageFlags dirtySrcStages;
6027 vk::VkAccessFlags dirtySrcAccesses;
6028 vk::VkPipelineStageFlags dirtyDstStages;
6029 vk::VkAccessFlags dirtyDstAccesses;
6031 vk::VkPipelineStageFlags srcStages;
6032 vk::VkAccessFlags srcAccesses;
6033 vk::VkPipelineStageFlags dstStages;
6034 vk::VkAccessFlags dstAccesses;
6036 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6038 // Try masking some random bits
6039 srcStages = dirtySrcStages & state.rng.getUint32();
6040 srcAccesses = dirtySrcAccesses & state.rng.getUint32();
6042 dstStages = dirtyDstStages & state.rng.getUint32();
6043 dstAccesses = dirtyDstAccesses & state.rng.getUint32();
6045 // If there are no bits in stage mask use the original dirty stages
6046 srcStages = srcStages ? srcStages : dirtySrcStages;
6047 dstStages = dstStages ? dstStages : dirtyDstStages;
// NOTE(review): the guard for this assignment is not visible in this
// extract — presumably "if (srcStages == 0)"; confirm against full source.
6050 srcStages = dstStages;
6052 state.commandBufferIsEmpty = false;
6053 state.cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
// Render pass scope transitions.
6057 case OP_RENDERPASS_BEGIN:
6059 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6061 state.stage = STAGE_RENDER_PASS;
6065 case OP_RENDERPASS_END:
6067 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6069 state.stage = STAGE_COMMAND_BUFFER;
// Draws reading the memory as vertex/index data through the vertex input stage.
6073 case OP_RENDER_VERTEX_BUFFER:
6075 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6077 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
6081 case OP_RENDER_INDEX_BUFFER:
6083 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6085 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT);
6090 DE_FATAL("Unknown op");
// Factory for host-side (non-command-buffer) commands. Must consume random
// numbers exactly like applyOp() does for the same op, so the caller can
// verify RNG lockstep afterwards.
6094 de::MovePtr<Command> createHostCommand (Op op,
6097 vk::VkSharingMode sharing)
// Mapping and cache-control commands take no parameters.
6101 case OP_MAP: return de::MovePtr<Command>(new Map());
6102 case OP_UNMAP: return de::MovePtr<Command>(new UnMap());
6104 case OP_MAP_FLUSH: return de::MovePtr<Command>(new Flush());
6105 case OP_MAP_INVALIDATE: return de::MovePtr<Command>(new Invalidate());
// HostMemoryAccess(read, write, seed): read-only, write-only, read-modify-write.
6107 case OP_MAP_READ: return de::MovePtr<Command>(new HostMemoryAccess(true, false, rng.getUint32()));
6108 case OP_MAP_WRITE: return de::MovePtr<Command>(new HostMemoryAccess(false, true, rng.getUint32()));
6109 case OP_MAP_MODIFY: return de::MovePtr<Command>(new HostMemoryAccess(true, true, rng.getUint32()));
// Resource lifecycle commands; usage flags are translated to Vulkan
// buffer/image usage bits.
6111 case OP_BUFFER_CREATE: return de::MovePtr<Command>(new CreateBuffer(usageToBufferUsageFlags(usage), sharing));
6112 case OP_BUFFER_DESTROY: return de::MovePtr<Command>(new DestroyBuffer());
6113 case OP_BUFFER_BINDMEMORY: return de::MovePtr<Command>(new BindBufferMemory());
6115 case OP_IMAGE_CREATE: return de::MovePtr<Command>(new CreateImage(usageToImageUsageFlags(usage), sharing));
6116 case OP_IMAGE_DESTROY: return de::MovePtr<Command>(new DestroyImage());
6117 case OP_IMAGE_BINDMEMORY: return de::MovePtr<Command>(new BindImageMemory());
6119 case OP_QUEUE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new QueueWaitIdle());
6120 case OP_DEVICE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new DeviceWaitIdle());
// Non-host ops must never reach this factory.
6123 DE_FATAL("Unknown op");
6124 return de::MovePtr<Command>(DE_NULL);
// Factory for commands recorded inside a command buffer. The random numbers
// drawn here must exactly match those consumed by applyOp() for the same op;
// the caller asserts RNG lockstep after calling both.
6128 de::MovePtr<CmdCommand> createCmdCommand (de::Random& rng,
// Buffer transfer ops; the seed parameter defines the written data pattern.
6135 case OP_BUFFER_FILL: return de::MovePtr<CmdCommand>(new FillBuffer(rng.getUint32()));
6136 case OP_BUFFER_UPDATE: return de::MovePtr<CmdCommand>(new UpdateBuffer(rng.getUint32()));
6137 case OP_BUFFER_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyToBuffer());
6138 case OP_BUFFER_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyFromBuffer(rng.getUint32()));
6140 case OP_BUFFER_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyToImage());
6141 case OP_BUFFER_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyFromImage(rng.getUint32()));
// Image layout transition: builds the same barrier masks as the matching
// branch in applyOp() (note: identical RNG call sequence is required).
6143 case OP_IMAGE_TRANSITION_LAYOUT:
6145 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6146 DE_ASSERT(state.hasImage);
6147 DE_ASSERT(state.hasBoundImageMemory);
// 10% of the time transition from UNDEFINED, discarding image contents.
6149 const vk::VkImageLayout srcLayout = rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
6150 const vk::VkImageLayout dstLayout = getRandomNextLayout(rng, usage, srcLayout);
6152 vk::VkPipelineStageFlags dirtySrcStages;
6153 vk::VkAccessFlags dirtySrcAccesses;
6154 vk::VkPipelineStageFlags dirtyDstStages;
6155 vk::VkAccessFlags dirtyDstAccesses;
6157 vk::VkPipelineStageFlags srcStages;
6158 vk::VkAccessFlags srcAccesses;
6159 vk::VkPipelineStageFlags dstStages;
6160 vk::VkAccessFlags dstAccesses;
6162 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
// Source side covers everything dirty; destination side is a random subset
// of the allowed stages/accesses.
6164 // Try masking some random bits
6165 srcStages = dirtySrcStages;
6166 srcAccesses = dirtySrcAccesses;
6168 dstStages = state.cache.getAllowedStages() & rng.getUint32();
6169 dstAccesses = state.cache.getAllowedAcceses() & rng.getUint32();
6171 // If there are no bits in dst stage mask use all stages
6172 dstStages = dstStages ? dstStages : state.cache.getAllowedStages();
// NOTE(review): the guard for this assignment is not visible in this
// extract — presumably "if (srcStages == 0)"; confirm against full source.
6175 srcStages = dstStages;
6177 return de::MovePtr<CmdCommand>(new ImageTransition(srcStages, srcAccesses, dstStages, dstAccesses, srcLayout, dstLayout));
// Image transfer ops.
6180 case OP_IMAGE_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyToBuffer());
6181 case OP_IMAGE_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyFromBuffer(rng.getUint32()));
6182 case OP_IMAGE_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyToImage());
6183 case OP_IMAGE_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyFromImage(rng.getUint32()));
// Blits: randomly pick 1:1 or 2x scaling (matches applyOp's rng.getBool()).
6184 case OP_IMAGE_BLIT_TO_IMAGE:
6186 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
6187 return de::MovePtr<CmdCommand>(new ImageBlitToImage(scale));
6190 case OP_IMAGE_BLIT_FROM_IMAGE:
6192 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
6193 return de::MovePtr<CmdCommand>(new ImageBlitFromImage(rng.getUint32(), scale));
// Pipeline barriers: random src/dst stage and access masks derived from the
// currently-dirty cache state (same sequence of RNG calls as applyOp).
6196 case OP_PIPELINE_BARRIER_GLOBAL:
6197 case OP_PIPELINE_BARRIER_BUFFER:
6198 case OP_PIPELINE_BARRIER_IMAGE:
6200 vk::VkPipelineStageFlags dirtySrcStages;
6201 vk::VkAccessFlags dirtySrcAccesses;
6202 vk::VkPipelineStageFlags dirtyDstStages;
6203 vk::VkAccessFlags dirtyDstAccesses;
6205 vk::VkPipelineStageFlags srcStages;
6206 vk::VkAccessFlags srcAccesses;
6207 vk::VkPipelineStageFlags dstStages;
6208 vk::VkAccessFlags dstAccesses;
6210 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6212 // Try masking some random bits
6213 srcStages = dirtySrcStages & rng.getUint32();
6214 srcAccesses = dirtySrcAccesses & rng.getUint32();
6216 dstStages = dirtyDstStages & rng.getUint32();
6217 dstAccesses = dirtyDstAccesses & rng.getUint32();
6219 // If there are no bits in stage mask use the original dirty stages
6220 srcStages = srcStages ? srcStages : dirtySrcStages;
6221 dstStages = dstStages ? dstStages : dirtyDstStages;
// NOTE(review): the guard for this assignment is not visible in this
// extract — presumably "if (srcStages == 0)"; confirm against full source.
6224 srcStages = dstStages;
// Map the op to the barrier granularity (global / buffer / image).
6226 PipelineBarrier::Type type;
6228 if (op == OP_PIPELINE_BARRIER_IMAGE)
6229 type = PipelineBarrier::TYPE_IMAGE;
6230 else if (op == OP_PIPELINE_BARRIER_BUFFER)
6231 type = PipelineBarrier::TYPE_BUFFER;
6232 else if (op == OP_PIPELINE_BARRIER_GLOBAL)
6233 type = PipelineBarrier::TYPE_GLOBAL;
6236 type = PipelineBarrier::TYPE_LAST;
6237 DE_FATAL("Unknown op");
6240 return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type));
6244 DE_FATAL("Unknown op");
6245 return de::MovePtr<CmdCommand>(DE_NULL);
// Factory for commands recorded inside a render pass. Only the two draw
// variants (vertex-buffer and index-buffer rendering) are valid here; the
// unused de::Random& keeps the signature parallel to the other factories.
6249 de::MovePtr<RenderPassCommand> createRenderPassCommand (de::Random&,
6255 case OP_RENDER_VERTEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_VERTEX_BUFFER));
6256 case OP_RENDER_INDEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_INDEX_BUFFER));
6259 DE_FATAL("Unknown op");
6260 return de::MovePtr<RenderPassCommand>(DE_NULL);
// Build a random sequence of render-pass commands, terminated by
// OP_RENDERPASS_END, and wrap them in a SubmitRenderPass command.
// Ownership of the raw pointers is transferred to SubmitRenderPass; the
// trailing loop deletes them if construction fails partway.
6264 de::MovePtr<CmdCommand> createRenderPassCommands (const Memory& memory,
6265 de::Random& nextOpRng,
6271 vector<RenderPassCommand*> commands;
// Continues the caller's operation budget (opNdx is shared with the caller).
6275 for (; opNdx < opCount; opNdx++)
6279 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6281 DE_ASSERT(!ops.empty());
6284 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6286 if (op == OP_RENDERPASS_END)
// Snapshot the RNG so we can verify the factory and applyOp consume the
// same number of random values.
6292 de::Random rng (state.rng);
6294 commands.push_back(createRenderPassCommand(rng, state, op).release());
6295 applyOp(state, memory, op, usage);
6297 DE_ASSERT(state.rng == rng);
// Close the render pass in the reference state and hand the commands over.
6302 applyOp(state, memory, OP_RENDERPASS_END, usage);
6303 return de::MovePtr<CmdCommand>(new SubmitRenderPass(commands));
// Cleanup path: free any commands created before the failure.
6307 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6308 delete commands[commandNdx];
// Build a random sequence of command-buffer commands, terminated by
// OP_COMMAND_BUFFER_END, and wrap them in a SubmitCommandBuffer command.
// Render passes are recursed into via createRenderPassCommands(). Ownership
// of the raw pointers transfers to SubmitCommandBuffer; the trailing loop
// deletes them if construction fails partway.
6314 de::MovePtr<Command> createCmdCommands (const Memory& memory,
6315 de::Random& nextOpRng,
6321 vector<CmdCommand*> commands;
6325 for (; opNdx < opCount; opNdx++)
6329 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6331 DE_ASSERT(!ops.empty());
6334 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6336 if (op == OP_COMMAND_BUFFER_END)
6342 // \note Command needs to known the state before the operation
// Render pass: apply the begin op first, then generate the pass contents.
6343 if (op == OP_RENDERPASS_BEGIN)
6345 applyOp(state, memory, op, usage);
6346 commands.push_back(createRenderPassCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
// Snapshot the RNG to verify factory and applyOp stay in lockstep.
6350 de::Random rng (state.rng);
6352 commands.push_back(createCmdCommand(rng, state, op, usage).release());
6353 applyOp(state, memory, op, usage);
6355 DE_ASSERT(state.rng == rng);
// Close the command buffer in the reference state and hand the commands over.
6362 applyOp(state, memory, OP_COMMAND_BUFFER_END, usage);
6363 return de::MovePtr<Command>(new SubmitCommandBuffer(commands));
// Cleanup path: free any commands created before the failure.
6367 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6368 delete commands[commandNdx];
6374 void createCommands (vector<Command*>& commands,
6376 const Memory& memory,
6378 vk::VkSharingMode sharingMode)
6380 const size_t opCount = 100;
6381 State state (usage, seed);
6382 // Used to select next operation only
6383 de::Random nextOpRng (seed ^ 12930809);
6385 commands.reserve(opCount);
6387 for (size_t opNdx = 0; opNdx < opCount; opNdx++)
6391 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6393 DE_ASSERT(!ops.empty());
6396 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6398 if (op == OP_COMMAND_BUFFER_BEGIN)
6400 applyOp(state, memory, op, usage);
6401 commands.push_back(createCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
6405 de::Random rng (state.rng);
6407 commands.push_back(createHostCommand(op, rng, usage, sharingMode).release());
6408 applyOp(state, memory, op, usage);
6410 // Make sure that random generator is in sync
6411 DE_ASSERT(state.rng == rng);
6416 // Clean up resources
6417 if (state.hasBuffer && state.hasImage)
6419 if (!state.queueIdle)
6420 commands.push_back(new QueueWaitIdle());
6422 if (state.hasBuffer)
6423 commands.push_back(new DestroyBuffer());
6426 commands.push_back(new DestroyImage());
// Execute one full randomized test run against a single memory type:
// generate commands, log them, then run the prepare / execute / verify
// phases, reporting failures through 'resultCollector'. All generated
// Command objects are deleted on every exit path.
6430 void testCommand (TestLog& log,
6431 tcu::ResultCollector& resultCollector,
6432 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection,
6433 const vk::InstanceInterface& vki,
6434 const vk::DeviceInterface& vkd,
6435 vk::VkPhysicalDevice physicalDevice,
6436 vk::VkDevice device,
6437 vk::VkDeviceSize size,
6438 deUint32 memoryTypeIndex,
6440 vk::VkSharingMode sharingMode,
6441 vk::VkQueue executionQueue,
6442 deUint32 executionQueueFamily,
6443 const vector<deUint32>& queueFamilies,
6444 const vk::VkDeviceSize maxBufferSize,
6445 const IVec2 maxImageSize)
// Fixed seed: the same command sequence is generated on every run.
6447 const deUint32 seed = 2830980989u;
6448 Memory memory (vki, vkd, physicalDevice, device, size, memoryTypeIndex, maxBufferSize, maxImageSize[0], maxImageSize[1]);
6449 vector<Command*> commands;
6450 vector<pair<deUint32, vk::VkQueue> > queues;
6454 log << TestLog::Message << "Create commands" << TestLog::EndMessage;
6455 createCommands(commands, seed, memory, usage, sharingMode);
// Fetch queue 0 of every requested family for the execution context.
6457 for (size_t queueNdx = 0; queueNdx < queueFamilies.size(); queueNdx++)
6461 vkd.getDeviceQueue(device, queueFamilies[queueNdx], 0, &queue);
6462 queues.push_back(std::make_pair(queueFamilies[queueNdx], queue));
// Log the planned prepare and execute steps before running anything.
6466 const tcu::ScopedLogSection section (log, "LogPrepare", "LogPrepare");
6468 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6469 commands[cmdNdx]->logPrepare(log, cmdNdx);
6473 const tcu::ScopedLogSection section (log, "LogExecute", "LogExecute");
6475 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6476 commands[cmdNdx]->logExecute(log, cmdNdx);
6480 const Context context (vki, vkd, physicalDevice, device, executionQueue, executionQueueFamily, queues, binaryCollection);
// Phase 1: prepare — allocate per-command resources.
6485 PrepareContext prepareContext (context, memory);
6487 log << TestLog::Message << "Begin prepare" << TestLog::EndMessage;
6489 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6491 Command& command = *commands[cmdNdx];
6495 command.prepare(prepareContext);
6499 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to prepare for execution");
// Phase 2: execute the commands against the device.
6504 ExecuteContext executeContext (context);
6506 log << TestLog::Message << "Begin execution" << TestLog::EndMessage;
6508 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6510 Command& command = *commands[cmdNdx];
6514 command.execute(executeContext);
6518 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to execute");
// All work must be finished before verification reads results back.
6523 VK_CHECK(vkd.deviceWaitIdle(device));
// Phase 3: verify results against the reference model.
6527 const tcu::ScopedLogSection section (log, "Verify", "Verify");
6528 VerifyContext verifyContext (log, resultCollector, context, size);
6530 log << TestLog::Message << "Begin verify" << TestLog::EndMessage;
6532 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6534 Command& command = *commands[cmdNdx];
6538 command.verify(verifyContext, cmdNdx);
6542 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed verification");
// Normal-path cleanup of the generated commands.
6548 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6550 delete commands[commandNdx];
6551 commands[commandNdx] = DE_NULL;
// Error-path cleanup (duplicated per catch/exit path in the full source).
6556 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6558 delete commands[commandNdx];
6559 commands[commandNdx] = DE_NULL;
6568 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6570 delete commands[commandNdx];
6571 commands[commandNdx] = DE_NULL;
// Test instance that iterates over every memory type of the device, running
// one randomized command sequence per supported type (see iterate()).
6577 class MemoryTestInstance : public TestInstance
6581 MemoryTestInstance (::vkt::Context& context, const TestConfig& config);
6583 tcu::TestStatus iterate (void);
// Test parameters (size, usage flags, sharing mode).
6586 const TestConfig m_config;
// Memory properties queried once at construction time.
6587 const vk::VkPhysicalDeviceMemoryProperties m_memoryProperties;
// Index of the memory type to test on the next iterate() call.
6588 deUint32 m_memoryTypeNdx;
// Aggregates pass/fail results across all memory types.
6589 tcu::ResultCollector m_resultCollector;
// Constructor: captures the config, queries device memory properties and
// logs the test case parameters plus the full heap/memory-type layout.
6592 MemoryTestInstance::MemoryTestInstance (::vkt::Context& context, const TestConfig& config)
6593 : TestInstance (context)
6595 , m_memoryProperties (vk::getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
6596 , m_memoryTypeNdx (0)
6597 , m_resultCollector (context.getTestContext().getLog())
6599 TestLog& log = context.getTestContext().getLog();
// Log the test case parameters.
6601 const tcu::ScopedLogSection section (log, "TestCaseInfo", "Test Case Info");
6603 log << TestLog::Message << "Buffer size: " << config.size << TestLog::EndMessage;
6604 log << TestLog::Message << "Sharing: " << config.sharing << TestLog::EndMessage;
6605 log << TestLog::Message << "Access: " << config.usage << TestLog::EndMessage;
// Log every memory heap and memory type for debugging failures.
6609 const tcu::ScopedLogSection section (log, "MemoryProperties", "Memory Properties");
6611 for (deUint32 heapNdx = 0; heapNdx < m_memoryProperties.memoryHeapCount; heapNdx++)
6613 const tcu::ScopedLogSection heapSection (log, "Heap" + de::toString(heapNdx), "Heap " + de::toString(heapNdx));
6615 log << TestLog::Message << "Size: " << m_memoryProperties.memoryHeaps[heapNdx].size << TestLog::EndMessage;
6616 log << TestLog::Message << "Flags: " << m_memoryProperties.memoryHeaps[heapNdx].flags << TestLog::EndMessage;
6619 for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < m_memoryProperties.memoryTypeCount; memoryTypeNdx++)
6621 const tcu::ScopedLogSection memoryTypeSection (log, "MemoryType" + de::toString(memoryTypeNdx), "Memory type " + de::toString(memoryTypeNdx))
6623 log << TestLog::Message << "Properties: " << m_memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags << TestLog::EndMessage;
6624 log << TestLog::Message << "Heap: " << m_memoryProperties.memoryTypes[memoryTypeNdx].heapIndex << TestLog::EndMessage;
// Run the test for the current memory type, then advance to the next one.
// Returns incomplete() while memory types remain; the final call returns the
// aggregated result. Unsupported memory types are logged and skipped.
6629 tcu::TestStatus MemoryTestInstance::iterate (void)
6631 // \todo [2016-03-09 mika] Split different stages over multiple iterations
6632 if (m_memoryTypeNdx < m_memoryProperties.memoryTypeCount)
6634 TestLog& log = m_context.getTestContext().getLog();
6635 const tcu::ScopedLogSection section (log, "MemoryType" + de::toString(m_memoryTypeNdx), "Memory type " + de::toString(m_memoryTypeNdx));
6636 const vk::InstanceInterface& vki = m_context.getInstanceInterface();
6637 const vk::VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
6638 const vk::DeviceInterface& vkd = m_context.getDeviceInterface();
6639 const vk::VkDevice device = m_context.getDevice();
6640 const vk::VkQueue queue = m_context.getUniversalQueue();
6641 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
6642 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
6643 vector<deUint32> queues;
6645 queues.push_back(queueFamilyIndex);
// Host-access usages require a host-visible memory type; skip otherwise.
6647 if (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)
6648 && !(memoryProperties.memoryTypes[m_memoryTypeNdx].propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
6650 log << TestLog::Message << "Memory type not supported" << TestLog::EndMessage;
6653 return tcu::TestStatus::incomplete();
// Determine the largest buffer / RGBA8 image that fits this memory type.
6659 const vk::VkBufferUsageFlags bufferUsage = usageToBufferUsageFlags(m_config.usage);
6660 const vk::VkImageUsageFlags imageUsage = usageToImageUsageFlags(m_config.usage);
6661 const vk::VkDeviceSize maxBufferSize = bufferUsage != 0
6662 ? roundBufferSizeToWxHx4(findMaxBufferSize(vkd, device, bufferUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx))
6664 const IVec2 maxImageSize = imageUsage != 0
6665 ? findMaxRGBA8ImageSize(vkd, device, imageUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx)
6668 log << TestLog::Message << "Max buffer size: " << maxBufferSize << TestLog::EndMessage;
6669 log << TestLog::Message << "Max RGBA8 image size: " << maxImageSize << TestLog::EndMessage;
6671 // Skip tests if there are no supported operations
6672 if (maxBufferSize == 0
6673 && maxImageSize[0] == 0
6674 && (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)) == 0)
6676 log << TestLog::Message << "Skipping memory type. None of the usages are supported." << TestLog::EndMessage;
6680 testCommand(log, m_resultCollector, m_context.getBinaryCollection(), vki, vkd, physicalDevice, device, m_config.size, m_memoryTypeNdx, m_config.usage, m_config.sharing, queue, queueFamilyIndex, queues, maxBufferSize, maxImageSize);
// TestError is downgraded to a collected failure so remaining memory
// types still get tested.
6683 catch (const tcu::TestError& e)
6685 m_resultCollector.fail("Failed, got exception: " + string(e.getMessage()));
6689 return tcu::TestStatus::incomplete();
// All memory types processed: report the aggregated verdict.
6693 return tcu::TestStatus(m_resultCollector.getResult(), m_resultCollector.getMessage());
// Register the GLSL shaders required by the render-pass operations of this
// configuration. Only shaders for the usages actually enabled are added.
6698 void init (vk::SourceCollections& sources, TestConfig config) const
6700 // Vertex buffer rendering
6701 if (config.usage & USAGE_VERTEX_BUFFER)
// Reads vec2 positions from the tested buffer; maps [0,1] input roughly
// onto clip space [-0.999, 0.999] so points land on distinct pixels.
6703 const char* const vertexShader =
6705 "layout(location = 0) in highp vec2 a_position;\n"
6706 "void main (void) {\n"
6707 "\tgl_PointSize = 1.0;\n"
6708 "\tgl_Position = vec4(1.998 * a_position - vec2(0.999), 0.0, 1.0);\n"
6711 sources.glslSources.add("vertex-buffer.vert")
6712 << glu::VertexSource(vertexShader);
6715 // Index buffer rendering
6716 if (config.usage & USAGE_INDEX_BUFFER)
// Derives a point position on a 256x256 grid from gl_VertexIndex, so the
// tested buffer is consumed purely as index data.
6718 const char* const vertexShader =
6721 "void main (void) {\n"
6722 "\tgl_PointSize = 1.0;\n"
6723 "\thighp vec2 pos = vec2(gl_VertexIndex % 256, gl_VertexIndex / 256) / vec2(255.0);\n"
6724 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
6727 sources.glslSources.add("index-buffer.vert")
6728 << glu::VertexSource(vertexShader);
// Shared fragment shader: writes opaque white for every covered pixel.
6732 const char* const fragmentShader =
6734 "layout(location = 0) out highp vec4 o_color;\n"
6735 "void main (void) {\n"
6736 "\to_color = vec4(1.0);\n"
6739 sources.glslSources.add("render-white.frag")
6740 << glu::FragmentSource(fragmentShader);
// Build the "pipeline_barrier" test group: one subgroup per write-usage x
// read-usage combination (and combined "all" / "all_device" groups), with
// one test case per memory allocation size.
6747 tcu::TestCaseGroup* createPipelineBarrierTests (tcu::TestContext& testCtx)
6749 de::MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "pipeline_barrier", "Pipeline barrier tests."));
// Memory sizes tested per usage combination.
6750 const vk::VkDeviceSize sizes[] =
6757 const Usage usages[] =
6763 USAGE_VERTEX_BUFFER,
// Usages that read data, paired against usages that write it.
6766 const Usage readUsages[] =
6770 USAGE_VERTEX_BUFFER,
6774 const Usage writeUsages[] =
// One subgroup per write+read usage pair.
6780 for (size_t writeUsageNdx = 0; writeUsageNdx < DE_LENGTH_OF_ARRAY(writeUsages); writeUsageNdx++)
6782 const Usage writeUsage = writeUsages[writeUsageNdx];
6784 for (size_t readUsageNdx = 0; readUsageNdx < DE_LENGTH_OF_ARRAY(readUsages); readUsageNdx++)
6786 const Usage readUsage = readUsages[readUsageNdx];
6787 const Usage usage = writeUsage | readUsage;
6788 const string usageGroupName (usageToName(usage));
6789 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6791 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6793 const vk::VkDeviceSize size = sizes[sizeNdx];
6794 const string testName (de::toString((deUint64)(size)));
6795 const TestConfig config =
6799 vk::VK_SHARING_MODE_EXCLUSIVE
6802 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
6805 group->addChild(usageGroup.get());
6806 usageGroup.release();
// Combine every usage flag for the stress variants below.
6811 Usage all = (Usage)0;
6813 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usages); usageNdx++)
6814 all = all | usages[usageNdx];
// "all": every usage including host access.
6817 const string usageGroupName ("all");
6818 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6820 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6822 const vk::VkDeviceSize size = sizes[sizeNdx];
6823 const string testName (de::toString((deUint64)(size)));
6824 const TestConfig config =
6828 vk::VK_SHARING_MODE_EXCLUSIVE
6831 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
6834 group->addChild(usageGroup.get());
6835 usageGroup.release();
// "all_device": every usage except host read/write.
6839 const string usageGroupName ("all_device");
6840 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6842 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6844 const vk::VkDeviceSize size = sizes[sizeNdx];
6845 const string testName (de::toString((deUint64)(size)));
6846 const TestConfig config =
6848 (Usage)(all & (~(USAGE_HOST_READ|USAGE_HOST_WRITE))),
6850 vk::VK_SHARING_MODE_EXCLUSIVE
6853 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
6856 group->addChild(usageGroup.get());
6857 usageGroup.release();
6861 return group.release();