1 /*-------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2015 Google Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
21 * \brief Pipeline barrier tests
22 *//*--------------------------------------------------------------------*/
24 #include "vktMemoryPipelineBarrierTests.hpp"
26 #include "vktTestCaseUtil.hpp"
29 #include "vkPlatform.hpp"
30 #include "vkRefUtil.hpp"
31 #include "vkQueryUtil.hpp"
32 #include "vkMemUtil.hpp"
33 #include "vkTypeUtil.hpp"
34 #include "vkPrograms.hpp"
36 #include "tcuMaybe.hpp"
37 #include "tcuTextureUtil.hpp"
38 #include "tcuTestLog.hpp"
39 #include "tcuResultCollector.hpp"
40 #include "tcuTexture.hpp"
41 #include "tcuImageCompare.hpp"
43 #include "deUniquePtr.hpp"
44 #include "deStringUtil.hpp"
45 #include "deRandom.hpp"
56 // \todo [2016-03-09 mika] Check bufferImageGranularity
70 using tcu::ConstPixelBufferAccess;
71 using tcu::PixelBufferAccess;
72 using tcu::TextureFormat;
73 using tcu::TextureLevel;
// Mask of every core Vulkan 1.0 pipeline stage bit (graphics, compute,
// transfer and host). Used when a barrier must cover "all stages".
// NOTE(review): the enclosing `enum { ... }` lines are elided in this listing.
83 ALL_PIPELINE_STAGES = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
84 | vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
85 | vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT
86 | vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
87 | vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
88 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
89 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
90 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
91 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
92 | vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
93 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
94 | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
95 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
96 | vk::VK_PIPELINE_STAGE_TRANSFER_BIT
97 | vk::VK_PIPELINE_STAGE_HOST_BIT
// Mask of every core Vulkan 1.0 access type bit; counterpart of
// ALL_PIPELINE_STAGES for VkAccessFlags.
102 ALL_ACCESSES = vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT
103 | vk::VK_ACCESS_INDEX_READ_BIT
104 | vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
105 | vk::VK_ACCESS_UNIFORM_READ_BIT
106 | vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
107 | vk::VK_ACCESS_SHADER_READ_BIT
108 | vk::VK_ACCESS_SHADER_WRITE_BIT
109 | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
110 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
111 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
112 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
113 | vk::VK_ACCESS_TRANSFER_READ_BIT
114 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
115 | vk::VK_ACCESS_HOST_READ_BIT
116 | vk::VK_ACCESS_HOST_WRITE_BIT
117 | vk::VK_ACCESS_MEMORY_READ_BIT
118 | vk::VK_ACCESS_MEMORY_WRITE_BIT
// Usage bitfield describing how the memory object under test may be used.
// Each bit is translated to Vulkan buffer/image usage, stage and access
// flags by the usageTo*Flags() helpers below.
123 // Mapped host read and write
124 USAGE_HOST_READ = (0x1u<<0),
125 USAGE_HOST_WRITE = (0x1u<<1),
127 // Copy and other transfer operations
128 USAGE_TRANSFER_SRC = (0x1u<<2),
129 USAGE_TRANSFER_DST = (0x1u<<3),
131 // Buffer usage flags
132 USAGE_INDEX_BUFFER = (0x1u<<4),
133 USAGE_VERTEX_BUFFER = (0x1u<<5),
135 USAGE_UNIFORM_BUFFER = (0x1u<<6),
136 USAGE_STORAGE_BUFFER = (0x1u<<7),
138 USAGE_UNIFORM_TEXEL_BUFFER = (0x1u<<8),
139 USAGE_STORAGE_TEXEL_BUFFER = (0x1u<<9),
141 // \todo [2016-03-09 mika] This is probably almost impossible to do
142 USAGE_INDIRECT_BUFFER = (0x1u<<10),
144 // Texture usage flags
145 USAGE_TEXTURE_SAMPLED = (0x1u<<11),
146 USAGE_TEXTURE_STORAGE = (0x1u<<12),
147 USAGE_COLOR_ATTACHMENT = (0x1u<<13),
148 USAGE_INPUT_ATTACHMENT = (0x1u<<14),
149 USAGE_DEPTH_STENCIL_ATTACHMENT = (0x1u<<15),
// True if any of the given usages lets the device (GPU) write to a buffer.
// NOTE(review): the `return true;`/`return false;` lines are elided in this
// listing; each matched bit presumably returns true.
152 bool supportsDeviceBufferWrites (Usage usage)
154 if (usage & USAGE_TRANSFER_DST)
157 if (usage & USAGE_STORAGE_BUFFER)
160 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
// True if any of the given usages lets the device (GPU) write to an image.
166 bool supportsDeviceImageWrites (Usage usage)
168 if (usage & USAGE_TRANSFER_DST)
171 if (usage & USAGE_TEXTURE_STORAGE)
174 if (usage & USAGE_COLOR_ATTACHMENT)
// Zero-based sequential re-encoding of VkAccessFlagBits, so accesses can be
// used as array indices / iterated densely (the Vk enums are one-hot bits).
180 // Sequential access enums
183 ACCESS_INDIRECT_COMMAND_READ_BIT = 0,
184 ACCESS_INDEX_READ_BIT,
185 ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
186 ACCESS_UNIFORM_READ_BIT,
187 ACCESS_INPUT_ATTACHMENT_READ_BIT,
188 ACCESS_SHADER_READ_BIT,
189 ACCESS_SHADER_WRITE_BIT,
190 ACCESS_COLOR_ATTACHMENT_READ_BIT,
191 ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
192 ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
193 ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
194 ACCESS_TRANSFER_READ_BIT,
195 ACCESS_TRANSFER_WRITE_BIT,
196 ACCESS_HOST_READ_BIT,
197 ACCESS_HOST_WRITE_BIT,
198 ACCESS_MEMORY_READ_BIT,
199 ACCESS_MEMORY_WRITE_BIT,
// Zero-based sequential re-encoding of VkPipelineStageFlagBits; mapped from
// the one-hot Vk values by pipelineStageFlagToPipelineStage() below.
204 // Sequential stage enums
207 PIPELINESTAGE_TOP_OF_PIPE_BIT = 0,
208 PIPELINESTAGE_BOTTOM_OF_PIPE_BIT,
209 PIPELINESTAGE_DRAW_INDIRECT_BIT,
210 PIPELINESTAGE_VERTEX_INPUT_BIT,
211 PIPELINESTAGE_VERTEX_SHADER_BIT,
212 PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT,
213 PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT,
214 PIPELINESTAGE_GEOMETRY_SHADER_BIT,
215 PIPELINESTAGE_FRAGMENT_SHADER_BIT,
216 PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT,
217 PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT,
218 PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
219 PIPELINESTAGE_COMPUTE_SHADER_BIT,
220 PIPELINESTAGE_TRANSFER_BIT,
221 PIPELINESTAGE_HOST_BIT,
// Maps a single one-hot VkPipelineStageFlagBits value to its sequential
// PipelineStage counterpart. `flags` must contain exactly one known bit;
// anything else hits DE_FATAL and returns the sentinel PIPELINESTAGE_LAST.
226 PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flags)
230 case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT: return PIPELINESTAGE_TOP_OF_PIPE_BIT;
231 case vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT: return PIPELINESTAGE_BOTTOM_OF_PIPE_BIT;
232 case vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT: return PIPELINESTAGE_DRAW_INDIRECT_BIT;
233 case vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT: return PIPELINESTAGE_VERTEX_INPUT_BIT;
234 case vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT: return PIPELINESTAGE_VERTEX_SHADER_BIT;
235 case vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT;
236 case vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT;
237 case vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT: return PIPELINESTAGE_GEOMETRY_SHADER_BIT;
238 case vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT: return PIPELINESTAGE_FRAGMENT_SHADER_BIT;
239 case vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT;
240 case vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT;
241 case vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT: return PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
242 case vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT: return PIPELINESTAGE_COMPUTE_SHADER_BIT;
243 case vk::VK_PIPELINE_STAGE_TRANSFER_BIT: return PIPELINESTAGE_TRANSFER_BIT;
244 case vk::VK_PIPELINE_STAGE_HOST_BIT: return PIPELINESTAGE_HOST_BIT;
247 DE_FATAL("Unknown pipeline stage flags");
248 return PIPELINESTAGE_LAST;
// Bitwise combination of Usage flags (plain enum, so the operators must be
// provided explicitly).
252 Usage operator| (Usage a, Usage b)
254 return (Usage)((deUint32)a | (deUint32)b);
257 Usage operator& (Usage a, Usage b)
259 return (Usage)((deUint32)a & (deUint32)b);
// Builds a human-readable test-case name from a Usage mask by concatenating
// the name of every set bit (separator handling is in elided lines).
262 string usageToName (Usage usage)
267 const char* const name;
270 { USAGE_HOST_READ, "host_read" },
271 { USAGE_HOST_WRITE, "host_write" },
273 { USAGE_TRANSFER_SRC, "transfer_src" },
274 { USAGE_TRANSFER_DST, "transfer_dst" },
276 { USAGE_INDEX_BUFFER, "index_buffer" },
277 { USAGE_VERTEX_BUFFER, "vertex_buffer" },
278 { USAGE_UNIFORM_BUFFER, "uniform_buffer" },
279 { USAGE_STORAGE_BUFFER, "storage_buffer" },
280 { USAGE_UNIFORM_TEXEL_BUFFER, "uniform_texel_buffer" },
281 { USAGE_STORAGE_TEXEL_BUFFER, "storage_texel_buffer" },
282 { USAGE_INDIRECT_BUFFER, "indirect_buffer" },
283 { USAGE_TEXTURE_SAMPLED, "sampled_texture" },
284 { USAGE_TEXTURE_STORAGE, "texture_storage" },
285 { USAGE_COLOR_ATTACHMENT, "color_attachment" },
286 { USAGE_INPUT_ATTACHMENT, "input_attachment" },
287 { USAGE_DEPTH_STENCIL_ATTACHMENT, "depth_stencil_attachment" },
290 std::ostringstream stream;
293 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usageNames); usageNdx++)
295 if (usage & usageNames[usageNdx].usage)
302 stream << usageNames[usageNdx].name;
// Translates the test's Usage mask into the VkBufferUsageFlags needed to
// create a buffer supporting those operations.
309 vk::VkBufferUsageFlags usageToBufferUsageFlags (Usage usage)
311 vk::VkBufferUsageFlags flags = 0;
313 if (usage & USAGE_TRANSFER_SRC)
314 flags |= vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
316 if (usage & USAGE_TRANSFER_DST)
317 flags |= vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT;
319 if (usage & USAGE_INDEX_BUFFER)
320 flags |= vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
322 if (usage & USAGE_VERTEX_BUFFER)
323 flags |= vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
325 if (usage & USAGE_INDIRECT_BUFFER)
326 flags |= vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
328 if (usage & USAGE_UNIFORM_BUFFER)
329 flags |= vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
331 if (usage & USAGE_STORAGE_BUFFER)
332 flags |= vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
334 if (usage & USAGE_UNIFORM_TEXEL_BUFFER)
335 flags |= vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
337 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
338 flags |= vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
// Translates the test's Usage mask into the VkImageUsageFlags needed to
// create an image supporting those operations.
343 vk::VkImageUsageFlags usageToImageUsageFlags (Usage usage)
345 vk::VkImageUsageFlags flags = 0;
347 if (usage & USAGE_TRANSFER_SRC)
348 flags |= vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
350 if (usage & USAGE_TRANSFER_DST)
351 flags |= vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT;
353 if (usage & USAGE_TEXTURE_SAMPLED)
354 flags |= vk::VK_IMAGE_USAGE_SAMPLED_BIT;
356 if (usage & USAGE_TEXTURE_STORAGE)
357 flags |= vk::VK_IMAGE_USAGE_STORAGE_BIT;
359 if (usage & USAGE_COLOR_ATTACHMENT)
360 flags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
362 if (usage & USAGE_INPUT_ATTACHMENT)
363 flags |= vk::VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
365 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
366 flags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
// Returns the set of pipeline stages that can touch a resource with the
// given Usage mask. Any shader-visible usage enables all shader stages,
// since the test may bind the resource in any of them.
371 vk::VkPipelineStageFlags usageToStageFlags (Usage usage)
373 vk::VkPipelineStageFlags flags = 0;
375 if (usage & (USAGE_HOST_READ|USAGE_HOST_WRITE))
376 flags |= vk::VK_PIPELINE_STAGE_HOST_BIT;
378 if (usage & (USAGE_TRANSFER_SRC|USAGE_TRANSFER_DST))
379 flags |= vk::VK_PIPELINE_STAGE_TRANSFER_BIT;
381 if (usage & (USAGE_VERTEX_BUFFER|USAGE_INDEX_BUFFER))
382 flags |= vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
384 if (usage & USAGE_INDIRECT_BUFFER)
385 flags |= vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
388 (USAGE_UNIFORM_BUFFER
389 | USAGE_STORAGE_BUFFER
390 | USAGE_UNIFORM_TEXEL_BUFFER
391 | USAGE_STORAGE_TEXEL_BUFFER
392 | USAGE_TEXTURE_SAMPLED
393 | USAGE_TEXTURE_STORAGE))
395 flags |= (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
396 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
397 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
398 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
399 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
400 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
// Input attachments are only readable from the fragment shader.
403 if (usage & USAGE_INPUT_ATTACHMENT)
404 flags |= vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
406 if (usage & USAGE_COLOR_ATTACHMENT)
407 flags |= vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
409 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
411 flags |= vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
412 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
// Returns the VkAccessFlags a resource with the given Usage mask may be
// subject to; write bits are included whenever the usage permits writes.
418 vk::VkAccessFlags usageToAccessFlags (Usage usage)
420 vk::VkAccessFlags flags = 0;
422 if (usage & USAGE_HOST_READ)
423 flags |= vk::VK_ACCESS_HOST_READ_BIT;
425 if (usage & USAGE_HOST_WRITE)
426 flags |= vk::VK_ACCESS_HOST_WRITE_BIT;
428 if (usage & USAGE_TRANSFER_SRC)
429 flags |= vk::VK_ACCESS_TRANSFER_READ_BIT;
431 if (usage & USAGE_TRANSFER_DST)
432 flags |= vk::VK_ACCESS_TRANSFER_WRITE_BIT;
434 if (usage & USAGE_INDEX_BUFFER)
435 flags |= vk::VK_ACCESS_INDEX_READ_BIT;
437 if (usage & USAGE_VERTEX_BUFFER)
438 flags |= vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
440 if (usage & (USAGE_UNIFORM_BUFFER | USAGE_UNIFORM_TEXEL_BUFFER))
441 flags |= vk::VK_ACCESS_UNIFORM_READ_BIT;
443 if (usage & (USAGE_STORAGE_BUFFER
444 | USAGE_STORAGE_TEXEL_BUFFER
445 | USAGE_TEXTURE_SAMPLED
446 | USAGE_TEXTURE_STORAGE))
447 flags |= vk::VK_ACCESS_SHADER_READ_BIT | vk::VK_ACCESS_SHADER_WRITE_BIT;
449 if (usage & USAGE_INDIRECT_BUFFER)
450 flags |= vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
452 if (usage & USAGE_COLOR_ATTACHMENT)
453 flags |= vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
455 if (usage & USAGE_INPUT_ATTACHMENT)
456 flags |= vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
458 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
459 flags |= vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
460 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
// NOTE(review): these two members belong to a struct whose declaration is
// elided from this listing — presumably buffer creation parameters.
468 vk::VkDeviceSize size;
469 vk::VkSharingMode sharing;
// Allocates a single command buffer of the given level from `pool`.
472 vk::Move<vk::VkCommandBuffer> createCommandBuffer (const vk::DeviceInterface& vkd,
474 vk::VkCommandPool pool,
475 vk::VkCommandBufferLevel level)
477 const vk::VkCommandBufferAllocateInfo bufferInfo =
479 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
487 return vk::allocateCommandBuffer(vkd, device, &bufferInfo);
// Allocates a command buffer and calls vkBeginCommandBuffer on it; secondary
// command buffers get a default inheritance info, primary ones get none.
490 vk::Move<vk::VkCommandBuffer> createBeginCommandBuffer (const vk::DeviceInterface& vkd,
492 vk::VkCommandPool pool,
493 vk::VkCommandBufferLevel level)
495 const vk::VkCommandBufferInheritanceInfo inheritInfo =
497 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
506 const vk::VkCommandBufferBeginInfo beginInfo =
508 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
511 (level == vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY ? &inheritInfo : (const vk::VkCommandBufferInheritanceInfo*)DE_NULL),
514 vk::Move<vk::VkCommandBuffer> commandBuffer (createCommandBuffer(vkd, device, pool, level));
// NOTE(review): the result of beginCommandBuffer is not VK_CHECKed here,
// unlike other calls in this file — confirm this is intentional upstream.
516 vkd.beginCommandBuffer(*commandBuffer, &beginInfo);
518 return commandBuffer;
// Creates a command pool for `queueFamilyIndex` allowing per-buffer reset.
521 vk::Move<vk::VkCommandPool> createCommandPool (const vk::DeviceInterface& vkd,
523 deUint32 queueFamilyIndex)
525 const vk::VkCommandPoolCreateInfo poolInfo =
527 vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
530 vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
534 return vk::createCommandPool(vkd, device, &poolInfo);
// Creates a buffer of `size` bytes with the given usage/sharing mode,
// shared across the listed queue families.
537 vk::Move<vk::VkBuffer> createBuffer (const vk::DeviceInterface& vkd,
539 vk::VkDeviceSize size,
540 vk::VkBufferUsageFlags usage,
541 vk::VkSharingMode sharingMode,
542 const vector<deUint32>& queueFamilies)
544 const vk::VkBufferCreateInfo createInfo =
546 vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
553 (deUint32)queueFamilies.size(),
557 return vk::createBuffer(vkd, device, &createInfo);
// Allocates `size` bytes of device memory from the given memory type.
560 vk::Move<vk::VkDeviceMemory> allocMemory (const vk::DeviceInterface& vkd,
562 vk::VkDeviceSize size,
563 deUint32 memoryTypeIndex)
565 const vk::VkMemoryAllocateInfo alloc =
567 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
574 return vk::allocateMemory(vkd, device, &alloc);
// Allocates memory compatible with `buffer` (matching memoryTypeBits and the
// requested property flags) and binds it. Iterates over all candidate memory
// types, tolerating OUT_OF_*_MEMORY and trying the next type; any other
// Vulkan error is re-thrown. Fails the test if every candidate runs out.
577 vk::Move<vk::VkDeviceMemory> bindBufferMemory (const vk::InstanceInterface& vki,
578 const vk::DeviceInterface& vkd,
579 vk::VkPhysicalDevice physicalDevice,
582 vk::VkMemoryPropertyFlags properties)
584 const vk::VkMemoryRequirements memoryRequirements = vk::getBufferMemoryRequirements(vkd, device, buffer);
585 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
586 deUint32 memoryTypeIndex;
588 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
590 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
591 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
595 const vk::VkMemoryAllocateInfo allocationInfo =
597 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
599 memoryRequirements.size,
602 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
604 VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0));
608 catch (const vk::Error& error)
610 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
611 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
613 // Try next memory type/heap if out of memory
617 // Throw all other errors forward
624 TCU_FAIL("Failed to allocate memory for buffer");
// Image counterpart of bindBufferMemory(): allocate a compatible memory type
// and bind it to `image`, retrying other types on out-of-memory errors.
627 vk::Move<vk::VkDeviceMemory> bindImageMemory (const vk::InstanceInterface& vki,
628 const vk::DeviceInterface& vkd,
629 vk::VkPhysicalDevice physicalDevice,
632 vk::VkMemoryPropertyFlags properties)
634 const vk::VkMemoryRequirements memoryRequirements = vk::getImageMemoryRequirements(vkd, device, image);
635 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
636 deUint32 memoryTypeIndex;
638 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
640 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
641 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
645 const vk::VkMemoryAllocateInfo allocationInfo =
647 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
649 memoryRequirements.size,
652 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
654 VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0));
658 catch (const vk::Error& error)
660 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
661 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
663 // Try next memory type/heap if out of memory
667 // Throw all other errors forward
674 TCU_FAIL("Failed to allocate memory for image");
// Submits a single command buffer to `queue` (no wait/signal semaphores,
// no fence) and blocks until the queue is idle.
677 void queueRun (const vk::DeviceInterface& vkd,
679 vk::VkCommandBuffer commandBuffer)
681 const vk::VkSubmitInfo submitInfo =
683 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
688 (const vk::VkPipelineStageFlags*)DE_NULL,
697 VK_CHECK(vkd.queueSubmit(queue, 1, &submitInfo, 0));
698 VK_CHECK(vkd.queueWaitIdle(queue));
// Maps `size` bytes of `memory` from offset 0 and returns the host pointer.
701 void* mapMemory (const vk::DeviceInterface& vkd,
703 vk::VkDeviceMemory memory,
704 vk::VkDeviceSize size)
708 VK_CHECK(vkd.mapMemory(device, memory, 0, size, 0, &ptr));
// CPU-side model of the tested memory region: a byte array plus a bitmap
// (one bit per byte, packed in 64-bit words) recording which bytes have a
// known ("defined") reference value.
713 class ReferenceMemory
716 ReferenceMemory (size_t size);
718 void set (size_t pos, deUint8 val);
719 deUint8 get (size_t pos) const;
720 bool isDefined (size_t pos) const;
722 void setDefined (size_t offset, size_t size, const void* data);
723 void setUndefined (size_t offset, size_t size);
724 void setData (size_t offset, size_t size, const void* data);
726 size_t getSize (void) const { return m_data.size(); }
729 vector<deUint8> m_data;
730 vector<deUint64> m_defined;
// Size the defined-bitmap to ceil(size / 64) words, all bits cleared.
733 ReferenceMemory::ReferenceMemory (size_t size)
735 , m_defined (size / 64 + (size % 64 == 0 ? 0 : 1), 0ull)
// Store one byte and mark it defined.
739 void ReferenceMemory::set (size_t pos, deUint8 val)
742 m_defined[pos / 64] |= 0x1ull << (pos % 64);
// Copy `size` bytes into the model at `offset`, marking each byte defined.
745 void ReferenceMemory::setData (size_t offset, size_t size, const void* data_)
747 const deUint8* data = (const deUint8*)data_;
749 // \todo [2016-03-09 mika] Optimize
750 for (size_t pos = 0; pos < size; pos++)
752 m_data[offset + pos] = data[pos];
753 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
757 void ReferenceMemory::setUndefined (size_t offset, size_t size)
759 // \todo [2016-03-09 mika] Optimize
760 for (size_t pos = 0; pos < size; pos++)
761 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
// Returns the reference byte at `pos`; asserts that the byte has a defined
// value (the return statement is elided in this listing).
764 deUint8 ReferenceMemory::get (size_t pos) const
766 DE_ASSERT(isDefined(pos));
// True if byte `pos` has been written via set()/setData()/setDefined().
770 bool ReferenceMemory::isDefined (size_t pos) const
772 return (m_defined[pos / 64] & (0x1ull << (pos % 64))) != 0;
// NOTE(review): the `class Memory` header line is elided from this listing;
// the members below describe one device-memory allocation under test, along
// with the largest buffer / RGBA8 image that fits in it.
778 Memory (const vk::InstanceInterface& vki,
779 const vk::DeviceInterface& vkd,
780 vk::VkPhysicalDevice physicalDevice,
782 vk::VkDeviceSize size,
783 deUint32 memoryTypeIndex,
784 vk::VkDeviceSize maxBufferSize,
785 deInt32 maxImageWidth,
786 deInt32 maxImageHeight);
788 vk::VkDeviceSize getSize (void) const { return m_size; }
789 vk::VkDeviceSize getMaxBufferSize (void) const { return m_maxBufferSize; }
790 bool getSupportBuffers (void) const { return m_maxBufferSize > 0; }
792 deInt32 getMaxImageWidth (void) const { return m_maxImageWidth; }
793 deInt32 getMaxImageHeight (void) const { return m_maxImageHeight; }
794 bool getSupportImages (void) const { return m_maxImageWidth > 0; }
796 const vk::VkMemoryType& getMemoryType (void) const { return m_memoryType; }
797 deUint32 getMemoryTypeIndex (void) const { return m_memoryTypeIndex; }
798 vk::VkDeviceMemory getMemory (void) const { return *m_memory; }
801 const vk::VkDeviceSize m_size;
802 const deUint32 m_memoryTypeIndex;
803 const vk::VkMemoryType m_memoryType;
804 const vk::Unique<vk::VkDeviceMemory> m_memory;
805 const vk::VkDeviceSize m_maxBufferSize;
806 const deInt32 m_maxImageWidth;
807 const deInt32 m_maxImageHeight;
// Looks up the VkMemoryType description for `memoryTypeIndex`; asserts that
// the index is in range for the device.
810 vk::VkMemoryType getMemoryTypeInfo (const vk::InstanceInterface& vki,
811 vk::VkPhysicalDevice device,
812 deUint32 memoryTypeIndex)
814 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, device);
816 DE_ASSERT(memoryTypeIndex < memoryProperties.memoryTypeCount);
818 return memoryProperties.memoryTypes[memoryTypeIndex];
// Binary-searches for the largest buffer with the given usage whose memory
// requirements still fit in `memorySize` and are satisfiable by
// `memoryTypeIndex`. First tries the full size, then halves the step each
// iteration, growing on success and shrinking on failure.
821 vk::VkDeviceSize findMaxBufferSize (const vk::DeviceInterface& vkd,
824 vk::VkBufferUsageFlags usage,
825 vk::VkSharingMode sharingMode,
826 const vector<deUint32>& queueFamilies,
828 vk::VkDeviceSize memorySize,
829 deUint32 memoryTypeIndex)
831 vk::VkDeviceSize lastSuccess = 0;
832 vk::VkDeviceSize currentSize = memorySize / 2;
// Fast path: the whole memory size may already be usable.
835 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, memorySize, usage, sharingMode, queueFamilies));
836 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
838 if (requirements.size == memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
842 for (vk::VkDeviceSize stepSize = memorySize / 4; currentSize > 0; stepSize /= 2)
844 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, currentSize, usage, sharingMode, queueFamilies))_;
845 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
847 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
849 lastSuccess = currentSize;
850 currentSize += stepSize;
853 currentSize -= stepSize;
862 // Round size down maximum W * H * 4, where W and H < 4096
// Searches for W, H maximizing W*H*4 <= size and returns that byte count, so
// the buffer can be reinterpreted as a 2D RGBA8 image.
// NOTE(review): `de::max(maxTexelCount, maxTextureSize)` makes bestW exceed
// 4096 whenever maxTexelCount > 4096, contradicting the "W and H < 4096"
// contract above — this looks like it should be `de::min`; the refinement
// body of the loop (elided here) may compensate, so confirm upstream.
863 vk::VkDeviceSize roundBufferSizeToWxHx4 (vk::VkDeviceSize size)
865 const vk::VkDeviceSize maxTextureSize = 4096;
866 vk::VkDeviceSize maxTexelCount = size / 4;
867 vk::VkDeviceSize bestW = de::max(maxTexelCount, maxTextureSize);
868 vk::VkDeviceSize bestH = maxTexelCount / bestW;
870 // \todo [2016-03-09 mika] Could probably be faster?
871 for (vk::VkDeviceSize w = 1; w * w < maxTexelCount && w < maxTextureSize && bestW * bestH * 4 < size; w++)
873 const vk::VkDeviceSize h = maxTexelCount / w;
875 if (bestW * bestH < w * h)
882 return bestW * bestH * 4;
885 // Find RGBA8 image size that has exactly "size" of number of bytes.
886 // "size" must be W * H * 4 where W and H < 4096
// Linear scan over candidate widths; returns the first (W, H) whose product
// exactly covers the texel count. Precondition violations hit DE_FATAL.
887 IVec2 findImageSizeWxHx4 (vk::VkDeviceSize size)
889 const vk::VkDeviceSize maxTextureSize = 4096;
890 vk::VkDeviceSize texelCount = size / 4;
892 DE_ASSERT((size % 4) == 0);
894 // \todo [2016-03-09 mika] Could probably be faster?
895 for (vk::VkDeviceSize w = 1; w < maxTextureSize && w < texelCount; w++)
897 const vk::VkDeviceSize h = texelCount / w;
899 if ((texelCount % w) == 0 && h < maxTextureSize)
900 return IVec2((int)w, (int)h);
903 DE_FATAL("Invalid size");
904 return IVec2(-1, -1);
// Binary-searches for the largest 2D RGBA8 optimal-tiling image with the
// given usage whose memory requirements fit in `memorySize` and are
// satisfiable by `memoryTypeIndex`. Starts from a near-square guess based on
// the texel budget, then grows/shrinks both dimensions by a halving step.
907 IVec2 findMaxRGBA8ImageSize (const vk::DeviceInterface& vkd,
910 vk::VkImageUsageFlags usage,
911 vk::VkSharingMode sharingMode,
912 const vector<deUint32>& queueFamilies,
914 vk::VkDeviceSize memorySize,
915 deUint32 memoryTypeIndex)
917 IVec2 lastSuccess (0);
// Initial guess: widest near-square layout for memorySize / 4 texels.
921 const deUint32 texelCount = (deUint32)(memorySize / 4);
922 const deUint32 width = (deUint32)deFloatSqrt((float)texelCount);
923 const deUint32 height = texelCount / width;
925 currentSize[0] = deMaxu32(width, height);
926 currentSize[1] = deMinu32(width, height);
929 for (deInt32 stepSize = currentSize[0] / 2; currentSize[0] > 0; stepSize /= 2)
931 const vk::VkImageCreateInfo createInfo =
933 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
937 vk::VK_IMAGE_TYPE_2D,
938 vk::VK_FORMAT_R8G8B8A8_UNORM,
940 (deUint32)currentSize[0],
941 (deUint32)currentSize[1],
945 vk::VK_SAMPLE_COUNT_1_BIT,
946 vk::VK_IMAGE_TILING_OPTIMAL,
949 (deUint32)queueFamilies.size(),
951 vk::VK_IMAGE_LAYOUT_UNDEFINED
953 const vk::Unique<vk::VkImage> image (vk::createImage(vkd, device, &createInfo));
954 const vk::VkMemoryRequirements requirements (vk::getImageMemoryRequirements(vkd, device, *image));
956 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
958 lastSuccess = currentSize;
959 currentSize[0] += stepSize;
960 currentSize[1] += stepSize;
964 currentSize[0] -= stepSize;
965 currentSize[1] -= stepSize;
// Allocates the tested device memory up front and records the precomputed
// limits (largest buffer / image that fit) for later test planning.
975 Memory::Memory (const vk::InstanceInterface& vki,
976 const vk::DeviceInterface& vkd,
977 vk::VkPhysicalDevice physicalDevice,
979 vk::VkDeviceSize size,
980 deUint32 memoryTypeIndex,
981 vk::VkDeviceSize maxBufferSize,
982 deInt32 maxImageWidth,
983 deInt32 maxImageHeight)
985 , m_memoryTypeIndex (memoryTypeIndex)
986 , m_memoryType (getMemoryTypeInfo(vki, physicalDevice, memoryTypeIndex))
987 , m_memory (allocMemory(vkd, device, size, memoryTypeIndex))
988 , m_maxBufferSize (maxBufferSize)
989 , m_maxImageWidth (maxImageWidth)
990 , m_maxImageHeight (maxImageHeight)
// NOTE(review): the `class Context` header line is elided from this listing.
// Bundles the Vulkan handles, queue set and compiled shader binaries shared
// by every command in a test; it owns only the command pool.
997 Context (const vk::InstanceInterface& vki,
998 const vk::DeviceInterface& vkd,
999 vk::VkPhysicalDevice physicalDevice,
1000 vk::VkDevice device,
1002 deUint32 queueFamilyIndex,
1003 const vector<pair<deUint32, vk::VkQueue> >& queues,
1004 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection)
1007 , m_physicalDevice (physicalDevice)
1010 , m_queueFamilyIndex (queueFamilyIndex)
1012 , m_commandPool (createCommandPool(vkd, device, queueFamilyIndex))
1013 , m_binaryCollection (binaryCollection)
// Cache the family index of every queue for sharing-mode setup.
1015 for (size_t queueNdx = 0; queueNdx < m_queues.size(); queueNdx++)
1016 m_queueFamilies.push_back(m_queues[queueNdx].first);
1019 const vk::InstanceInterface& getInstanceInterface (void) const { return m_vki; }
1020 vk::VkPhysicalDevice getPhysicalDevice (void) const { return m_physicalDevice; }
1021 vk::VkDevice getDevice (void) const { return m_device; }
1022 const vk::DeviceInterface& getDeviceInterface (void) const { return m_vkd; }
1023 vk::VkQueue getQueue (void) const { return m_queue; }
1024 deUint32 getQueueFamily (void) const { return m_queueFamilyIndex; }
1025 const vector<pair<deUint32, vk::VkQueue> >& getQueues (void) const { return m_queues; }
1026 const vector<deUint32> getQueueFamilies (void) const { return m_queueFamilies; }
1027 vk::VkCommandPool getCommandPool (void) const { return *m_commandPool; }
1028 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_binaryCollection; }
1031 const vk::InstanceInterface& m_vki;
1032 const vk::DeviceInterface& m_vkd;
1033 const vk::VkPhysicalDevice m_physicalDevice;
1034 const vk::VkDevice m_device;
1035 const vk::VkQueue m_queue;
1036 const deUint32 m_queueFamilyIndex;
1037 const vector<pair<deUint32, vk::VkQueue> >& m_queues;
1038 const vk::Unique<vk::VkCommandPool> m_commandPool;
1039 const vk::ProgramCollection<vk::ProgramBinary>& m_binaryCollection;
1040 vector<deUint32> m_queueFamilies;
// Tracks the resource (at most one buffer OR one image, never both) that
// commands create/consume during the prepare phase, together with its size,
// dimensions and current layout.
1043 class PrepareContext
1046 PrepareContext (const Context& context,
1047 const Memory& memory)
1048 : m_context (context)
1053 const Memory& getMemory (void) const { return m_memory; }
1054 const Context& getContext (void) const { return m_context; }
1055 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_context.getBinaryCollection(); }
// Takes ownership of `buffer`; there must be no live buffer or image yet.
1057 void setBuffer (vk::Move<vk::VkBuffer> buffer,
1058 vk::VkDeviceSize size)
1060 DE_ASSERT(!m_currentImage);
1061 DE_ASSERT(!m_currentBuffer);
1063 m_currentBuffer = buffer;
1064 m_currentBufferSize = size;
1067 vk::VkBuffer getBuffer (void) const { return *m_currentBuffer; }
1068 vk::VkDeviceSize getBufferSize (void) const
1070 DE_ASSERT(m_currentBuffer);
1071 return m_currentBufferSize;
// Releases ownership without destroying the handle (caller takes over).
1074 void releaseBuffer (void) { m_currentBuffer.disown(); }
// Takes ownership of `image` plus its layout/size/extent bookkeeping.
1076 void setImage (vk::Move<vk::VkImage> image,
1077 vk::VkImageLayout layout,
1078 vk::VkDeviceSize memorySize,
1082 DE_ASSERT(!m_currentImage);
1083 DE_ASSERT(!m_currentBuffer);
1085 m_currentImage = image;
1086 m_currentImageMemorySize = memorySize;
1087 m_currentImageLayout = layout;
1088 m_currentImageWidth = width;
1089 m_currentImageHeight = height;
// Records a layout transition performed by a command.
1092 void setImageLayout (vk::VkImageLayout layout)
1094 DE_ASSERT(m_currentImage);
1095 m_currentImageLayout = layout;
1098 vk::VkImage getImage (void) const { return *m_currentImage; }
1099 deInt32 getImageWidth (void) const
1101 DE_ASSERT(m_currentImage);
1102 return m_currentImageWidth;
1104 deInt32 getImageHeight (void) const
1106 DE_ASSERT(m_currentImage);
1107 return m_currentImageHeight;
1109 vk::VkDeviceSize getImageMemorySize (void) const
1111 DE_ASSERT(m_currentImage);
1112 return m_currentImageMemorySize;
// Releases ownership without destroying the handle (caller takes over).
1115 void releaseImage (void) { m_currentImage.disown(); }
1117 vk::VkImageLayout getImageLayout (void) const
1119 DE_ASSERT(m_currentImage);
1120 return m_currentImageLayout;
1124 const Context& m_context;
1125 const Memory& m_memory;
1127 vk::Move<vk::VkBuffer> m_currentBuffer;
1128 vk::VkDeviceSize m_currentBufferSize;
1130 vk::Move<vk::VkImage> m_currentImage;
1131 vk::VkDeviceSize m_currentImageMemorySize;
1132 vk::VkImageLayout m_currentImageLayout;
1133 deInt32 m_currentImageWidth;
1134 deInt32 m_currentImageHeight;
// State shared between commands during execution: currently just the host
// pointer of the mapped memory (set by Map, cleared by UnMap).
1137 class ExecuteContext
1140 ExecuteContext (const Context& context)
1141 : m_context (context)
1145 const Context& getContext (void) const { return m_context; }
1146 void setMapping (void* ptr) { m_mapping = ptr; }
1147 void* getMapping (void) const { return m_mapping; }
1150 const Context& m_context;
// State shared between commands during verification: log, result collector
// and the CPU-side reference memory/image the GPU results are checked
// against. (Class header line is elided from this listing.)
1157 VerifyContext (TestLog& log,
1158 tcu::ResultCollector& resultCollector,
1159 const Context& context,
1160 vk::VkDeviceSize size)
1162 , m_resultCollector (resultCollector)
1163 , m_context (context)
1164 , m_reference ((size_t)size)
1168 const Context& getContext (void) const { return m_context; }
1169 TestLog& getLog (void) const { return m_log; }
1170 tcu::ResultCollector& getResultCollector (void) const { return m_resultCollector; }
1172 ReferenceMemory& getReference (void) { return m_reference; }
1173 TextureLevel& getReferenceImage (void) { return m_referenceImage;}
1177 tcu::ResultCollector& m_resultCollector;
1178 const Context& m_context;
1179 ReferenceMemory m_reference;
1180 TextureLevel m_referenceImage;
// Abstract base for every test operation. A command's lifecycle is:
// prepare() (allocate Vulkan objects, build command buffers), execute()
// (touch mapped memory / submit work), verify() (check results against the
// reference), with log* hooks describing each phase for the test log.
// (The `class Command` header line is elided from this listing.)
1186 // Constructor should allocate all non-vulkan resources.
1187 virtual ~Command (void) {}
1189 // Get name of the command
1190 virtual const char* getName (void) const = 0;
1192 // Log prepare operations
1193 virtual void logPrepare (TestLog&, size_t) const {}
1194 // Log executed operations
1195 virtual void logExecute (TestLog&, size_t) const {}
1197 // Prepare should allocate all vulkan resources and resources that require
1198 // that buffer or memory has been already allocated. This should build all
1199 // command buffers etc.
1200 virtual void prepare (PrepareContext&) {}
1202 // Execute command. Write or read mapped memory, submit commands to queue
1204 virtual void execute (ExecuteContext&) {}
1206 // Verify that results are correct.
1207 virtual void verify (VerifyContext&, size_t) {}
1210 // Allow only inheritance
1215 Command (const Command&);
1216 Command& operator& (const Command&);
// Map: maps the whole tested VkDeviceMemory allocation and publishes the host
// pointer via ExecuteContext::setMapping() for later host-access commands.
1219 class Map : public Command
1224 const char* getName (void) const { return "Map"; }
1227 void logExecute (TestLog& log, size_t commandIndex) const
1229 log << TestLog::Message << commandIndex << ":" << getName() << " Map memory" << TestLog::EndMessage;
1232 void prepare (PrepareContext& context)
// Snapshot handle and size at prepare time; execute() must not touch PrepareContext.
1234 m_memory = context.getMemory().getMemory();
1235 m_size = context.getMemory().getSize();
1238 void execute (ExecuteContext& context)
1240 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1241 const vk::VkDevice device = context.getContext().getDevice();
1243 context.setMapping(mapMemory(vkd, device, m_memory, m_size))
1247 vk::VkDeviceMemory m_memory;
1248 vk::VkDeviceSize m_size;
// UnMap: unmaps the tested memory allocation and clears the published mapping
// pointer so subsequent commands cannot use a stale host pointer.
1251 class UnMap : public Command
1256 const char* getName (void) const { return "UnMap"; }
1258 void logExecute (TestLog& log, size_t commandIndex) const
1260 log << TestLog::Message << commandIndex << ": Unmap memory" << TestLog::EndMessage;
1263 void prepare (PrepareContext& context)
1265 m_memory = context.getMemory().getMemory();
1268 void execute (ExecuteContext& context)
1270 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1271 const vk::VkDevice device = context.getContext().getDevice();
1273 vkd.unmapMemory(device, m_memory);
1274 context.setMapping(DE_NULL);
1278 vk::VkDeviceMemory m_memory;
1281 class Invalidate : public Command
1284 Invalidate (void) {}
1285 ~Invalidate (void) {}
1286 const char* getName (void) const { return "Invalidate"; }
1288 void logExecute (TestLog& log, size_t commandIndex) const
1290 log << TestLog::Message << commandIndex << ": Invalidate mapped memory" << TestLog::EndMessage;
1293 void prepare (PrepareContext& context)
1295 m_memory = context.getMemory().getMemory();
1296 m_size = context.getMemory().getSize();
1299 void execute (ExecuteContext& context)
1301 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1302 const vk::VkDevice device = context.getContext().getDevice();
1304 vk::invalidateMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1308 vk::VkDeviceMemory m_memory;
1309 vk::VkDeviceSize m_size;
// Flush: flushes the whole mapped range so host writes become visible to the
// device (required for non-coherent memory). Mirrors Invalidate above.
1312 class Flush : public Command
1317 const char* getName (void) const { return "Flush"; }
1319 void logExecute (TestLog& log, size_t commandIndex) const
1321 log << TestLog::Message << commandIndex << ": Flush mapped memory" << TestLog::EndMessage;
1324 void prepare (PrepareContext& context)
1326 m_memory = context.getMemory().getMemory();
1327 m_size = context.getMemory().getSize();
1330 void execute (ExecuteContext& context)
1332 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1333 const vk::VkDevice device = context.getContext().getDevice();
1335 vk::flushMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1339 vk::VkDeviceMemory m_memory;
1340 vk::VkDeviceSize m_size;
1343 // Host memory reads and writes
// HostMemoryAccess: touches the mapped memory from the host. Depending on the
// (read, write) flags it reads bytes into m_readData, writes a deterministic
// pseudo-random stream (seeded with m_seed), or does a read-modify-write; the
// same seed lets verify() replay the stream against the reference memory.
1344 class HostMemoryAccess : public Command
1347 HostMemoryAccess (bool read, bool write, deUint32 seed);
1348 ~HostMemoryAccess (void) {}
1349 const char* getName (void) const { return "HostMemoryAccess"; }
1351 void logExecute (TestLog& log, size_t commandIndex) const;
1352 void prepare (PrepareContext& context);
1353 void execute (ExecuteContext& context);
1355 void verify (VerifyContext& context, size_t commandIndex);
1360 const deUint32 m_seed;
// Bytes observed during execute(); compared against the reference in verify().
1363 vector<deUint8> m_readData;
1366 HostMemoryAccess::HostMemoryAccess (bool read, bool write, deUint32 seed)
1373 void HostMemoryAccess::logExecute (TestLog& log, size_t commandIndex) const
1375 log << TestLog::Message << commandIndex << ": Host memory access:" << (m_read ? " read" : "") << (m_write ? " write" : "") << ", seed: " << m_seed << TestLog::EndMessage;
1378 void HostMemoryAccess::prepare (PrepareContext& context)
1380 m_size = (size_t)context.getMemory().getSize();
1383 m_readData.resize(m_size, 0);
// Perform the host-side access on the currently mapped pointer. The RNG is
// seeded with m_seed so verify() can reproduce the exact byte stream.
1386 void HostMemoryAccess::execute (ExecuteContext& context)
1388 de::Random rng (m_seed);
1389 deUint8* const ptr = (deUint8*)context.getMapping();
1391 if (m_read && m_write)
// Read-modify-write: record the observed byte, then XOR it with a random mask.
1393 for (size_t pos = 0; pos < m_size; pos++)
1395 const deUint8 mask = rng.getUint8();
1396 const deUint8 value = ptr[pos];
1398 m_readData[pos] = value;
1399 ptr[pos] = value ^ mask;
// Read-only: just record what the device/host left in memory.
1404 for (size_t pos = 0; pos < m_size; pos++)
1406 const deUint8 value = ptr[pos];
1408 m_readData[pos] = value;
// Write-only: generate the pseudo-random byte stream for each position.
1413 for (size_t pos = 0; pos < m_size; pos++)
1415 const deUint8 value = rng.getUint8();
1421 DE_FATAL("Host memory access without read or write.");
// Replay the execute()-time RNG against the reference memory: bytes read are
// compared to the reference (only where the reference is defined, i.e. not
// clobbered by layout transitions etc.), and bytes written update the reference.
1424 void HostMemoryAccess::verify (VerifyContext& context, size_t commandIndex)
1426 tcu::ResultCollector& resultCollector = context.getResultCollector();
1427 ReferenceMemory& reference = context.getReference();
1428 de::Random rng (m_seed);
1430 if (m_read && m_write)
1432 for (size_t pos = 0; pos < m_size; pos++)
1434 const deUint8 mask = rng.getUint8();
1435 const deUint8 value = m_readData[pos];
1437 if (reference.isDefined(pos))
1439 if (value != reference.get(pos))
1441 resultCollector.fail(
1442 de::toString(commandIndex) + ":" + getName()
1443 + " Result differs from reference, Expected: "
1444 + de::toString(tcu::toHex<8>(reference.get(pos)))
1446 + de::toString(tcu::toHex<8>(value))
1448 + de::toString(pos));
// Reference tracks the XOR write performed by execute().
1452 reference.set(pos, reference.get(pos) ^ mask);
// Read-only: compare observed bytes against the defined reference bytes.
1458 for (size_t pos = 0; pos < m_size; pos++)
1460 const deUint8 value = m_readData[pos];
1462 if (reference.isDefined(pos))
1464 if (value != reference.get(pos))
1466 resultCollector.fail(
1467 de::toString(commandIndex) + ":" + getName()
1468 + " Result differs from reference, Expected: "
1469 + de::toString(tcu::toHex<8>(reference.get(pos)))
1471 + de::toString(tcu::toHex<8>(value))
1473 + de::toString(pos));
// Write-only: reference simply takes the replayed pseudo-random stream.
1481 for (size_t pos = 0; pos < m_size; pos++)
1483 const deUint8 value = rng.getUint8();
1485 reference.set(pos, value);
1489 DE_FATAL("Host memory access without read or write.");
1492 class CreateBuffer : public Command
1495 CreateBuffer (vk::VkBufferUsageFlags usage,
1496 vk::VkSharingMode sharing);
1497 ~CreateBuffer (void) {}
1498 const char* getName (void) const { return "CreateBuffer"; }
1500 void logPrepare (TestLog& log, size_t commandIndex) const;
1501 void prepare (PrepareContext& context);
1504 const vk::VkBufferUsageFlags m_usage;
1505 const vk::VkSharingMode m_sharing;
1508 CreateBuffer::CreateBuffer (vk::VkBufferUsageFlags usage,
1509 vk::VkSharingMode sharing)
1511 , m_sharing (sharing)
1515 void CreateBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1517 log << TestLog::Message << commandIndex << ":" << getName() << " Create buffer, Sharing mode: " << m_sharing << ", Usage: " << vk::getBufferUsageFlagsStr(m_usage) << TestLog::EndMessage;
1520 void CreateBuffer::prepare (PrepareContext& context)
1522 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1523 const vk::VkDevice device = context.getContext().getDevice();
1524 const vk::VkDeviceSize bufferSize = context.getMemory().getMaxBufferSize();
1525 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
1527 context.setBuffer(createBuffer(vkd, device, bufferSize, m_usage, m_sharing, queueFamilies), bufferSize);
// DestroyBuffer: takes ownership of the context's buffer in prepare() (so the
// context no longer destroys it) and destroys it during execute().
1530 class DestroyBuffer : public Command
1533 DestroyBuffer (void);
1534 ~DestroyBuffer (void) {}
1535 const char* getName (void) const { return "DestroyBuffer"; }
1537 void logExecute (TestLog& log, size_t commandIndex) const;
1538 void prepare (PrepareContext& context);
1539 void execute (ExecuteContext& context);
1542 vk::Move<vk::VkBuffer> m_buffer;
1545 DestroyBuffer::DestroyBuffer (void)
1549 void DestroyBuffer::prepare (PrepareContext& context)
// Wrap the raw handle in a Move<> and release it from the context: ownership
// transfers to this command.
1551 m_buffer = vk::Move<vk::VkBuffer>(vk::check(context.getBuffer()), vk::Deleter<vk::VkBuffer>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1552 context.releaseBuffer();
1555 void DestroyBuffer::logExecute (TestLog& log, size_t commandIndex) const
1557 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy buffer" << TestLog::EndMessage;
1560 void DestroyBuffer::execute (ExecuteContext& context)
1562 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1563 const vk::VkDevice device = context.getContext().getDevice();
// disown() so the Move<> wrapper does not double-free after explicit destruction.
1565 vkd.destroyBuffer(device, m_buffer.disown(), DE_NULL);
// BindBufferMemory: binds the tested memory allocation to the context's buffer
// at offset 0 during prepare().
1568 class BindBufferMemory : public Command
1571 BindBufferMemory (void) {}
1572 ~BindBufferMemory (void) {}
1573 const char* getName (void) const { return "BindBufferMemory"; }
1575 void logPrepare (TestLog& log, size_t commandIndex) const;
1576 void prepare (PrepareContext& context);
1579 void BindBufferMemory::logPrepare (TestLog& log, size_t commandIndex) const
1581 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to buffer" << TestLog::EndMessage;
1584 void BindBufferMemory::prepare (PrepareContext& context)
1586 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1587 const vk::VkDevice device = context.getContext().getDevice();
1589 VK_CHECK(vkd.bindBufferMemory(device, context.getBuffer(), context.getMemory().getMemory(), 0));
// CreateImage: creates a 2D RGBA8 optimal-tiling image sized to the largest
// dimensions the tested memory can back, hands it to the PrepareContext in
// UNDEFINED layout, and resets the reference image in verify().
1592 class CreateImage : public Command
1595 CreateImage (vk::VkImageUsageFlags usage,
1596 vk::VkSharingMode sharing);
1597 ~CreateImage (void) {}
1598 const char* getName (void) const { return "CreateImage"; }
1600 void logPrepare (TestLog& log, size_t commandIndex) const;
1601 void prepare (PrepareContext& context);
1602 void verify (VerifyContext& context, size_t commandIndex);
1605 const vk::VkImageUsageFlags m_usage;
1606 const vk::VkSharingMode m_sharing;
1607 deInt32 m_imageWidth;
1608 deInt32 m_imageHeight;
1611 CreateImage::CreateImage (vk::VkImageUsageFlags usage,
1612 vk::VkSharingMode sharing)
1614 , m_sharing (sharing)
1618 void CreateImage::logPrepare (TestLog& log, size_t commandIndex) const
1620 log << TestLog::Message << commandIndex << ":" << getName() << " Create image, sharing: " << m_sharing << ", usage: " << vk::getImageUsageFlagsStr(m_usage) << TestLog::EndMessage;
1623 void CreateImage::prepare (PrepareContext& context)
1625 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1626 const vk::VkDevice device = context.getContext().getDevice();
1627 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Maximum extent backed by the tested memory object for this format/usage.
1629 m_imageWidth = context.getMemory().getMaxImageWidth();
1630 m_imageHeight = context.getMemory().getMaxImageHeight();
1633 const vk::VkImageCreateInfo createInfo =
1635 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
1639 vk::VK_IMAGE_TYPE_2D,
1640 vk::VK_FORMAT_R8G8B8A8_UNORM,
1642 (deUint32)m_imageWidth,
1643 (deUint32)m_imageHeight,
1647 vk::VK_SAMPLE_COUNT_1_BIT,
1648 vk::VK_IMAGE_TILING_OPTIMAL,
1651 (deUint32)queueFamilies.size(),
1653 vk::VK_IMAGE_LAYOUT_UNDEFINED
1655 vk::Move<vk::VkImage> image (createImage(vkd, device, &createInfo));
1656 const vk::VkMemoryRequirements requirements = vk::getImageMemoryRequirements(vkd, device, *image);
1658 context.setImage(image, vk::VK_IMAGE_LAYOUT_UNDEFINED, requirements.size, m_imageWidth, m_imageHeight);
1662 void CreateImage::verify (VerifyContext& context, size_t)
// A fresh image invalidates any previous reference image contents.
1664 context.getReferenceImage() = TextureLevel(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight);
// DestroyImage: takes ownership of the context's image in prepare() and
// destroys it during execute(). Mirrors DestroyBuffer above.
1667 class DestroyImage : public Command
1670 DestroyImage (void);
1671 ~DestroyImage (void) {}
1672 const char* getName (void) const { return "DestroyImage"; }
1674 void logExecute (TestLog& log, size_t commandIndex) const;
1675 void prepare (PrepareContext& context);
1676 void execute (ExecuteContext& context);
1679 vk::Move<vk::VkImage> m_image;
1682 DestroyImage::DestroyImage (void)
1686 void DestroyImage::prepare (PrepareContext& context)
// Transfer ownership from the context to this command.
1688 m_image = vk::Move<vk::VkImage>(vk::check(context.getImage()), vk::Deleter<vk::VkImage>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1689 context.releaseImage();
1693 void DestroyImage::logExecute (TestLog& log, size_t commandIndex) const
1695 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy image" << TestLog::EndMessage;
1698 void DestroyImage::execute (ExecuteContext& context)
1700 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1701 const vk::VkDevice device = context.getContext().getDevice();
1703 vkd.destroyImage(device, m_image.disown(), DE_NULL);
// BindImageMemory: binds the tested memory allocation to the context's image
// at offset 0 during prepare().
1706 class BindImageMemory : public Command
1709 BindImageMemory (void) {}
1710 ~BindImageMemory (void) {}
1711 const char* getName (void) const { return "BindImageMemory"; }
1713 void logPrepare (TestLog& log, size_t commandIndex) const;
1714 void prepare (PrepareContext& context);
1717 void BindImageMemory::logPrepare (TestLog& log, size_t commandIndex) const
1719 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to image" << TestLog::EndMessage;
1722 void BindImageMemory::prepare (PrepareContext& context)
1724 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1725 const vk::VkDevice device = context.getContext().getDevice();
1727 VK_CHECK(vkd.bindImageMemory(device, context.getImage(), context.getMemory().getMemory(), 0));
// QueueWaitIdle: records nothing; execute() blocks until the queue is idle so
// all previously submitted work is complete before later commands run.
1730 class QueueWaitIdle : public Command
1733 QueueWaitIdle (void) {}
1734 ~QueueWaitIdle (void) {}
// Fixed typo: the name string was "QueuetWaitIdle", which did not match the
// class name and broke the class-name == command-name convention used for the
// log/section naming by every other command in this file.
1735 const char* getName (void) const { return "QueueWaitIdle"; }
1737 void logExecute (TestLog& log, size_t commandIndex) const;
1738 void execute (ExecuteContext& context);
1741 void QueueWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1743 log << TestLog::Message << commandIndex << ":" << getName() << " Queue wait idle" << TestLog::EndMessage;
1746 void QueueWaitIdle::execute (ExecuteContext& context)
1748 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1749 const vk::VkQueue queue = context.getContext().getQueue();
// Full host-device sync point on this queue.
1751 VK_CHECK(vkd.queueWaitIdle(queue));
// DeviceWaitIdle: execute() blocks until the whole device is idle (all queues),
// a stronger sync point than QueueWaitIdle above.
1754 class DeviceWaitIdle : public Command
1757 DeviceWaitIdle (void) {}
1758 ~DeviceWaitIdle (void) {}
1759 const char* getName (void) const { return "DeviceWaitIdle"; }
1761 void logExecute (TestLog& log, size_t commandIndex) const;
1762 void execute (ExecuteContext& context);
1765 void DeviceWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1767 log << TestLog::Message << commandIndex << ":" << getName() << " Device wait idle" << TestLog::EndMessage;
1770 void DeviceWaitIdle::execute (ExecuteContext& context)
1772 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1773 const vk::VkDevice device = context.getContext().getDevice();
1775 VK_CHECK(vkd.deviceWaitIdle(device));
// SubmitContext: read-only view of the PrepareContext plus the command buffer
// currently being recorded; passed to CmdCommand::submit() implementations.
1781 SubmitContext (const PrepareContext& context,
1782 const vk::VkCommandBuffer commandBuffer)
1783 : m_context (context)
1784 , m_commandBuffer (commandBuffer)
1788 const Memory& getMemory (void) const { return m_context.getMemory(); }
1789 const Context& getContext (void) const { return m_context.getContext(); }
1790 vk::VkCommandBuffer getCommandBuffer (void) const { return m_commandBuffer; }
1792 vk::VkBuffer getBuffer (void) const { return m_context.getBuffer(); }
1793 vk::VkDeviceSize getBufferSize (void) const { return m_context.getBufferSize(); }
1795 vk::VkImage getImage (void) const { return m_context.getImage(); }
1796 deInt32 getImageWidth (void) const { return m_context.getImageWidth(); }
1797 deInt32 getImageHeight (void) const { return m_context.getImageHeight(); }
1800 const PrepareContext& m_context;
1801 const vk::VkCommandBuffer m_commandBuffer;
// CmdCommand: base class for operations recorded into a command buffer (as
// opposed to Command, which runs on the host). prepare() allocates resources,
// submit() records into the command buffer, verify() checks results.
1807 virtual ~CmdCommand (void) {}
1808 virtual const char* getName (void) const = 0;
1810 // Log things that are done during prepare
1811 virtual void logPrepare (TestLog&, size_t) const {}
1812 // Log submitted calls etc.
1813 virtual void logSubmit (TestLog&, size_t) const {}
1815 // Allocate vulkan resources and prepare for submit.
1816 virtual void prepare (PrepareContext&) {}
1818 // Submit commands to command buffer.
1819 virtual void submit (SubmitContext&) {}
1822 virtual void verify (VerifyContext&, size_t) {}
// SubmitCommandBuffer: a host Command that owns a list of CmdCommands, records
// them all into one primary command buffer in prepare(), submits it in
// execute(), and fans verify()/logging out to each child command.
1825 class SubmitCommandBuffer : public Command
1828 SubmitCommandBuffer (const vector<CmdCommand*>& commands);
1829 ~SubmitCommandBuffer (void);
1831 const char* getName (void) const { return "SubmitCommandBuffer"; }
1832 void logExecute (TestLog& log, size_t commandIndex) const;
1833 void logPrepare (TestLog& log, size_t commandIndex) const;
1835 // Allocate command buffer and submit commands to command buffer
1836 void prepare (PrepareContext& context);
1837 void execute (ExecuteContext& context);
1839 // Verify that results are correct.
1840 void verify (VerifyContext& context, size_t commandIndex);
// Owned: destructor deletes each element.
1843 vector<CmdCommand*> m_commands;
1844 vk::Move<vk::VkCommandBuffer> m_commandBuffer;
1847 SubmitCommandBuffer::SubmitCommandBuffer (const vector<CmdCommand*>& commands)
1848 : m_commands (commands)
1852 SubmitCommandBuffer::~SubmitCommandBuffer (void)
1854 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1855 delete m_commands[cmdNdx];
1858 void SubmitCommandBuffer::prepare (PrepareContext& context)
1860 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1861 const vk::VkDevice device = context.getContext().getDevice();
1862 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
1864 m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);
// First let every child allocate its resources...
1866 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1868 CmdCommand& command = *m_commands[cmdNdx];
1870 command.prepare(context);
// ...then record all of them into the shared command buffer.
1874 SubmitContext submitContext (context, *m_commandBuffer);
1876 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1878 CmdCommand& command = *m_commands[cmdNdx];
1880 command.submit(submitContext);
1883 VK_CHECK(vkd.endCommandBuffer(*m_commandBuffer));
// Submit the pre-recorded command buffer to the queue without a fence; host
// synchronization is provided by explicit Queue/DeviceWaitIdle commands.
1887 void SubmitCommandBuffer::execute (ExecuteContext& context)
1889 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1890 const vk::VkCommandBuffer cmd = *m_commandBuffer;
1891 const vk::VkQueue queue = context.getContext().getQueue();
1892 const vk::VkSubmitInfo submit =
1894 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
1899 (const vk::VkPipelineStageFlags*)DE_NULL,
// Fixed: the vkQueueSubmit result was silently discarded. Every other Vulkan
// call in this file is wrapped in VK_CHECK; an ignored failure here (e.g.
// VK_ERROR_DEVICE_LOST) would surface only as a confusing later verify failure.
1908 VK_CHECK(vkd.queueSubmit(queue, 1, &submit, 0));
// Fan verify()/logging out to each child CmdCommand inside a named log section.
// Note: children receive their own index within this submit (cmdNdx), not the
// enclosing command stream index.
1911 void SubmitCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
1913 const string sectionName (de::toString(commandIndex) + ":" + getName());
1914 const tcu::ScopedLogSection section (context.getLog(), sectionName, sectionName);
1916 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1917 m_commands[cmdNdx]->verify(context, cmdNdx);
1920 void SubmitCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1922 const string sectionName (de::toString(commandIndex) + ":" + getName());
1923 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1925 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1926 m_commands[cmdNdx]->logPrepare(log, cmdNdx);
1929 void SubmitCommandBuffer::logExecute (TestLog& log, size_t commandIndex) const
1931 const string sectionName (de::toString(commandIndex) + ":" + getName());
1932 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1934 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1935 m_commands[cmdNdx]->logSubmit(log, cmdNdx);
// PipelineBarrier: records a vkCmdPipelineBarrier of one of three flavors
// (global VkMemoryBarrier, VkBufferMemoryBarrier, or VkImageMemoryBarrier with
// an optional layout) with the given src/dst stage and access masks.
1938 class PipelineBarrier : public CmdCommand
1948 PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1949 const vk::VkAccessFlags srcAccesses,
1950 const vk::VkPipelineStageFlags dstStages,
1951 const vk::VkAccessFlags dstAccesses,
1953 const tcu::Maybe<vk::VkImageLayout> imageLayout);
1954 ~PipelineBarrier (void) {}
1955 const char* getName (void) const { return "PipelineBarrier"; }
1957 void logSubmit (TestLog& log, size_t commandIndex) const;
1958 void submit (SubmitContext& context);
1961 const vk::VkPipelineStageFlags m_srcStages;
1962 const vk::VkAccessFlags m_srcAccesses;
1963 const vk::VkPipelineStageFlags m_dstStages;
1964 const vk::VkAccessFlags m_dstAccesses;
// Only meaningful for the image-barrier flavor.
1966 const tcu::Maybe<vk::VkImageLayout> m_imageLayout;
1969 PipelineBarrier::PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1970 const vk::VkAccessFlags srcAccesses,
1971 const vk::VkPipelineStageFlags dstStages,
1972 const vk::VkAccessFlags dstAccesses,
1974 const tcu::Maybe<vk::VkImageLayout> imageLayout)
1975 : m_srcStages (srcStages)
1976 , m_srcAccesses (srcAccesses)
1977 , m_dstStages (dstStages)
1978 , m_dstAccesses (dstAccesses)
1980 , m_imageLayout (imageLayout)
1984 void PipelineBarrier::logSubmit (TestLog& log, size_t commandIndex) const
1986 log << TestLog::Message << commandIndex << ":" << getName()
1987 << " " << (m_type == TYPE_GLOBAL ? "Global pipeline barrier"
1988 : m_type == TYPE_BUFFER ? "Buffer pipeline barrier"
1989 : "Image pipeline barrier")
1990 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
1991 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses) << TestLog::EndMessage;
// Record the barrier into the command buffer; the barrier struct used depends
// on m_type (global / buffer / image). Queue family ownership is never
// transferred (VK_QUEUE_FAMILY_IGNORED on both sides).
1994 void PipelineBarrier::submit (SubmitContext& context)
1996 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1997 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
1999 // \todo [2016-01-08 pyry] This could be cleaned up thanks to latest API changes
2005 const vk::VkMemoryBarrier barrier =
2007 vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,
2014 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 1, &barrier, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2020 const vk::VkBufferMemoryBarrier barrier =
2022 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2028 vk::VK_QUEUE_FAMILY_IGNORED,
2029 vk::VK_QUEUE_FAMILY_IGNORED,
2031 context.getBuffer(),
2036 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2042 const vk::VkImageMemoryBarrier barrier =
2044 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2053 vk::VK_QUEUE_FAMILY_IGNORED,
2054 vk::VK_QUEUE_FAMILY_IGNORED,
2058 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2064 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2069 DE_FATAL("Unknown pipeline barrier type");
// ImageTransition: records an image-layout-transition barrier (srcLayout ->
// dstLayout) and updates the context's tracked layout in prepare().
2073 class ImageTransition : public CmdCommand
2076 ImageTransition (vk::VkPipelineStageFlags srcStages,
2077 vk::VkAccessFlags srcAccesses,
2079 vk::VkPipelineStageFlags dstStages,
2080 vk::VkAccessFlags dstAccesses,
2082 vk::VkImageLayout srcLayout,
2083 vk::VkImageLayout dstLayout);
2085 ~ImageTransition (void) {}
2086 const char* getName (void) const { return "ImageTransition"; }
2088 void prepare (PrepareContext& context);
2089 void logSubmit (TestLog& log, size_t commandIndex) const;
2090 void submit (SubmitContext& context);
2091 void verify (VerifyContext& context, size_t);
2094 const vk::VkPipelineStageFlags m_srcStages;
2095 const vk::VkAccessFlags m_srcAccesses;
2096 const vk::VkPipelineStageFlags m_dstStages;
2097 const vk::VkAccessFlags m_dstAccesses;
2098 const vk::VkImageLayout m_srcLayout;
2099 const vk::VkImageLayout m_dstLayout;
// Captured in prepare(); used by verify() to undefine the reference bytes.
2101 vk::VkDeviceSize m_imageMemorySize;
2104 ImageTransition::ImageTransition (vk::VkPipelineStageFlags srcStages,
2105 vk::VkAccessFlags srcAccesses,
2107 vk::VkPipelineStageFlags dstStages,
2108 vk::VkAccessFlags dstAccesses,
2110 vk::VkImageLayout srcLayout,
2111 vk::VkImageLayout dstLayout)
2112 : m_srcStages (srcStages)
2113 , m_srcAccesses (srcAccesses)
2114 , m_dstStages (dstStages)
2115 , m_dstAccesses (dstAccesses)
2116 , m_srcLayout (srcLayout)
2117 , m_dstLayout (dstLayout)
2121 void ImageTransition::logSubmit (TestLog& log, size_t commandIndex) const
2123 log << TestLog::Message << commandIndex << ":" << getName()
2124 << " Image transition pipeline barrier"
2125 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
2126 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses)
2127 << ", srcLayout: " << m_srcLayout << ", dstLayout: " << m_dstLayout << TestLog::EndMessage;
// prepare(): sanity-check that the transition's source layout matches the
// currently tracked layout (UNDEFINED source is always allowed), then record
// the new layout and remember the image memory size for verify().
2130 void ImageTransition::prepare (PrepareContext& context)
2132 DE_ASSERT(context.getImageLayout() == vk::VK_IMAGE_LAYOUT_UNDEFINED || m_srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED || context.getImageLayout() == m_srcLayout);
2134 context.setImageLayout(m_dstLayout);
2135 m_imageMemorySize = context.getImageMemorySize();
2138 void ImageTransition::submit (SubmitContext& context)
2140 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2141 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2142 const vk::VkImageMemoryBarrier barrier =
2144 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2153 vk::VK_QUEUE_FAMILY_IGNORED,
2154 vk::VK_QUEUE_FAMILY_IGNORED,
2158 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2164 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2167 void ImageTransition::verify (VerifyContext& context, size_t)
// A layout transition may rearrange memory contents arbitrarily, so the
// byte-level reference for the image's memory becomes undefined.
2169 context.getReference().setUndefined(0, (size_t)m_imageMemorySize);
// FillBuffer: records vkCmdFillBuffer with a 32-bit pattern over the buffer
// (size rounded down to a multiple of 4, as required by the API), and updates
// the byte-level reference accordingly in verify().
2172 class FillBuffer : public CmdCommand
2175 FillBuffer (deUint32 value) : m_value(value) {}
2176 ~FillBuffer (void) {}
2177 const char* getName (void) const { return "FillBuffer"; }
2179 void logSubmit (TestLog& log, size_t commandIndex) const;
2180 void submit (SubmitContext& context);
2181 void verify (VerifyContext& context, size_t commandIndex);
2184 const deUint32 m_value;
2185 vk::VkDeviceSize m_bufferSize;
2188 void FillBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2190 log << TestLog::Message << commandIndex << ":" << getName() << " Fill value: " << m_value << TestLog::EndMessage;
2193 void FillBuffer::submit (SubmitContext& context)
2195 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2196 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2197 const vk::VkBuffer buffer = context.getBuffer();
2198 const vk::VkDeviceSize sizeMask = ~(0x3ull); // \note Round down to multiple of 4
2200 m_bufferSize = sizeMask & context.getBufferSize();
2201 vkd.cmdFillBuffer(cmd, buffer, 0, m_bufferSize, m_value);
2204 void FillBuffer::verify (VerifyContext& context, size_t)
2206 ReferenceMemory& reference = context.getReference();
// vkCmdFillBuffer writes 32-bit words; expand to bytes honoring host endianness.
2208 for (size_t ndx = 0; ndx < m_bufferSize; ndx++)
2210 #if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
2211 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(ndx % 4)))));
2213 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(3 - (ndx % 4))))));
// UpdateBuffer: records vkCmdUpdateBuffer calls filling the buffer with a
// seeded pseudo-random stream in 64KiB chunks (the per-call API limit);
// verify() replays the identical RNG stream into the reference memory.
2218 class UpdateBuffer : public CmdCommand
2221 UpdateBuffer (deUint32 seed) : m_seed(seed) {}
2222 ~UpdateBuffer (void) {}
2223 const char* getName (void) const { return "UpdateBuffer"; }
2225 void logSubmit (TestLog& log, size_t commandIndex) const;
2226 void submit (SubmitContext& context);
2227 void verify (VerifyContext& context, size_t commandIndex);
2230 const deUint32 m_seed;
2231 vk::VkDeviceSize m_bufferSize;
2234 void UpdateBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2236 log << TestLog::Message << commandIndex << ":" << getName() << " Update buffer, seed: " << m_seed << TestLog::EndMessage;
2239 void UpdateBuffer::submit (SubmitContext& context)
2241 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2242 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2243 const vk::VkBuffer buffer = context.getBuffer();
2244 const size_t blockSize = 65536;
2245 std::vector<deUint8> data (blockSize, 0);
2246 de::Random rng (m_seed);
2248 m_bufferSize = context.getBufferSize();
2250 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2252 for (size_t ndx = 0; ndx < data.size(); ndx++)
2253 data[ndx] = rng.getUint8();
// Last chunk may be shorter than blockSize.
2255 if (m_bufferSize - updated > blockSize)
2256 vkd.cmdUpdateBuffer(cmd, buffer, updated, blockSize, (const deUint32*)(&data[0]));
2258 vkd.cmdUpdateBuffer(cmd, buffer, updated, m_bufferSize - updated, (const deUint32*)(&data[0]));
2262 void UpdateBuffer::verify (VerifyContext& context, size_t)
2264 ReferenceMemory& reference = context.getReference();
2265 const size_t blockSize = 65536;
2266 vector<deUint8> data (blockSize, 0);
2267 de::Random rng (m_seed);
// Must mirror submit()'s chunking exactly so the RNG streams stay in sync.
2269 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2271 for (size_t ndx = 0; ndx < data.size(); ndx++)
2272 data[ndx] = rng.getUint8();
2274 if (m_bufferSize - updated > blockSize)
2275 reference.setData(updated, blockSize, &data[0]);
2277 reference.setData(updated, (size_t)(m_bufferSize - updated), &data[0]);
// BufferCopyToBuffer: copies the tested buffer into a host-visible destination
// buffer allocated by this command, so verify() can read the copy back and
// compare it against the byte-level reference.
2281 class BufferCopyToBuffer : public CmdCommand
2284 BufferCopyToBuffer (void) {}
2285 ~BufferCopyToBuffer (void) {}
2286 const char* getName (void) const { return "BufferCopyToBuffer"; }
2288 void logPrepare (TestLog& log, size_t commandIndex) const;
2289 void prepare (PrepareContext& context);
2290 void logSubmit (TestLog& log, size_t commandIndex) const;
2291 void submit (SubmitContext& context);
2292 void verify (VerifyContext& context, size_t commandIndex);
2295 vk::VkDeviceSize m_bufferSize;
2296 vk::Move<vk::VkBuffer> m_dstBuffer;
2297 vk::Move<vk::VkDeviceMemory> m_memory;
2300 void BufferCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2302 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for buffer to buffer copy." << TestLog::EndMessage;
2305 void BufferCopyToBuffer::prepare (PrepareContext& context)
2307 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2308 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2309 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2310 const vk::VkDevice device = context.getContext().getDevice();
2311 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2313 m_bufferSize = context.getBufferSize();
// Host-visible so verify() can map it and read the copied data back.
2315 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2316 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2319 void BufferCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2321 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to another buffer" << TestLog::EndMessage;
2324 void BufferCopyToBuffer::submit (SubmitContext& context)
2326 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2327 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2328 const vk::VkBufferCopy range =
2334 vkd.cmdCopyBuffer(commandBuffer, context.getBuffer(), *m_dstBuffer, 1, &range);
// verify(): submit a transfer->host barrier on the destination buffer, wait
// for it, then map/invalidate the destination memory and compare each defined
// reference byte against the copied data.
2337 void BufferCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2339 tcu::ResultCollector& resultCollector (context.getResultCollector());
2340 ReferenceMemory& reference (context.getReference());
2341 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2342 const vk::VkDevice device = context.getContext().getDevice();
2343 const vk::VkQueue queue = context.getContext().getQueue();
2344 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2345 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2346 const vk::VkBufferMemoryBarrier barrier =
2348 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2351 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2352 vk::VK_ACCESS_HOST_READ_BIT,
2354 vk::VK_QUEUE_FAMILY_IGNORED,
2355 vk::VK_QUEUE_FAMILY_IGNORED,
2361 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2363 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2364 queueRun(vkd, queue, *commandBuffer);
2367 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
// Invalidate in case the host-visible memory is not host-coherent.
2370 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2373 const deUint8* const data = (const deUint8*)ptr;
// Only bytes with a defined reference value are checked.
2375 for (size_t pos = 0; pos < (size_t)m_bufferSize; pos++)
2377 if (reference.isDefined(pos))
2379 if (data[pos] != reference.get(pos))
2381 resultCollector.fail(
2382 de::toString(commandIndex) + ":" + getName()
2383 + " Result differs from reference, Expected: "
2384 + de::toString(tcu::toHex<8>(reference.get(pos)))
2386 + de::toString(tcu::toHex<8>(data[pos]))
2388 + de::toString(pos));
2395 vkd.unmapMemory(device, *m_memory);
2398 context.getLog() << TestLog::Message << commandIndex << ": Buffer copy to buffer verification failed" << TestLog::EndMessage;
// Command that fills the shared test buffer by copying from a freshly
// allocated, seed-initialized source buffer.
2402 class BufferCopyFromBuffer : public CmdCommand
2405 BufferCopyFromBuffer (deUint32 seed) : m_seed(seed) {}
2406 ~BufferCopyFromBuffer (void) {}
2407 const char* getName (void) const { return "BufferCopyFromBuffer"; }
2409 void logPrepare (TestLog& log, size_t commandIndex) const;
2410 void prepare (PrepareContext& context);
2411 void logSubmit (TestLog& log, size_t commandIndex) const;
2412 void submit (SubmitContext& context);
2413 void verify (VerifyContext& context, size_t commandIndex);
// Seed for the de::Random stream that generates the source data; verify()
// replays the same stream to update the reference memory.
2416 const deUint32 m_seed;
2417 vk::VkDeviceSize m_bufferSize;
2418 vk::Move<vk::VkBuffer> m_srcBuffer;
2419 vk::Move<vk::VkDeviceMemory> m_memory;
2422 void BufferCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2424 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to buffer copy. Seed: " << m_seed << TestLog::EndMessage;
2427 void BufferCopyFromBuffer::prepare (PrepareContext& context)
2429 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2430 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2431 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2432 const vk::VkDevice device = context.getContext().getDevice();
2433 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2435 m_bufferSize = context.getBufferSize();
2436 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2437 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2440 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2441 de::Random rng (m_seed);
2444 deUint8* const data = (deUint8*)ptr;
2446 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2447 data[ndx] = rng.getUint8();
2450 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2451 vkd.unmapMemory(device, *m_memory);
2455 void BufferCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2457 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from another buffer" << TestLog::EndMessage;
// Record a copy from the seed-initialized source buffer into the shared test buffer.
// NOTE(review): the VkBufferCopy initializer body was dropped in extraction.
2460 void BufferCopyFromBuffer::submit (SubmitContext& context)
2462 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2463 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2464 const vk::VkBufferCopy range =
2470 vkd.cmdCopyBuffer(commandBuffer, *m_srcBuffer, context.getBuffer(), 1, &range);
2473 void BufferCopyFromBuffer::verify (VerifyContext& context, size_t)
2475 ReferenceMemory& reference (context.getReference());
2476 de::Random rng (m_seed);
2478 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2479 reference.set(ndx, rng.getUint8());
// Command that copies the shared test buffer into a newly created RGBA8 image
// and verifies by reading the image back through a second buffer.
2482 class BufferCopyToImage : public CmdCommand
2485 BufferCopyToImage (void) {}
2486 ~BufferCopyToImage (void) {}
2487 const char* getName (void) const { return "BufferCopyToImage"; }
2489 void logPrepare (TestLog& log, size_t commandIndex) const;
2490 void prepare (PrepareContext& context);
2491 void logSubmit (TestLog& log, size_t commandIndex) const;
2492 void submit (SubmitContext& context);
2493 void verify (VerifyContext& context, size_t commandIndex);
// Image dimensions chosen so that width * height * 4 == buffer size.
2496 deInt32 m_imageWidth;
2497 deInt32 m_imageHeight;
2498 vk::Move<vk::VkImage> m_dstImage;
2499 vk::Move<vk::VkDeviceMemory> m_memory;
2502 void BufferCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
2504 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for buffer to image copy." << TestLog::EndMessage;
// Create an optimal-tiled RGBA8 2D image sized so that its texel data matches
// the shared buffer size (width * height * 4), bind memory, and transition it
// from UNDEFINED to TRANSFER_DST_OPTIMAL so submit() can copy into it.
2507 void BufferCopyToImage::prepare (PrepareContext& context)
2509 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2510 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2511 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2512 const vk::VkDevice device = context.getContext().getDevice();
2513 const vk::VkQueue queue = context.getContext().getQueue();
2514 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2515 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Factor the buffer size into W x H with 4 bytes per texel.
2516 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2518 m_imageWidth = imageSize[0];
2519 m_imageHeight = imageSize[1];
// NOTE(review): several VkImageCreateInfo initializer lines (flags, extent
// depth, pQueueFamilyIndices, ...) were dropped in extraction.
2522 const vk::VkImageCreateInfo createInfo =
2524 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2528 vk::VK_IMAGE_TYPE_2D,
2529 vk::VK_FORMAT_R8G8B8A8_UNORM,
2531 (deUint32)m_imageWidth,
2532 (deUint32)m_imageHeight,
2535 1, 1, // mipLevels, arrayLayers
2536 vk::VK_SAMPLE_COUNT_1_BIT,
2538 vk::VK_IMAGE_TILING_OPTIMAL,
2539 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2540 vk::VK_SHARING_MODE_EXCLUSIVE,
2542 (deUint32)queueFamilies.size(),
2544 vk::VK_IMAGE_LAYOUT_UNDEFINED
2547 m_dstImage = vk::createImage(vkd, device, &createInfo);
2550 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
// One-off command buffer that performs the initial layout transition.
2553 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2554 const vk::VkImageMemoryBarrier barrier =
2556 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2560 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2562 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2563 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2565 vk::VK_QUEUE_FAMILY_IGNORED,
2566 vk::VK_QUEUE_FAMILY_IGNORED,
2570 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2572 1, // Mip level count
2578 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2580 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
// queueRun submits and waits so the image is in TRANSFER_DST_OPTIMAL before use.
2581 queueRun(vkd, queue, *commandBuffer);
2585 void BufferCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
2587 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to image" << TestLog::EndMessage;
// Record a full-image copy from the shared buffer into m_dstImage
// (which prepare() left in TRANSFER_DST_OPTIMAL).
// NOTE(review): parts of the VkBufferImageCopy initializer were dropped in
// extraction, and '®ion' on the last line is a mojibake of '&region'
// (HTML entity &reg) — restore when reconciling with upstream.
2590 void BufferCopyToImage::submit (SubmitContext& context)
2592 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2593 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2594 const vk::VkBufferImageCopy region =
2599 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2606 (deUint32)m_imageWidth,
2607 (deUint32)m_imageHeight,
2612 vkd.cmdCopyBufferToImage(commandBuffer, context.getBuffer(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
// Read the image back: transition it to TRANSFER_SRC_OPTIMAL, copy it into a
// host-visible buffer, barrier transfer->host, then compare each defined byte
// against the reference memory.
2615 void BufferCopyToImage::verify (VerifyContext& context, size_t commandIndex)
2617 tcu::ResultCollector& resultCollector (context.getResultCollector());
2618 ReferenceMemory& reference (context.getReference());
2619 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2620 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2621 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2622 const vk::VkDevice device = context.getContext().getDevice();
2623 const vk::VkQueue queue = context.getContext().getQueue();
2624 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2625 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2626 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Readback buffer: 4 bytes per texel.
2627 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2628 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Barrier: transfer write -> transfer read, DST_OPTIMAL -> SRC_OPTIMAL.
// NOTE(review): some initializer lines were dropped in extraction.
2630 const vk::VkImageMemoryBarrier imageBarrier =
2632 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2635 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2636 vk::VK_ACCESS_TRANSFER_READ_BIT,
2638 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2639 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2641 vk::VK_QUEUE_FAMILY_IGNORED,
2642 vk::VK_QUEUE_FAMILY_IGNORED,
2646 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2648 1, // Mip level count
// Barrier: make the image->buffer copy visible to host reads.
2653 const vk::VkBufferMemoryBarrier bufferBarrier =
2655 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2658 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2659 vk::VK_ACCESS_HOST_READ_BIT,
2661 vk::VK_QUEUE_FAMILY_IGNORED,
2662 vk::VK_QUEUE_FAMILY_IGNORED,
2668 const vk::VkBufferImageCopy region =
2673 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2680 (deUint32)m_imageWidth,
2681 (deUint32)m_imageHeight,
// NOTE(review): '®ion' below is a mojibake of '&region'.
2686 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
2687 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
2688 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2691 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2692 queueRun(vkd, queue, *commandBuffer);
2695 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
// Invalidate before reading: memory may be non-coherent.
2697 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2700 const deUint8* const data = (const deUint8*)ptr;
2702 for (size_t pos = 0; pos < (size_t)( 4 * m_imageWidth * m_imageHeight); pos++)
2704 if (reference.isDefined(pos))
2706 if (data[pos] != reference.get(pos))
2708 resultCollector.fail(
2709 de::toString(commandIndex) + ":" + getName()
2710 + " Result differs from reference, Expected: "
2711 + de::toString(tcu::toHex<8>(reference.get(pos)))
2713 + de::toString(tcu::toHex<8>(data[pos]))
2715 + de::toString(pos));
2722 vkd.unmapMemory(device, *memory);
// Command that fills the shared test buffer by copying from a seed-initialized
// RGBA8 image created during prepare().
2726 class BufferCopyFromImage : public CmdCommand
2729 BufferCopyFromImage (deUint32 seed) : m_seed(seed) {}
2730 ~BufferCopyFromImage (void) {}
2731 const char* getName (void) const { return "BufferCopyFromImage"; }
2733 void logPrepare (TestLog& log, size_t commandIndex) const;
2734 void prepare (PrepareContext& context);
2735 void logSubmit (TestLog& log, size_t commandIndex) const;
2736 void submit (SubmitContext& context);
2737 void verify (VerifyContext& context, size_t commandIndex);
// Seed for the random byte stream used to initialize the source image;
// verify() replays the same stream into the reference memory.
2740 const deUint32 m_seed;
2741 deInt32 m_imageWidth;
2742 deInt32 m_imageHeight;
2743 vk::Move<vk::VkImage> m_srcImage;
2744 vk::Move<vk::VkDeviceMemory> m_memory;
2747 void BufferCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
2749 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to buffer copy." << TestLog::EndMessage;
// Create an optimal-tiled RGBA8 source image sized from the shared buffer,
// upload seed-derived random texel data via a staging buffer, and leave the
// image in TRANSFER_SRC_OPTIMAL so submit() can copy from it.
2752 void BufferCopyFromImage::prepare (PrepareContext& context)
2754 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2755 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2756 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2757 const vk::VkDevice device = context.getContext().getDevice();
2758 const vk::VkQueue queue = context.getContext().getQueue();
2759 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2760 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
// Factor the buffer size into W x H with 4 bytes per texel.
2761 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2763 m_imageWidth = imageSize[0];
2764 m_imageHeight = imageSize[1];
// NOTE(review): several VkImageCreateInfo initializer lines were dropped in extraction.
2767 const vk::VkImageCreateInfo createInfo =
2769 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2773 vk::VK_IMAGE_TYPE_2D,
2774 vk::VK_FORMAT_R8G8B8A8_UNORM,
2776 (deUint32)m_imageWidth,
2777 (deUint32)m_imageHeight,
2780 1, 1, // mipLevels, arrayLayers
2781 vk::VK_SAMPLE_COUNT_1_BIT,
2783 vk::VK_IMAGE_TILING_OPTIMAL,
2784 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2785 vk::VK_SHARING_MODE_EXCLUSIVE,
2787 (deUint32)queueFamilies.size(),
2789 vk::VK_IMAGE_LAYOUT_UNDEFINED
2792 m_srcImage = vk::createImage(vkd, device, &createInfo);
2795 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
// Host-visible staging buffer used to upload the initial image contents.
2798 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2799 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2800 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// Pre-copy barrier: UNDEFINED -> TRANSFER_DST_OPTIMAL for the upload.
2801 const vk::VkImageMemoryBarrier preImageBarrier =
2803 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2807 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2809 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2810 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2812 vk::VK_QUEUE_FAMILY_IGNORED,
2813 vk::VK_QUEUE_FAMILY_IGNORED,
2817 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2819 1, // Mip level count
// Post-copy barrier: TRANSFER_DST_OPTIMAL -> TRANSFER_SRC_OPTIMAL for later reads.
2824 const vk::VkImageMemoryBarrier postImageBarrier =
2826 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2829 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2832 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2833 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2835 vk::VK_QUEUE_FAMILY_IGNORED,
2836 vk::VK_QUEUE_FAMILY_IGNORED,
2840 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2842 1, // Mip level count
2847 const vk::VkBufferImageCopy region =
2852 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2859 (deUint32)m_imageWidth,
2860 (deUint32)m_imageHeight,
// Fill the staging buffer with the pseudo-random byte stream from m_seed.
2866 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2867 de::Random rng (m_seed);
2870 deUint8* const data = (deUint8*)ptr;
2872 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2873 data[ndx] = rng.getUint8();
2876 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2877 vkd.unmapMemory(device, *memory);
// NOTE(review): '®ion' below is a mojibake of '&region'.
2880 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
2881 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
2882 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
2884 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
// queueRun submits and waits; staging resources may then be destroyed safely.
2885 queueRun(vkd, queue, *commandBuffer);
2889 void BufferCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
2891 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from image" << TestLog::EndMessage;
// Record a full-image copy from m_srcImage (left in TRANSFER_SRC_OPTIMAL by
// prepare()) into the shared buffer.
// NOTE(review): parts of the VkBufferImageCopy initializer were dropped in
// extraction, and '®ion' on the last line is a mojibake of '&region'.
2894 void BufferCopyFromImage::submit (SubmitContext& context)
2896 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2897 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2898 const vk::VkBufferImageCopy region =
2903 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2910 (deUint32)m_imageWidth,
2911 (deUint32)m_imageHeight,
2916 vkd.cmdCopyImageToBuffer(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getBuffer(), 1, ®ion);
2919 void BufferCopyFromImage::verify (VerifyContext& context, size_t)
2921 ReferenceMemory& reference (context.getReference());
2922 de::Random rng (m_seed);
2924 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2925 reference.set(ndx, rng.getUint8());
2928 class ImageCopyToBuffer : public CmdCommand
2931 ImageCopyToBuffer (vk::VkImageLayout imageLayout) : m_imageLayout (imageLayout) {}
2932 ~ImageCopyToBuffer (void) {}
2933 const char* getName (void) const { return "BufferCopyToImage"; }
2935 void logPrepare (TestLog& log, size_t commandIndex) const;
2936 void prepare (PrepareContext& context);
2937 void logSubmit (TestLog& log, size_t commandIndex) const;
2938 void submit (SubmitContext& context);
2939 void verify (VerifyContext& context, size_t commandIndex);
2942 vk::VkImageLayout m_imageLayout;
2943 vk::VkDeviceSize m_bufferSize;
2944 vk::Move<vk::VkBuffer> m_dstBuffer;
2945 vk::Move<vk::VkDeviceMemory> m_memory;
2946 vk::VkDeviceSize m_imageMemorySize;
2947 deInt32 m_imageWidth;
2948 deInt32 m_imageHeight;
2951 void ImageCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2953 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for image to buffer copy." << TestLog::EndMessage;
2956 void ImageCopyToBuffer::prepare (PrepareContext& context)
2958 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2959 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2960 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2961 const vk::VkDevice device = context.getContext().getDevice();
2962 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2964 m_imageWidth = context.getImageWidth();
2965 m_imageHeight = context.getImageHeight();
2966 m_bufferSize = 4 * m_imageWidth * m_imageHeight;
2967 m_imageMemorySize = context.getImageMemorySize();
2968 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2969 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2972 void ImageCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2974 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to buffer" << TestLog::EndMessage;
// Record a full-image copy from the shared image (in m_imageLayout) into
// the host-visible destination buffer.
// NOTE(review): parts of the VkBufferImageCopy initializer were dropped in
// extraction, and '®ion' on the last line is a mojibake of '&region'.
2977 void ImageCopyToBuffer::submit (SubmitContext& context)
2979 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2980 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2981 const vk::VkBufferImageCopy region =
2986 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2993 (deUint32)m_imageWidth,
2994 (deUint32)m_imageHeight,
2999 vkd.cmdCopyImageToBuffer(commandBuffer, context.getImage(), m_imageLayout, *m_dstBuffer, 1, ®ion);
// Barrier the transfer write to host-read visibility, then compare the mapped
// readback buffer against the reference image with an exact (zero-threshold)
// image comparison.
3002 void ImageCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
3004 tcu::ResultCollector& resultCollector (context.getResultCollector());
3005 ReferenceMemory& reference (context.getReference());
3006 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3007 const vk::VkDevice device = context.getContext().getDevice();
3008 const vk::VkQueue queue = context.getContext().getQueue();
3009 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3010 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// NOTE(review): pNext/buffer/offset/size initializer lines were dropped in extraction.
3011 const vk::VkBufferMemoryBarrier barrier =
3013 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3016 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3017 vk::VK_ACCESS_HOST_READ_BIT,
3019 vk::VK_QUEUE_FAMILY_IGNORED,
3020 vk::VK_QUEUE_FAMILY_IGNORED,
3026 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3028 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3029 queueRun(vkd, queue, *commandBuffer);
// The raw image memory encoding is not tracked byte-wise here; mark it
// undefined and rely on the image comparison below instead.
3031 reference.setUndefined(0, (size_t)m_imageMemorySize);
3033 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3034 const ConstPixelBufferAccess referenceImage (context.getReferenceImage().getAccess());
// Wrap the mapped bytes as a tightly-packed RGBA8 image for comparison.
3035 const ConstPixelBufferAccess resultImage (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, ptr);
3037 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
// UVec4(0) threshold: every channel of every texel must match exactly.
3039 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), referenceImage, resultImage, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3040 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3042 vkd.unmapMemory(device, *m_memory);
// Command that fills the shared test image by copying from a freshly
// allocated, seed-initialized source buffer.
3046 class ImageCopyFromBuffer : public CmdCommand
3049 ImageCopyFromBuffer (deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
3050 ~ImageCopyFromBuffer (void) {}
3051 const char* getName (void) const { return "ImageCopyFromBuffer"; }
3053 void logPrepare (TestLog& log, size_t commandIndex) const;
3054 void prepare (PrepareContext& context);
3055 void logSubmit (TestLog& log, size_t commandIndex) const;
3056 void submit (SubmitContext& context);
3057 void verify (VerifyContext& context, size_t commandIndex);
// Seed for the random byte stream written to the source buffer; verify()
// replays the same stream into the reference image.
3060 const deUint32 m_seed;
// Layout the shared image is in when the copy is recorded.
3061 const vk::VkImageLayout m_imageLayout;
3062 deInt32 m_imageWidth;
3063 deInt32 m_imageHeight;
3064 vk::VkDeviceSize m_imageMemorySize;
3065 vk::VkDeviceSize m_bufferSize;
3066 vk::Move<vk::VkBuffer> m_srcBuffer;
3067 vk::Move<vk::VkDeviceMemory> m_memory;
3070 void ImageCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
3072 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to image copy. Seed: " << m_seed << TestLog::EndMessage;
3075 void ImageCopyFromBuffer::prepare (PrepareContext& context)
3077 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3078 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3079 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3080 const vk::VkDevice device = context.getContext().getDevice();
3081 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3083 m_imageWidth = context.getImageHeight();
3084 m_imageHeight = context.getImageWidth();
3085 m_imageMemorySize = context.getImageMemorySize();
3086 m_bufferSize = m_imageWidth * m_imageHeight * 4;
3087 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
3088 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
3091 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3092 de::Random rng (m_seed);
3095 deUint8* const data = (deUint8*)ptr;
3097 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
3098 data[ndx] = rng.getUint8();
3101 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3102 vkd.unmapMemory(device, *m_memory);
3106 void ImageCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
3108 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from buffer" << TestLog::EndMessage;
// Record a full-image copy from the seed-initialized source buffer into the
// shared image (in m_imageLayout).
// NOTE(review): parts of the VkBufferImageCopy initializer were dropped in
// extraction, and '®ion' on the last line is a mojibake of '&region'.
3111 void ImageCopyFromBuffer::submit (SubmitContext& context)
3113 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3114 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3115 const vk::VkBufferImageCopy region =
3120 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3127 (deUint32)m_imageWidth,
3128 (deUint32)m_imageHeight,
3133 vkd.cmdCopyBufferToImage(commandBuffer, *m_srcBuffer, context.getImage(), m_imageLayout, 1, ®ion);
3136 void ImageCopyFromBuffer::verify (VerifyContext& context, size_t)
3138 ReferenceMemory& reference (context.getReference());
3139 de::Random rng (m_seed);
3141 reference.setUndefined(0, (size_t)m_imageMemorySize);
3144 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3146 for (deInt32 y = 0; y < m_imageHeight; y++)
3147 for (deInt32 x = 0; x < m_imageWidth; x++)
3149 const deUint8 r8 = rng.getUint8();
3150 const deUint8 g8 = rng.getUint8();
3151 const deUint8 b8 = rng.getUint8();
3152 const deUint8 a8 = rng.getUint8();
3154 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
// Command that fills the shared test image by copying from a seed-initialized
// source image created during prepare().
3159 class ImageCopyFromImage : public CmdCommand
3162 ImageCopyFromImage (deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
3163 ~ImageCopyFromImage (void) {}
3164 const char* getName (void) const { return "ImageCopyFromImage"; }
3166 void logPrepare (TestLog& log, size_t commandIndex) const;
3167 void prepare (PrepareContext& context);
3168 void logSubmit (TestLog& log, size_t commandIndex) const;
3169 void submit (SubmitContext& context);
3170 void verify (VerifyContext& context, size_t commandIndex);
// Seed for the random byte stream used to initialize the source image;
// verify() replays the same stream into the reference image.
3173 const deUint32 m_seed;
// Layout the shared (destination) image is in when the copy is recorded.
3174 const vk::VkImageLayout m_imageLayout;
3175 deInt32 m_imageWidth;
3176 deInt32 m_imageHeight;
3177 vk::VkDeviceSize m_imageMemorySize;
3178 vk::Move<vk::VkImage> m_srcImage;
3179 vk::Move<vk::VkDeviceMemory> m_memory;
3182 void ImageCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3184 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image copy." << TestLog::EndMessage;
// Create an optimal-tiled RGBA8 source image matching the shared image's
// dimensions, upload seed-derived random texel data via a staging buffer, and
// leave the image in TRANSFER_SRC_OPTIMAL so submit() can copy from it.
3187 void ImageCopyFromImage::prepare (PrepareContext& context)
3189 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3190 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3191 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3192 const vk::VkDevice device = context.getContext().getDevice();
3193 const vk::VkQueue queue = context.getContext().getQueue();
3194 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3195 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3197 m_imageWidth = context.getImageWidth();
3198 m_imageHeight = context.getImageHeight();
3199 m_imageMemorySize = context.getImageMemorySize();
// NOTE(review): several VkImageCreateInfo initializer lines were dropped in extraction.
3202 const vk::VkImageCreateInfo createInfo =
3204 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3208 vk::VK_IMAGE_TYPE_2D,
3209 vk::VK_FORMAT_R8G8B8A8_UNORM,
3211 (deUint32)m_imageWidth,
3212 (deUint32)m_imageHeight,
3215 1, 1, // mipLevels, arrayLayers
3216 vk::VK_SAMPLE_COUNT_1_BIT,
3218 vk::VK_IMAGE_TILING_OPTIMAL,
3219 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3220 vk::VK_SHARING_MODE_EXCLUSIVE,
3222 (deUint32)queueFamilies.size(),
3224 vk::VK_IMAGE_LAYOUT_UNDEFINED
3227 m_srcImage = vk::createImage(vkd, device, &createInfo);
3230 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
// Host-visible staging buffer used to upload the initial image contents.
3233 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3234 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3235 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// Pre-copy barrier: UNDEFINED -> TRANSFER_DST_OPTIMAL for the upload.
3236 const vk::VkImageMemoryBarrier preImageBarrier =
3238 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3242 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3244 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3245 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3247 vk::VK_QUEUE_FAMILY_IGNORED,
3248 vk::VK_QUEUE_FAMILY_IGNORED,
3252 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3254 1, // Mip level count
// Post-copy barrier: TRANSFER_DST_OPTIMAL -> TRANSFER_SRC_OPTIMAL for later reads.
3259 const vk::VkImageMemoryBarrier postImageBarrier =
3261 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3264 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3267 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3268 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3270 vk::VK_QUEUE_FAMILY_IGNORED,
3271 vk::VK_QUEUE_FAMILY_IGNORED,
3275 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3277 1, // Mip level count
3282 const vk::VkBufferImageCopy region =
3287 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3294 (deUint32)m_imageWidth,
3295 (deUint32)m_imageHeight,
// Fill the staging buffer with the pseudo-random byte stream from m_seed.
3301 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3302 de::Random rng (m_seed);
3305 deUint8* const data = (deUint8*)ptr;
3307 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
3308 data[ndx] = rng.getUint8();
3311 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3312 vkd.unmapMemory(device, *memory);
// NOTE(review): '®ion' below is a mojibake of '&region'.
3315 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
3316 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
3317 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3319 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
// queueRun submits and waits; staging resources may then be destroyed safely.
3320 queueRun(vkd, queue, *commandBuffer);
3324 void ImageCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3326 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from another image" << TestLog::EndMessage;
// Record a full-extent image-to-image copy from m_srcImage (left in
// TRANSFER_SRC_OPTIMAL by prepare()) into the shared image (in m_imageLayout).
// NOTE(review): parts of the VkImageCopy initializer (offsets, subresource
// fields) were dropped in extraction, and '®ion' on the last line is a
// mojibake of '&region'.
3329 void ImageCopyFromImage::submit (SubmitContext& context)
3331 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3332 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3333 const vk::VkImageCopy region =
3336 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3344 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3351 (deUint32)m_imageWidth,
3352 (deUint32)m_imageHeight,
3357 vkd.cmdCopyImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, ®ion);
3360 void ImageCopyFromImage::verify (VerifyContext& context, size_t)
3362 ReferenceMemory& reference (context.getReference());
3363 de::Random rng (m_seed);
3365 reference.setUndefined(0, (size_t)m_imageMemorySize);
3368 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3370 for (deInt32 y = 0; y < m_imageHeight; y++)
3371 for (deInt32 x = 0; x < m_imageWidth; x++)
3373 const deUint8 r8 = rng.getUint8();
3374 const deUint8 g8 = rng.getUint8();
3375 const deUint8 b8 = rng.getUint8();
3376 const deUint8 a8 = rng.getUint8();
3378 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
// Command that copies the shared test image into a newly created destination
// image for later verification.
3383 class ImageCopyToImage : public CmdCommand
3386 ImageCopyToImage (vk::VkImageLayout imageLayout) : m_imageLayout(imageLayout) {}
3387 ~ImageCopyToImage (void) {}
3388 const char* getName (void) const { return "ImageCopyToImage"; }
3390 void logPrepare (TestLog& log, size_t commandIndex) const;
3391 void prepare (PrepareContext& context);
3392 void logSubmit (TestLog& log, size_t commandIndex) const;
3393 void submit (SubmitContext& context);
3394 void verify (VerifyContext& context, size_t commandIndex);
// Layout the shared (source) image is in when the copy is recorded.
3397 const vk::VkImageLayout m_imageLayout;
3398 deInt32 m_imageWidth;
3399 deInt32 m_imageHeight;
3400 vk::VkDeviceSize m_imageMemorySize;
3401 vk::Move<vk::VkImage> m_dstImage;
3402 vk::Move<vk::VkDeviceMemory> m_memory;
3405 void ImageCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
3407 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image copy." << TestLog::EndMessage;
// Create an optimal-tiled RGBA8 destination image matching the shared image's
// dimensions, bind memory, and transition it from UNDEFINED to
// TRANSFER_DST_OPTIMAL so submit() can copy into it.
3410 void ImageCopyToImage::prepare (PrepareContext& context)
3412 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3413 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3414 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3415 const vk::VkDevice device = context.getContext().getDevice();
3416 const vk::VkQueue queue = context.getContext().getQueue();
3417 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3418 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3420 m_imageWidth = context.getImageWidth();
3421 m_imageHeight = context.getImageHeight();
3422 m_imageMemorySize = context.getImageMemorySize();
// NOTE(review): several VkImageCreateInfo initializer lines were dropped in extraction.
3425 const vk::VkImageCreateInfo createInfo =
3427 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3431 vk::VK_IMAGE_TYPE_2D,
3432 vk::VK_FORMAT_R8G8B8A8_UNORM,
3434 (deUint32)m_imageWidth,
3435 (deUint32)m_imageHeight,
3438 1, 1, // mipLevels, arrayLayers
3439 vk::VK_SAMPLE_COUNT_1_BIT,
3441 vk::VK_IMAGE_TILING_OPTIMAL,
3442 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3443 vk::VK_SHARING_MODE_EXCLUSIVE,
3445 (deUint32)queueFamilies.size(),
3447 vk::VK_IMAGE_LAYOUT_UNDEFINED
3450 m_dstImage = vk::createImage(vkd, device, &createInfo);
3453 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
// One-off command buffer performing the initial layout transition.
3456 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3457 const vk::VkImageMemoryBarrier barrier =
3459 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3463 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3465 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3466 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3468 vk::VK_QUEUE_FAMILY_IGNORED,
3469 vk::VK_QUEUE_FAMILY_IGNORED,
3473 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3475 1, // Mip level count
3481 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
3483 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
// queueRun submits and waits so the image is in TRANSFER_DST_OPTIMAL before use.
3484 queueRun(vkd, queue, *commandBuffer);
// Log the command recorded by submit() (message text must stay as-is).
3488 void ImageCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
3490 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to another image" << TestLog::EndMessage;
3493 void ImageCopyToImage::submit (SubmitContext& context)
3495 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3496 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3497 const vk::VkImageCopy region =
3500 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3508 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3515 (deUint32)m_imageWidth,
3516 (deUint32)m_imageHeight,
3521 vkd.cmdCopyImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
// Read the destination image back into a host-visible buffer and compare it
// pixel-exactly against the reference image.
3524 void ImageCopyToImage::verify (VerifyContext& context, size_t commandIndex)
3526 	tcu::ResultCollector&			resultCollector	(context.getResultCollector());
3527 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3528 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3529 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3530 	const vk::VkDevice				device			= context.getContext().getDevice();
3531 	const vk::VkQueue				queue			= context.getContext().getQueue();
3532 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3533 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3534 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
// Readback buffer: 4 bytes per RGBA8 pixel.
3535 	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3536 	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Transition m_dstImage TRANSFER_DST -> TRANSFER_SRC so it can be read.
3538 	const vk::VkImageMemoryBarrier	imageBarrier	=
3540 		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3543 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3544 		vk::VK_ACCESS_TRANSFER_READ_BIT,
3546 		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3547 		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3549 		vk::VK_QUEUE_FAMILY_IGNORED,
3550 		vk::VK_QUEUE_FAMILY_IGNORED,
3554 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3556 			1,	// Mip level count
// Make the transfer write to dstBuffer visible to host reads.
3561 	const vk::VkBufferMemoryBarrier bufferBarrier =
3563 		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3566 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3567 		vk::VK_ACCESS_HOST_READ_BIT,
3569 		vk::VK_QUEUE_FAMILY_IGNORED,
3570 		vk::VK_QUEUE_FAMILY_IGNORED,
3575 	const vk::VkBufferImageCopy	region =
3580 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3587 			(deUint32)m_imageWidth,
3588 			(deUint32)m_imageHeight,
3593 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
// NOTE(review): "®ion" is mojibake for "&region" — fix the encoding.
3594 	vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
3595 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3598 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
// Submit and wait so the mapped readback below sees the final data.
3599 	queueRun(vkd, queue, *commandBuffer);
3602 	void* const	ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
// Required before host reads of non-coherent memory.
3604 	vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3607 	const deUint8* const			data		= (const deUint8*)ptr;
3608 	const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, data);
3609 	const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
// Zero threshold: the copy must be bit-exact.
3611 	if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3612 		resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3615 	vkd.unmapMemory(device, *memory);
// CmdCommand that blits a freshly created, seeded source image onto the test
// image, either 1:1 (BLIT_SCALE_10) or 2x upscale (BLIT_SCALE_20, source is
// half-size), always with VK_FILTER_NEAREST.
3625 class ImageBlitFromImage : public CmdCommand
3628 	ImageBlitFromImage	(deUint32 seed, BlitScale scale, vk::VkImageLayout imageLayout) : m_seed(seed), m_scale(scale), m_imageLayout(imageLayout) {}
3629 	~ImageBlitFromImage	(void) {}
3630 	const char*			getName	(void) const { return "ImageBlitFromImage"; }
3632 	void				logPrepare	(TestLog& log, size_t commandIndex) const;
3633 	void				prepare		(PrepareContext& context);
3634 	void				logSubmit	(TestLog& log, size_t commandIndex) const;
3635 	void				submit		(SubmitContext& context);
3636 	void				verify		(VerifyContext& context, size_t commandIndex);
// Seed for the deterministic random source data (replayed in verify()).
3639 	const deUint32				m_seed;
3640 	const BlitScale				m_scale;
3641 	const vk::VkImageLayout		m_imageLayout;
// Destination (test image) dimensions, cached in prepare().
3642 	deInt32						m_imageWidth;
3643 	deInt32						m_imageHeight;
3644 	vk::VkDeviceSize			m_imageMemorySize;
// Source image dimensions (equal or half of destination, per m_scale).
3645 	deInt32						m_srcImageWidth;
3646 	deInt32						m_srcImageHeight;
3647 	vk::Move<vk::VkImage>			m_srcImage;
3648 	vk::Move<vk::VkDeviceMemory>	m_memory;
// Log what prepare() allocates for this command (message text must stay as-is).
3651 void ImageBlitFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3653 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image blit." << TestLog::EndMessage;
// Create the blit source image, fill it with seeded random RGBA8 data via a
// staging buffer, and leave it in TRANSFER_SRC_OPTIMAL for submit().
3656 void ImageBlitFromImage::prepare (PrepareContext& context)
3658 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3659 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3660 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3661 	const vk::VkDevice				device			= context.getContext().getDevice();
3662 	const vk::VkQueue				queue			= context.getContext().getQueue();
3663 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3664 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3666 	m_imageWidth		= context.getImageWidth();
3667 	m_imageHeight		= context.getImageHeight();
3668 	m_imageMemorySize	= context.getImageMemorySize();
// Source dimensions: equal to destination for 1:1, half for the 2x upscale.
3670 	if (m_scale == BLIT_SCALE_10)
3672 		m_srcImageWidth		= m_imageWidth;
3673 		m_srcImageHeight	= m_imageHeight;
3675 	else if (m_scale == BLIT_SCALE_20)
3677 		m_srcImageWidth		= m_imageWidth / 2;
3678 		m_srcImageHeight	= m_imageHeight / 2;
3681 		DE_FATAL("Unsupported scale");
// 2D RGBA8 source image, transfer dst (for staging upload) + transfer src (for the blit).
3684 	const vk::VkImageCreateInfo	createInfo =
3686 		vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3690 		vk::VK_IMAGE_TYPE_2D,
3691 		vk::VK_FORMAT_R8G8B8A8_UNORM,
3693 			(deUint32)m_srcImageWidth,
3694 			(deUint32)m_srcImageHeight,
3697 		1, 1, // mipLevels, arrayLayers
3698 		vk::VK_SAMPLE_COUNT_1_BIT,
3700 		vk::VK_IMAGE_TILING_OPTIMAL,
3701 		vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3702 		vk::VK_SHARING_MODE_EXCLUSIVE,
3704 		(deUint32)queueFamilies.size(),
3706 		vk::VK_IMAGE_LAYOUT_UNDEFINED
3709 	m_srcImage = vk::createImage(vkd, device, &createInfo);
3712 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
// Host-visible staging buffer (4 bytes per pixel) for the random upload.
3715 	const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_srcImageWidth * m_srcImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3716 	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3717 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// Barrier 1: UNDEFINED -> TRANSFER_DST_OPTIMAL before the staging copy.
3718 	const vk::VkImageMemoryBarrier	preImageBarrier =
3720 		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3724 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3726 		vk::VK_IMAGE_LAYOUT_UNDEFINED,
3727 		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3729 		vk::VK_QUEUE_FAMILY_IGNORED,
3730 		vk::VK_QUEUE_FAMILY_IGNORED,
3734 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3736 			1,	// Mip level count
// Barrier 2: TRANSFER_DST -> TRANSFER_SRC after the upload, ready for the blit.
3741 	const vk::VkImageMemoryBarrier	postImageBarrier =
3743 		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3746 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3749 		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3750 		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3752 		vk::VK_QUEUE_FAMILY_IGNORED,
3753 		vk::VK_QUEUE_FAMILY_IGNORED,
3757 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3759 			1,	// Mip level count
3764 	const vk::VkBufferImageCopy	region =
3769 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3776 			(deUint32)m_srcImageWidth,
3777 			(deUint32)m_srcImageHeight,
// Fill the staging buffer with the seeded byte stream (replayed in verify()).
3783 		void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_srcImageWidth * m_srcImageHeight);
3784 		de::Random	rng	(m_seed);
3787 			deUint8* const	data = (deUint8*)ptr;
3789 			for (size_t ndx = 0; ndx < (size_t)(4 * m_srcImageWidth * m_srcImageHeight); ndx++)
3790 				data[ndx] = rng.getUint8();
// Flush before the device reads from non-coherent host memory.
3793 		vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_srcImageWidth * m_srcImageHeight);
3794 		vkd.unmapMemory(device, *memory);
3797 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
// NOTE(review): "®ion" is mojibake for "&region" — fix the encoding.
3798 	vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
3799 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3801 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
// Submit and wait; the staging buffer may then be freed safely (RAII above).
3802 	queueRun(vkd, queue, *commandBuffer);
// Log the blit recorded by submit() (message text must stay as-is).
3806 void ImageBlitFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3808 	log << TestLog::Message << commandIndex << ":" << getName() << " Blit from another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
3811 void ImageBlitFromImage::submit (SubmitContext& context)
3813 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3814 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3815 const vk::VkImageBlit region =
3819 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3835 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3849 vkd.cmdBlitImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, ®ion, vk::VK_FILTER_NEAREST);
3852 void ImageBlitFromImage::verify (VerifyContext& context, size_t)
3854 ReferenceMemory& reference (context.getReference());
3855 de::Random rng (m_seed);
3857 reference.setUndefined(0, (size_t)m_imageMemorySize);
3860 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3862 if (m_scale == BLIT_SCALE_10)
3864 for (deInt32 y = 0; y < m_imageHeight; y++)
3865 for (deInt32 x = 0; x < m_imageWidth; x++)
3867 const deUint8 r8 = rng.getUint8();
3868 const deUint8 g8 = rng.getUint8();
3869 const deUint8 b8 = rng.getUint8();
3870 const deUint8 a8 = rng.getUint8();
3872 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3875 else if (m_scale == BLIT_SCALE_20)
3877 tcu::TextureLevel source (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_srcImageWidth, m_srcImageHeight);
3878 const float xscale = ((float)m_srcImageWidth) / (float)m_imageWidth;
3879 const float yscale = ((float)m_srcImageHeight) / (float)m_imageHeight;
3881 for (deInt32 y = 0; y < m_srcImageHeight; y++)
3882 for (deInt32 x = 0; x < m_srcImageWidth; x++)
3884 const deUint8 r8 = rng.getUint8();
3885 const deUint8 g8 = rng.getUint8();
3886 const deUint8 b8 = rng.getUint8();
3887 const deUint8 a8 = rng.getUint8();
3889 source.getAccess().setPixel(UVec4(r8, g8, b8, a8), x, y);
3892 for (deInt32 y = 0; y < m_imageHeight; y++)
3893 for (deInt32 x = 0; x < m_imageWidth; x++)
3894 refAccess.setPixel(source.getAccess().getPixelUint(int(float(x) * xscale), int(float(y) * yscale)), x, y);
3897 DE_FATAL("Unsupported scale");
// CmdCommand that blits the test image onto a newly allocated destination
// image, either 1:1 or 2x upscale (VK_FILTER_NEAREST); verify() reads the
// destination back and compares against the (possibly replicated) reference.
3901 class ImageBlitToImage : public CmdCommand
3904 	ImageBlitToImage	(BlitScale scale, vk::VkImageLayout imageLayout) : m_scale(scale), m_imageLayout(imageLayout) {}
3905 	~ImageBlitToImage	(void) {}
3906 	const char*			getName	(void) const { return "ImageBlitToImage"; }
3908 	void				logPrepare	(TestLog& log, size_t commandIndex) const;
3909 	void				prepare		(PrepareContext& context);
3910 	void				logSubmit	(TestLog& log, size_t commandIndex) const;
3911 	void				submit		(SubmitContext& context);
3912 	void				verify		(VerifyContext& context, size_t commandIndex);
3915 	const BlitScale				m_scale;
3916 	const vk::VkImageLayout		m_imageLayout;
// Source (test image) dimensions, cached in prepare().
3917 	deInt32						m_imageWidth;
3918 	deInt32						m_imageHeight;
3919 	vk::VkDeviceSize			m_imageMemorySize;
// Destination dimensions (equal or double the source, per m_scale).
3920 	deInt32						m_dstImageWidth;
3921 	deInt32						m_dstImageHeight;
3922 	vk::Move<vk::VkImage>			m_dstImage;
3923 	vk::Move<vk::VkDeviceMemory>	m_memory;
// Log what prepare() allocates for this command (message text must stay as-is).
3926 void ImageBlitToImage::logPrepare (TestLog& log, size_t commandIndex) const
3928 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image blit." << TestLog::EndMessage;
// Create the blit destination image (same size as the test image, or double
// for the 2x case), bind memory, and transition it to TRANSFER_DST_OPTIMAL.
3931 void ImageBlitToImage::prepare (PrepareContext& context)
3933 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3934 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3935 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3936 	const vk::VkDevice				device			= context.getContext().getDevice();
3937 	const vk::VkQueue				queue			= context.getContext().getQueue();
3938 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3939 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3941 	m_imageWidth		= context.getImageWidth();
3942 	m_imageHeight		= context.getImageHeight();
3943 	m_imageMemorySize	= context.getImageMemorySize();
3945 	if (m_scale == BLIT_SCALE_10)
3947 		m_dstImageWidth		= context.getImageWidth();
3948 		m_dstImageHeight	= context.getImageHeight();
3950 	else if (m_scale == BLIT_SCALE_20)
3952 		m_dstImageWidth		= context.getImageWidth() * 2;
3953 		m_dstImageHeight	= context.getImageHeight() * 2;
// NOTE(review): message has a typo — "Unsupportd" should be "Unsupported"
// (runtime string; left unchanged here, fix in a code change).
3956 		DE_FATAL("Unsupportd blit scale");
// 2D RGBA8 destination, transfer dst (blit target) + transfer src (verify readback).
3959 	const vk::VkImageCreateInfo	createInfo =
3961 		vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3965 		vk::VK_IMAGE_TYPE_2D,
3966 		vk::VK_FORMAT_R8G8B8A8_UNORM,
3968 			(deUint32)m_dstImageWidth,
3969 			(deUint32)m_dstImageHeight,
3972 		1, 1, // mipLevels, arrayLayers
3973 		vk::VK_SAMPLE_COUNT_1_BIT,
3975 		vk::VK_IMAGE_TILING_OPTIMAL,
3976 		vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3977 		vk::VK_SHARING_MODE_EXCLUSIVE,
3979 		(deUint32)queueFamilies.size(),
3981 		vk::VK_IMAGE_LAYOUT_UNDEFINED
3984 	m_dstImage = vk::createImage(vkd, device, &createInfo);
3987 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
// One-shot command buffer: layout transition UNDEFINED -> TRANSFER_DST_OPTIMAL.
3990 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3991 	const vk::VkImageMemoryBarrier barrier =
3993 		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3997 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3999 		vk::VK_IMAGE_LAYOUT_UNDEFINED,
4000 		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4002 		vk::VK_QUEUE_FAMILY_IGNORED,
4003 		vk::VK_QUEUE_FAMILY_IGNORED,
4007 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
4009 			1,	// Mip level count
4015 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
4017 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4018 	queueRun(vkd, queue, *commandBuffer);
// Log the blit recorded by submit() (message text must stay as-is).
4022 void ImageBlitToImage::logSubmit (TestLog& log, size_t commandIndex) const
4024 	log << TestLog::Message << commandIndex << ":" << getName() << " Blit image to another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
4027 void ImageBlitToImage::submit (SubmitContext& context)
4029 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4030 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4031 const vk::VkImageBlit region =
4035 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4051 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4065 vkd.cmdBlitImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion, vk::VK_FILTER_NEAREST);
// Read the blit destination back and compare against the reference: directly
// for 1:1, or against a 2x nearest-neighbor replication of the reference for
// the upscaled case.
4068 void ImageBlitToImage::verify (VerifyContext& context, size_t commandIndex)
4070 	tcu::ResultCollector&			resultCollector	(context.getResultCollector());
4071 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
4072 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
4073 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
4074 	const vk::VkDevice				device			= context.getContext().getDevice();
4075 	const vk::VkQueue				queue			= context.getContext().getQueue();
4076 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
4077 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4078 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
// Readback buffer sized for the destination image (4 bytes per pixel).
4079 	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_dstImageWidth * m_dstImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4080 	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Transition m_dstImage TRANSFER_DST -> TRANSFER_SRC for the readback copy.
4082 	const vk::VkImageMemoryBarrier	imageBarrier	=
4084 		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4087 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4088 		vk::VK_ACCESS_TRANSFER_READ_BIT,
4090 		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4091 		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4093 		vk::VK_QUEUE_FAMILY_IGNORED,
4094 		vk::VK_QUEUE_FAMILY_IGNORED,
4098 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
4100 			1,	// Mip level count
// Make the transfer write to dstBuffer visible to host reads.
4105 	const vk::VkBufferMemoryBarrier bufferBarrier =
4107 		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4110 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4111 		vk::VK_ACCESS_HOST_READ_BIT,
4113 		vk::VK_QUEUE_FAMILY_IGNORED,
4114 		vk::VK_QUEUE_FAMILY_IGNORED,
4119 	const vk::VkBufferImageCopy	region =
4124 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
4131 			(deUint32)m_dstImageWidth,
4132 			(deUint32)m_dstImageHeight,
4137 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
// NOTE(review): "®ion" is mojibake for "&region" — fix the encoding.
4138 	vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
4139 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4142 	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4143 	queueRun(vkd, queue, *commandBuffer);
4146 	void* const	ptr = mapMemory(vkd, device, *memory, 4 * m_dstImageWidth * m_dstImageHeight);
// Required before host reads of non-coherent memory.
4148 	vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_dstImageWidth * m_dstImageHeight);
4150 	if (m_scale == BLIT_SCALE_10)
4152 		const deUint8* const			data		= (const deUint8*)ptr;
4153 		const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4154 		const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
4156 		if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4157 			resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4159 	else if (m_scale == BLIT_SCALE_20)
4161 		const deUint8* const			data		= (const deUint8*)ptr;
4162 		const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
// Expected image: each reference pixel replicated into a 2x2 block
// (nearest-neighbor upscale, matching VK_FILTER_NEAREST).
4163 		tcu::TextureLevel				reference	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1);
4166 			const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
4168 			for (deInt32 y = 0; y < m_dstImageHeight; y++)
4169 			for (deInt32 x = 0; x < m_dstImageWidth; x++)
4171 				reference.getAccess().setPixel(refAccess.getPixel(x/2, y/2), x, y);
4175 		if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), reference.getAccess(), resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4176 			resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4179 		DE_FATAL("Unknown scale");
4181 	vkd.unmapMemory(device, *memory);
// Wrapper around PrepareContext that additionally exposes the render pass,
// framebuffer, and render-target dimensions to RenderPassCommands during
// their prepare() phase. Non-owning: all handles are borrowed from the
// enclosing SubmitRenderPass.
4185 class PrepareRenderPassContext
4188 	PrepareRenderPassContext	(PrepareContext&	context,
4189 								 vk::VkRenderPass	renderPass,
4190 								 vk::VkFramebuffer	framebuffer,
4191 								 deInt32			targetWidth,
4192 								 deInt32			targetHeight)
4193 		: m_context			(context)
4194 		, m_renderPass		(renderPass)
4195 		, m_framebuffer		(framebuffer)
4196 		, m_targetWidth		(targetWidth)
4197 		, m_targetHeight	(targetHeight)
// Pass-through accessors delegating to the wrapped PrepareContext.
4201 	const Memory&									getMemory				(void) const { return m_context.getMemory(); }
4202 	const Context&									getContext				(void) const { return m_context.getContext(); }
4203 	const vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection		(void) const { return m_context.getBinaryCollection(); }
4205 	vk::VkBuffer					getBuffer		(void) const { return m_context.getBuffer(); }
4206 	vk::VkDeviceSize				getBufferSize	(void) const { return m_context.getBufferSize(); }
4208 	vk::VkImage						getImage		(void) const { return m_context.getImage(); }
4209 	deInt32							getImageWidth	(void) const { return m_context.getImageWidth(); }
4210 	deInt32							getImageHeight	(void) const { return m_context.getImageHeight(); }
4211 	vk::VkImageLayout				getImageLayout	(void) const { return m_context.getImageLayout(); }
// Render-target specific accessors.
4213 	deInt32							getTargetWidth	(void) const { return m_targetWidth; }
4214 	deInt32							getTargetHeight	(void) const { return m_targetHeight; }
4216 	vk::VkRenderPass				getRenderPass	(void) const { return m_renderPass; }
4219 	PrepareContext&			m_context;
4220 	const vk::VkRenderPass	m_renderPass;
4221 	const vk::VkFramebuffer	m_framebuffer;
4222 	const deInt32			m_targetWidth;
4223 	const deInt32			m_targetHeight;
// Wrapper around VerifyContext that adds a CPU-side reference render target
// (RGBA8) the same size as the framebuffer, which RenderPassCommands update
// during verification.
4226 class VerifyRenderPassContext
4229 	VerifyRenderPassContext	(VerifyContext&	context,
4230 							 deInt32		targetWidth,
4231 							 deInt32		targetHeight)
4232 		: m_context			(context)
4233 		, m_referenceTarget	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), targetWidth, targetHeight)
// Pass-through accessors delegating to the wrapped VerifyContext.
4237 	const Context&			getContext			(void) const { return m_context.getContext(); }
4238 	TestLog&				getLog				(void) const { return m_context.getLog(); }
4239 	tcu::ResultCollector&	getResultCollector	(void) const { return m_context.getResultCollector(); }
// CPU reference image for the render target, mutated by command verify()s.
4241 	TextureLevel&	getReferenceTarget	(void) { return m_referenceTarget; }
4243 	ReferenceMemory&	getReference		(void) { return m_context.getReference(); }
4244 	TextureLevel&		getReferenceImage	(void) { return m_context.getReferenceImage();}
4247 	VerifyContext&	m_context;
4248 	TextureLevel	m_referenceTarget;
// Abstract base for commands that execute inside a render pass instance.
// Mirrors CmdCommand but prepare()/verify() receive the render-pass-aware
// context wrappers. All hooks default to no-ops so subclasses override only
// the phases they need.
4252 class RenderPassCommand
4255 	virtual				~RenderPassCommand	(void) {}
4256 	virtual const char*	getName				(void) const = 0;
4258 	// Log things that are done during prepare
4259 	virtual void		logPrepare			(TestLog&, size_t) const {}
4260 	// Log submitted calls etc.
4261 	virtual void		logSubmit			(TestLog&, size_t) const {}
4263 	// Allocate vulkan resources and prepare for submit.
4264 	virtual void		prepare				(PrepareRenderPassContext&) {}
4266 	// Submit commands to command buffer.
4267 	virtual void		submit				(SubmitContext&) {}
// Update reference state / check results after execution.
4270 	virtual void		verify				(VerifyRenderPassContext&, size_t) {}
// CmdCommand that owns a render pass + framebuffer + color target and runs a
// sequence of RenderPassCommands inside a single render pass instance.
// verify() replays each sub-command's verification against a CPU reference
// target and then compares the GPU color target against it.
4273 class SubmitRenderPass : public CmdCommand
4276 	SubmitRenderPass	(const vector<RenderPassCommand*>& commands);
4277 	~SubmitRenderPass	(void) {}
4278 	const char*			getName				(void) const { return "SubmitRenderPass"; }
4280 	void				logPrepare			(TestLog&, size_t) const;
4281 	void				logSubmit			(TestLog&, size_t) const;
4283 	void				prepare				(PrepareContext&);
4284 	void				submit				(SubmitContext&);
4286 	void				verify				(VerifyContext&, size_t);
// Fixed-size color render target (see constructor: 256x256).
4289 	const deInt32					m_targetWidth;
4290 	const deInt32					m_targetHeight;
// Vulkan objects created in prepare(), destroyed automatically (RAII).
4291 	vk::Move<vk::VkRenderPass>		m_renderPass;
4292 	vk::Move<vk::VkDeviceMemory>	m_colorTargetMemory;
4293 	de::MovePtr<vk::Allocation>		m_colorTargetMemory2;
4294 	vk::Move<vk::VkImage>			m_colorTarget;
4295 	vk::Move<vk::VkImageView>		m_colorTargetView;
4296 	vk::Move<vk::VkFramebuffer>		m_framebuffer;
4297 	vector<RenderPassCommand*>		m_commands;
// Render target size is fixed at 256x256.
// NOTE(review): the sub-command pointers are copied into m_commands;
// presumably this object takes ownership — confirm against the destructor
// elsewhere in the file.
4300 SubmitRenderPass::SubmitRenderPass (const vector<RenderPassCommand*>& commands)
4301 	: m_targetWidth		(256)
4302 	, m_targetHeight	(256)
4303 	, m_commands		(commands)
4307 void SubmitRenderPass::logPrepare (TestLog& log, size_t commandIndex) const
4309 const string sectionName (de::toString(commandIndex) + ":" + getName());
4310 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4312 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4314 RenderPassCommand& command = *m_commands[cmdNdx];
4315 command.logPrepare(log, cmdNdx);
4319 void SubmitRenderPass::logSubmit (TestLog& log, size_t commandIndex) const
4321 const string sectionName (de::toString(commandIndex) + ":" + getName());
4322 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4324 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4326 RenderPassCommand& command = *m_commands[cmdNdx];
4327 command.logSubmit(log, cmdNdx);
// Build all render-pass state: color target image + memory, image view,
// render pass (single subpass, one color attachment), and framebuffer; then
// let each contained RenderPassCommand prepare with that state.
4331 void SubmitRenderPass::prepare (PrepareContext& context)
4333 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
4334 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
4335 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
4336 	const vk::VkDevice				device			= context.getContext().getDevice();
4337 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
// Single color attachment in COLOR_ATTACHMENT_OPTIMAL during the subpass.
4339 	const vk::VkAttachmentReference	colorAttachments[] =
4341 		{ 0, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }
4343 	const vk::VkSubpassDescription	subpass =
4346 		vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
4351 		DE_LENGTH_OF_ARRAY(colorAttachments),
// Attachment: cleared on load, stored on finish, ends in TRANSFER_SRC so
// verify() can copy the result out.
4358 	const vk::VkAttachmentDescription attachment =
4361 		vk::VK_FORMAT_R8G8B8A8_UNORM,
4362 		vk::VK_SAMPLE_COUNT_1_BIT,
4364 		vk::VK_ATTACHMENT_LOAD_OP_CLEAR,
4365 		vk::VK_ATTACHMENT_STORE_OP_STORE,
4367 		vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
4368 		vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,
4370 		vk::VK_IMAGE_LAYOUT_UNDEFINED,
4371 		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
// Color target image: render target + transfer source for readback.
4374 		const vk::VkImageCreateInfo createInfo =
4376 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
4380 			vk::VK_IMAGE_TYPE_2D,
4381 			vk::VK_FORMAT_R8G8B8A8_UNORM,
4382 			{ (deUint32)m_targetWidth, (deUint32)m_targetHeight, 1u },
4385 			vk::VK_SAMPLE_COUNT_1_BIT,
4386 			vk::VK_IMAGE_TILING_OPTIMAL,
4387 			vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
4388 			vk::VK_SHARING_MODE_EXCLUSIVE,
4389 			(deUint32)queueFamilies.size(),
4391 			vk::VK_IMAGE_LAYOUT_UNDEFINED
4394 		m_colorTarget = vk::createImage(vkd, device, &createInfo);
4397 	m_colorTargetMemory = bindImageMemory(vki, vkd, physicalDevice, device, *m_colorTarget, 0);
// 2D view of the color target with identity swizzle.
4400 		const vk::VkImageViewCreateInfo createInfo =
4402 			vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
4407 			vk::VK_IMAGE_VIEW_TYPE_2D,
4408 			vk::VK_FORMAT_R8G8B8A8_UNORM,
4410 				vk::VK_COMPONENT_SWIZZLE_R,
4411 				vk::VK_COMPONENT_SWIZZLE_G,
4412 				vk::VK_COMPONENT_SWIZZLE_B,
4413 				vk::VK_COMPONENT_SWIZZLE_A
4416 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
4424 		m_colorTargetView = vk::createImageView(vkd, device, &createInfo);
// Render pass with the single attachment and subpass declared above.
4427 		const vk::VkRenderPassCreateInfo createInfo =
4429 			vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
4443 		m_renderPass = vk::createRenderPass(vkd, device, &createInfo);
// Framebuffer binding the color target view to the render pass.
4447 		const vk::VkImageView imageViews[] =
4451 		const vk::VkFramebufferCreateInfo createInfo =
4453 			vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
4458 			DE_LENGTH_OF_ARRAY(imageViews),
4460 			(deUint32)m_targetWidth,
4461 			(deUint32)m_targetHeight,
4465 		m_framebuffer = vk::createFramebuffer(vkd, device, &createInfo);
// Finally give every sub-command a chance to create its own resources.
4469 		PrepareRenderPassContext renderpassContext (context, *m_renderPass, *m_framebuffer, m_targetWidth, m_targetHeight);
4471 		for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4473 			RenderPassCommand& command = *m_commands[cmdNdx];
4474 			command.prepare(renderpassContext);
// Record the render pass instance: begin (clearing to opaque black), let each
// sub-command record its draw calls inline, then end the render pass.
4479 void SubmitRenderPass::submit (SubmitContext& context)
4481 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
4482 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
// Clear color matches the reference clear in verify() (0,0,0,1).
4483 	const vk::VkClearValue		clearValue		= vk::makeClearValueColorF32(0.0f, 0.0f, 0.0f, 1.0f);
4485 	const vk::VkRenderPassBeginInfo	beginInfo =
4487 		vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
// Render area covers the whole target.
4493 		{ { 0, 0 }, { (deUint32)m_targetWidth, (deUint32)m_targetHeight } },
4498 	vkd.cmdBeginRenderPass(commandBuffer, &beginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
4500 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4502 		RenderPassCommand& command = *m_commands[cmdNdx];
4504 		command.submit(context);
4507 	vkd.cmdEndRenderPass(commandBuffer);
// Verify the render pass result: build a CPU-side reference image by
// replaying each child command's verify(), then read the color target
// back via a transfer-to-buffer copy and compare pixel-exact
// (threshold 0) against the reference.
4510 void SubmitRenderPass::verify (VerifyContext& context, size_t commandIndex)
4512 TestLog& log (context.getLog());
4513 tcu::ResultCollector& resultCollector (context.getResultCollector());
4514 const string sectionName (de::toString(commandIndex) + ":" + getName());
4515 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4516 VerifyRenderPassContext verifyContext (context, m_targetWidth, m_targetHeight);
// Reference starts as the clear color used at cmdBeginRenderPass time.
4518 tcu::clear(verifyContext.getReferenceTarget().getAccess(), Vec4(0.0f, 0.0f, 0.0f, 1.0f));
4520 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4522 RenderPassCommand& command = *m_commands[cmdNdx];
4523 command.verify(verifyContext, cmdNdx);
// Readback resources: host-visible buffer sized 4 bytes/pixel (RGBA8).
4527 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4528 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4529 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4530 const vk::VkDevice device = context.getContext().getDevice();
4531 const vk::VkQueue queue = context.getContext().getQueue();
4532 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
4533 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4534 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
4535 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_targetWidth * m_targetHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4536 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
// Make color-attachment writes available/visible to the transfer read.
// Layout fields are both TRANSFER_SRC_OPTIMAL — presumably the image was
// already transitioned by the render pass finalLayout; confirm upstream.
4538 const vk::VkImageMemoryBarrier imageBarrier =
4540 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4543 vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
4544 vk::VK_ACCESS_TRANSFER_READ_BIT,
4546 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4547 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4549 vk::VK_QUEUE_FAMILY_IGNORED,
4550 vk::VK_QUEUE_FAMILY_IGNORED,
4554 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4556 1, // Mip level count
// Make the transfer write visible to host reads after submission.
4561 const vk::VkBufferMemoryBarrier bufferBarrier =
4563 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4566 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4567 vk::VK_ACCESS_HOST_READ_BIT,
4569 vk::VK_QUEUE_FAMILY_IGNORED,
4570 vk::VK_QUEUE_FAMILY_IGNORED,
4575 const vk::VkBufferImageCopy region =
4580 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4587 (deUint32)m_targetWidth,
4588 (deUint32)m_targetHeight,
4593 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
// NOTE(review): "®ion" below looks like mojibake for "&region"
// (mis-decoded "&reg"); confirm against the upstream source before use.
4594 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_colorTarget, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
4595 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4598 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4599 queueRun(vkd, queue, *commandBuffer);
// Map, invalidate (memory may be non-coherent), compare, unmap.
4602 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_targetWidth * m_targetHeight);
4604 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_targetWidth * m_targetHeight);
4607 const deUint8* const data = (const deUint8*)ptr;
4608 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_targetWidth, m_targetHeight, 1, data);
4609 const ConstPixelBufferAccess& refAccess (verifyContext.getReferenceTarget().getAccess());
// Zero threshold: result must match the reference exactly.
4611 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4612 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4615 vkd.unmapMemory(device, *memory);
// Render-pass command that draws white points using the test buffer
// either as a vertex buffer (coords read as R8G8_UNORM attributes) or
// as a 16-bit index buffer. Pipeline objects are built in prepare().
4620 class RenderBuffer : public RenderPassCommand
4625 RENDERAS_VERTEX_BUFFER,
4626 RENDERAS_INDEX_BUFFER,
4628 RenderBuffer (RenderAs renderAs) : m_renderAs(renderAs) {}
4629 ~RenderBuffer (void) {}
4631 const char* getName (void) const { return "RenderBuffer"; }
4632 void logPrepare (TestLog&, size_t) const;
4633 void logSubmit (TestLog&, size_t) const;
4634 void prepare (PrepareRenderPassContext&);
4635 void submit (SubmitContext& context);
4636 void verify (VerifyRenderPassContext&, size_t);
4639 const RenderAs m_renderAs;
4640 vk::Move<vk::VkPipeline> m_pipeline;
4641 vk::Move<vk::VkPipelineLayout> m_pipelineLayout;
// Captured at prepare() time; used by verify() to know how many
// coordinate pairs the buffer holds.
4642 vk::VkDeviceSize m_bufferSize;
// Select the vertex shader binary matching the render mode.
4644 static const vk::ProgramBinary& getVertexShader (const vk::ProgramCollection<vk::ProgramBinary>& collections, RenderAs renderAs)
4648 case RENDERAS_VERTEX_BUFFER:
4649 return collections.get("vertex-buffer.vert");
4651 case RENDERAS_INDEX_BUFFER:
4652 return collections.get("index-buffer.vert");
4655 DE_FATAL("Unknown renderAs");
4656 return collections.get("");
// Log what prepare() will do for this command index.
4661 void RenderBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4663 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
// Log what submit() will record for this command index.
4666 void RenderBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4668 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
// Build the graphics pipeline used by submit(): point-list topology,
// no depth/stencil, no blending, full-target viewport/scissor.
// Vertex input state is only wired up in vertex-buffer mode; in index
// mode the vertex shader generates positions itself.
4671 void RenderBuffer::prepare (PrepareRenderPassContext& context)
4673 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4674 const vk::VkDevice device = context.getContext().getDevice();
4675 const vk::VkRenderPass renderPass = context.getRenderPass();
4676 const deUint32 subpass = 0;
4677 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, getVertexShader(context.getBinaryCollection(), m_renderAs), 0));
4678 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
// Remember buffer size for verify(): one vertex / index per 2 bytes.
4680 m_bufferSize = context.getBufferSize();
// Empty pipeline layout: the shaders use no descriptors.
4683 const vk::VkPipelineLayoutCreateInfo createInfo =
4685 vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
4694 m_pipelineLayout = vk::createPipelineLayout(vkd, device, &createInfo);
4698 const vk::VkPipelineShaderStageCreateInfo shaderStages[] =
4701 vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4704 vk::VK_SHADER_STAGE_VERTEX_BIT,
4705 *vertexShaderModule,
4710 vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4713 vk::VK_SHADER_STAGE_FRAGMENT_BIT,
4714 *fragmentShaderModule,
// Depth/stencil effectively disabled: ALWAYS compares, KEEP ops.
4719 const vk::VkPipelineDepthStencilStateCreateInfo depthStencilState =
4721 vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
4726 vk::VK_COMPARE_OP_ALWAYS,
4730 vk::VK_STENCIL_OP_KEEP,
4731 vk::VK_STENCIL_OP_KEEP,
4732 vk::VK_STENCIL_OP_KEEP,
4733 vk::VK_COMPARE_OP_ALWAYS,
4739 vk::VK_STENCIL_OP_KEEP,
4740 vk::VK_STENCIL_OP_KEEP,
4741 vk::VK_STENCIL_OP_KEEP,
4742 vk::VK_COMPARE_OP_ALWAYS,
// Vertex data: packed (x, y) byte pairs read as R8G8_UNORM.
4750 const vk::VkVertexInputBindingDescription vertexBindingDescriptions[] =
4755 vk::VK_VERTEX_INPUT_RATE_VERTEX
4758 const vk::VkVertexInputAttributeDescription vertexAttributeDescriptions[] =
4763 vk::VK_FORMAT_R8G8_UNORM,
// Vertex input only applies in vertex-buffer mode.
4767 const vk::VkPipelineVertexInputStateCreateInfo vertexInputState =
4769 vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
4773 m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexBindingDescriptions) : 0u,
4774 m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexBindingDescriptions : DE_NULL,
4776 m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexAttributeDescriptions) : 0u,
4777 m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexAttributeDescriptions : DE_NULL,
4779 const vk::VkPipelineInputAssemblyStateCreateInfo inputAssemblyState =
4781 vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
4784 vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
// Static full-target viewport and scissor.
4787 const vk::VkViewport viewports[] =
4789 { 0.0f, 0.0f, (float)context.getTargetWidth(), (float)context.getTargetHeight(), 0.0f, 1.0f }
4791 const vk::VkRect2D scissors[] =
4793 { { 0, 0 }, { (deUint32)context.getTargetWidth(), (deUint32)context.getTargetHeight() } }
4795 const vk::VkPipelineViewportStateCreateInfo viewportState =
4797 vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
4800 DE_LENGTH_OF_ARRAY(viewports),
4802 DE_LENGTH_OF_ARRAY(scissors),
4805 const vk::VkPipelineRasterizationStateCreateInfo rasterState =
4807 vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
4813 vk::VK_POLYGON_MODE_FILL,
4814 vk::VK_CULL_MODE_NONE,
4815 vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,
// Single-sample rendering, all samples enabled.
4822 const vk::VkSampleMask sampleMask = ~0u;
4823 const vk::VkPipelineMultisampleStateCreateInfo multisampleState =
4825 vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
4829 vk::VK_SAMPLE_COUNT_1_BIT,
// Pass-through blend state (ONE/ZERO, ADD), all channels written.
4836 const vk::VkPipelineColorBlendAttachmentState attachments[] =
4840 vk::VK_BLEND_FACTOR_ONE,
4841 vk::VK_BLEND_FACTOR_ZERO,
4842 vk::VK_BLEND_OP_ADD,
4843 vk::VK_BLEND_FACTOR_ONE,
4844 vk::VK_BLEND_FACTOR_ZERO,
4845 vk::VK_BLEND_OP_ADD,
4846 (vk::VK_COLOR_COMPONENT_R_BIT|
4847 vk::VK_COLOR_COMPONENT_G_BIT|
4848 vk::VK_COLOR_COMPONENT_B_BIT|
4849 vk::VK_COLOR_COMPONENT_A_BIT)
4852 const vk::VkPipelineColorBlendStateCreateInfo colorBlendState =
4854 vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
4859 vk::VK_LOGIC_OP_COPY,
4860 DE_LENGTH_OF_ARRAY(attachments),
4862 { 0.0f, 0.0f, 0.0f, 0.0f }
4864 const vk::VkGraphicsPipelineCreateInfo createInfo =
4866 vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
4870 DE_LENGTH_OF_ARRAY(shaderStages),
4874 &inputAssemblyState,
4889 m_pipeline = vk::createGraphicsPipeline(vkd, device, 0, &createInfo);
// Record the draw: bind the pipeline, then either bind the test buffer
// as vertex data and draw bufferSize/2 points (2 bytes per vertex), or
// bind it as a UINT16 index buffer and issue an indexed draw of
// bufferSize/2 indices.
4893 void RenderBuffer::submit (SubmitContext& context)
4895 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4896 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4897 const vk::VkDeviceSize offset = 0;
4898 const vk::VkBuffer buffer = context.getBuffer();
4900 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4902 if (m_renderAs == RENDERAS_VERTEX_BUFFER)
4904 vkd.cmdBindVertexBuffers(commandBuffer, 0, 1, &buffer, &offset);
4905 vkd.cmdDraw(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0);
4907 else if (m_renderAs == RENDERAS_INDEX_BUFFER)
4909 vkd.cmdBindIndexBuffer(commandBuffer, context.getBuffer(), 0, vk::VK_INDEX_TYPE_UINT16);
4910 vkd.cmdDrawIndexed(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0, 0);
4913 DE_FATAL("Unknown renderAs");
// Update the reference image: each byte pair in the reference buffer is
// interpreted as an (x, y) pixel coordinate that the draw painted white.
// NOTE(review): only the vertex-buffer interpretation is visible here;
// presumably the same mapping holds for index mode — confirm upstream.
4916 void RenderBuffer::verify (VerifyRenderPassContext& context, size_t)
4918 for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
4920 const deUint8 x = context.getReference().get(pos * 2);
4921 const deUint8 y = context.getReference().get((pos * 2) + 1);
4923 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
// Fragment of the Op enumeration: the atomic operations the random
// command-sequence generator can emit, grouped by the object and stage
// (host, command buffer, render pass) they act on. The trailing
// STAGE_COMMAND_BUFFER constant belongs to the separate Stage enum
// whose opening lines are elided in this excerpt.
4941 OP_BUFFER_BINDMEMORY,
// Queue / device synchronization points.
4943 OP_QUEUE_WAIT_FOR_IDLE,
4944 OP_DEVICE_WAIT_FOR_IDLE,
4946 OP_COMMAND_BUFFER_BEGIN,
4947 OP_COMMAND_BUFFER_END,
4949 // Buffer transfer operations
4953 OP_BUFFER_COPY_TO_BUFFER,
4954 OP_BUFFER_COPY_FROM_BUFFER,
4956 OP_BUFFER_COPY_TO_IMAGE,
4957 OP_BUFFER_COPY_FROM_IMAGE,
// Image operations.
4961 OP_IMAGE_BINDMEMORY,
4963 OP_IMAGE_TRANSITION_LAYOUT,
4965 OP_IMAGE_COPY_TO_BUFFER,
4966 OP_IMAGE_COPY_FROM_BUFFER,
4968 OP_IMAGE_COPY_TO_IMAGE,
4969 OP_IMAGE_COPY_FROM_IMAGE,
4971 OP_IMAGE_BLIT_TO_IMAGE,
4972 OP_IMAGE_BLIT_FROM_IMAGE,
// Pipeline barriers at global, buffer and image granularity.
4976 OP_PIPELINE_BARRIER_GLOBAL,
4977 OP_PIPELINE_BARRIER_BUFFER,
4978 OP_PIPELINE_BARRIER_IMAGE,
4980 // Renderpass operations
4981 OP_RENDERPASS_BEGIN,
4984 // Commands inside render pass
4985 OP_RENDER_VERTEX_BUFFER,
4986 OP_RENDER_INDEX_BUFFER
4992 STAGE_COMMAND_BUFFER,
// Mask of every access flag this test model treats as a write.
// Used by isWriteAccess() and the CacheState availability tracking.
4997 vk::VkAccessFlags getWriteAccessFlags (void)
4999 return vk::VK_ACCESS_SHADER_WRITE_BIT
5000 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
5001 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
5002 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
5003 | vk::VK_ACCESS_HOST_WRITE_BIT
5004 | vk::VK_ACCESS_MEMORY_WRITE_BIT;
// True iff 'access' is one of the write accesses in getWriteAccessFlags().
5007 bool isWriteAccess (vk::VkAccessFlagBits access)
5009 return (getWriteAccessFlags() & access) != 0;
// Model of Vulkan's memory-dependency state: tracks, per pipeline stage,
// which writes are not yet available, which operations are not yet
// visible, which stages have incomplete work, and whether the latest
// image layout transition is visible. Used to decide which barrier
// operations are valid/needed in the generated command sequence.
5015 CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses);
// True if an operation with 'access' at 'stage' would be correctly
// synchronized given the current tracked state.
5017 bool isValid (vk::VkPipelineStageFlagBits stage,
5018 vk::VkAccessFlagBits access) const;
// Record that an operation with 'access' was performed at 'stage'.
5020 void perform (vk::VkPipelineStageFlagBits stage,
5021 vk::VkAccessFlagBits access);
5023 void submitCommandBuffer (void);
5024 void waitForIdle (void);
// Compute the stage/access masks for a barrier that would make the
// state fully clean.
5026 void getFullBarrier (vk::VkPipelineStageFlags& srcStages,
5027 vk::VkAccessFlags& srcAccesses,
5028 vk::VkPipelineStageFlags& dstStages,
5029 vk::VkAccessFlags& dstAccesses) const;
// Apply an ordinary memory barrier to the tracked state.
5031 void barrier (vk::VkPipelineStageFlags srcStages,
5032 vk::VkAccessFlags srcAccesses,
5033 vk::VkPipelineStageFlags dstStages,
5034 vk::VkAccessFlags dstAccesses);
// Apply a barrier that also performs an image layout transition.
5036 void imageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
5037 vk::VkAccessFlags srcAccesses,
5038 vk::VkPipelineStageFlags dstStages,
5039 vk::VkAccessFlags dstAccesses);
// Debug-only validation that a layout-transition barrier is legal here.
5041 void checkImageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
5042 vk::VkAccessFlags srcAccesses,
5043 vk::VkPipelineStageFlags dstStages,
5044 vk::VkAccessFlags dstAccesses);
5046 // Everything is clean and there is no need for barriers
5047 bool isClean (void) const;
5049 vk::VkPipelineStageFlags getAllowedStages (void) const { return m_allowedStages; }
5050 vk::VkAccessFlags getAllowedAcceses (void) const { return m_allowedAccesses; }
5052 // Limit which stages and accesses are used by the CacheState tracker
5053 const vk::VkPipelineStageFlags m_allowedStages;
5054 const vk::VkAccessFlags m_allowedAccesses;
5056 // [dstStage][srcStage] = srcAccesses
5057 // In stage dstStage write srcAccesses from srcStage are not yet available
5058 vk::VkAccessFlags m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
5059 // Latest pipeline transition is not available in stage
5060 bool m_unavailableLayoutTransition[PIPELINESTAGE_LAST];
5061 // [dstStage] = dstAccesses
5062 // In stage dstStage ops with dstAccesses are not yet visible
5063 vk::VkAccessFlags m_invisibleOperations[PIPELINESTAGE_LAST];
5065 // [dstStage] = srcStage
5066 // Memory operation in srcStage have not completed before dstStage
5067 vk::VkPipelineStageFlags m_incompleteOperations[PIPELINESTAGE_LAST];
// Initialize a fully-clean state: everything visible, complete and
// available for every allowed stage. Iterates stage bits one at a time
// (dstStage_ <<= 1), skipping bits outside m_allowedStages.
5070 CacheState::CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses)
5071 : m_allowedStages (allowedStages)
5072 , m_allowedAccesses (allowedAccesses)
5074 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5076 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5078 if ((dstStage_ & m_allowedStages) == 0)
5081 // All operations are initially visible
5082 m_invisibleOperations[dstStage] = 0;
5084 // There are no incomplete read operations initially
5085 m_incompleteOperations[dstStage] = 0;
5087 // There are no incomplete layout transitions
5088 m_unavailableLayoutTransition[dstStage] = false;
5090 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5092 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5094 if ((srcStage_ & m_allowedStages) == 0)
5097 // There are no write operations that are not yet available
5099 m_unavailableWriteOperations[dstStage][srcStage] = 0;
// An access at 'stage' is valid when (a) the latest layout transition
// and all prior writes it needs are visible there, and (b) for write
// accesses, no operations from other stages are still incomplete
// (a write must not race pending work).
5104 bool CacheState::isValid (vk::VkPipelineStageFlagBits stage,
5105 vk::VkAccessFlagBits access) const
5107 DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5108 DE_ASSERT((stage & (~m_allowedStages)) == 0);
5110 const PipelineStage dstStage = pipelineStageFlagToPipelineStage(stage);
5112 // Previous operations are not visible to access on stage
5113 if (m_unavailableLayoutTransition[dstStage] || (m_invisibleOperations[dstStage] & access) != 0)
5116 if (isWriteAccess(access))
5118 // Memory operations from other stages have not completed before
5120 if (m_incompleteOperations[dstStage] != 0)
// Record an access performed at 'stage': it becomes incomplete from the
// point of view of every stage, and a write additionally invalidates
// visibility everywhere and becomes an unavailable write from srcStage.
5127 void CacheState::perform (vk::VkPipelineStageFlagBits stage,
5128 vk::VkAccessFlagBits access)
5130 DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5131 DE_ASSERT((stage & (~m_allowedStages)) == 0);
5133 const PipelineStage srcStage = pipelineStageFlagToPipelineStage(stage);
5135 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5137 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5139 if ((dstStage_ & m_allowedStages) == 0)
5142 // Mark stage as incomplete for all stages
5143 m_incompleteOperations[dstStage] |= stage;
5145 if (isWriteAccess(access))
5147 // Mark all accesses from all stages invisible
5148 m_invisibleOperations[dstStage] |= m_allowedAccesses;
5150 // Mark write access from srcStage unavailable to all stages
5151 m_unavailableWriteOperations[dstStage][srcStage] |= access;
// Model vkQueueSubmit's implicit host synchronization: a barrier from
// host stage/accesses (host writes and reads are flushed on submit).
5156 void CacheState::submitCommandBuffer (void)
5158 // Flush all host writes and reads
5159 barrier(m_allowedStages & vk::VK_PIPELINE_STAGE_HOST_BIT,
5160 m_allowedAccesses & (vk::VK_ACCESS_HOST_READ_BIT | vk::VK_ACCESS_HOST_WRITE_BIT),
// Model vkQueue/DeviceWaitIdle: all device work completes, so all
// writes become available, and then visible to every non-host stage.
5165 void CacheState::waitForIdle (void)
5167 // Make all writes available
5168 barrier(m_allowedStages,
5169 m_allowedAccesses & getWriteAccessFlags(),
5173 // Make all writes visible on device side
5174 barrier(m_allowedStages,
5176 m_allowedStages & (~vk::VK_PIPELINE_STAGE_HOST_BIT),
// Compute the minimal stage/access masks for a single barrier that
// would return the state to fully clean: completes pending operations,
// makes all unavailable writes available and all invisible accesses
// visible, and orders any not-yet-visible layout transition.
5180 void CacheState::getFullBarrier (vk::VkPipelineStageFlags& srcStages,
5181 vk::VkAccessFlags& srcAccesses,
5182 vk::VkPipelineStageFlags& dstStages,
5183 vk::VkAccessFlags& dstAccesses) const
5190 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5192 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5194 if ((dstStage_ & m_allowedStages) == 0)
5197 // Make sure all previous operation are complete in all stages
5198 if (m_incompleteOperations[dstStage])
5200 dstStages |= dstStage_;
5201 srcStages |= m_incompleteOperations[dstStage];
5204 // Make sure all read operations are visible in dstStage
5205 if (m_invisibleOperations[dstStage])
5207 dstStages |= dstStage_;
5208 dstAccesses |= m_invisibleOperations[dstStage];
5211 // Make sure all write operations from all stages are available
5212 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5214 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5216 if ((srcStage_ & m_allowedStages) == 0)
5219 if (m_unavailableWriteOperations[dstStage][srcStage])
5221 dstStages |= dstStage_;
5222 srcStages |= dstStage_;
5223 srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage];
5226 if (m_unavailableLayoutTransition[dstStage] && !m_unavailableLayoutTransition[srcStage])
5228 // Add dependency between srcStage and dstStage if layout transition has not completed in dstStage,
5229 // but has completed in srcStage.
5230 dstStages |= dstStage_;
5231 srcStages |= dstStage_;
5236 DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5237 DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5238 DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5239 DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
// Debug-build sanity check for a layout-transition barrier: every
// allowed stage must have completed before (or be included in)
// srcStages, and — unless srcAccesses already flushes writes — at
// least one write must already be available somewhere. No-op in
// release builds (all parameters DE_UNREF'd).
5242 void CacheState::checkImageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
5243 vk::VkAccessFlags srcAccesses,
5244 vk::VkPipelineStageFlags dstStages,
5245 vk::VkAccessFlags dstAccesses)
5247 DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5248 DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5249 DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5250 DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
5252 DE_UNREF(srcStages);
5253 DE_UNREF(srcAccesses);
5255 DE_UNREF(dstStages);
5256 DE_UNREF(dstAccesses);
5258 #if defined(DE_DEBUG)
5259 // Check that all stages have completed before srcStages or are in srcStages.
5261 vk::VkPipelineStageFlags completedStages = srcStages;
5263 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
5265 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5267 if ((srcStage_ & srcStages) == 0)
5270 completedStages |= (~m_incompleteOperations[srcStage]);
5273 DE_ASSERT((completedStages & m_allowedStages) == m_allowedStages);
5276 // Check that any write is available at least in one stage. Since all stages are complete even single flush is enough.
5277 if ((getWriteAccessFlags() & m_allowedAccesses) != 0 && (srcAccesses & getWriteAccessFlags()) == 0)
5279 bool anyWriteAvailable = false;
5281 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5283 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5285 if ((dstStage_ & m_allowedStages) == 0)
5288 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5290 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5292 if ((srcStage_ & m_allowedStages) == 0)
5295 if (m_unavailableWriteOperations[dstStage][srcStage] != (getWriteAccessFlags() & m_allowedAccesses))
5297 anyWriteAvailable = true;
5303 DE_ASSERT(anyWriteAvailable);
// Apply a layout-transition barrier: the transition acts as a full
// write, so afterwards every stage sees all prior work complete only
// through the barrier, only dstAccesses are visible, all previous
// writes are available, and the new layout is visible exactly in
// dstStages.
5308 void CacheState::imageLayoutBarrier (vk::VkPipelineStageFlags srcStages,
5309 vk::VkAccessFlags srcAccesses,
5310 vk::VkPipelineStageFlags dstStages,
5311 vk::VkAccessFlags dstAccesses)
5313 checkImageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
5315 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5317 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5319 if ((dstStage_ & m_allowedStages) == 0)
5322 // All stages are incomplete after the barrier except each dstStage in it self.
5323 m_incompleteOperations[dstStage] = m_allowedStages & (~dstStage_);
5325 // All memory operations are invisible unless they are listed in dstAccess
5326 m_invisibleOperations[dstStage] = m_allowedAccesses & (~dstAccesses);
5328 // Layout transition is unavailable in stage unless it was listed in dstStages
5329 m_unavailableLayoutTransition[dstStage]= (dstStage_ & dstStages) == 0;
5331 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5333 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5335 if ((srcStage_ & m_allowedStages) == 0)
5338 // All write operations are available after layout transition
5339 m_unavailableWriteOperations[dstStage][srcStage] = 0;
// Apply an ordinary execution + memory barrier. First propagate state
// from each srcStage to each dstStage using a pre-barrier snapshot
// (old* copies) so updates within this call don't feed each other;
// then complete srcStages, make srcAccesses available, and — only once
// ALL writes are available in a dstStage — make dstAccesses visible
// there.
5344 void CacheState::barrier (vk::VkPipelineStageFlags srcStages,
5345 vk::VkAccessFlags srcAccesses,
5346 vk::VkPipelineStageFlags dstStages,
5347 vk::VkAccessFlags dstAccesses)
5349 DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5350 DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5351 DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5352 DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
// Snapshot of the pre-barrier state, used below so that per-stage
// updates are order-independent.
5356 vk::VkPipelineStageFlags oldIncompleteOperations[PIPELINESTAGE_LAST];
5357 vk::VkAccessFlags oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
5358 bool oldUnavailableLayoutTransition[PIPELINESTAGE_LAST];
5360 deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
5361 deMemcpy(oldUnavailableWriteOperations, m_unavailableWriteOperations, sizeof(oldUnavailableWriteOperations));
5362 deMemcpy(oldUnavailableLayoutTransition, m_unavailableLayoutTransition, sizeof(oldUnavailableLayoutTransition));
5364 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
5366 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5368 if ((srcStage_ & srcStages) == 0)
5371 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
5373 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5375 if ((dstStage_ & dstStages) == 0)
5378 // Stages that have completed before srcStage have also completed before dstStage
5379 m_incompleteOperations[dstStage] &= oldIncompleteOperations[srcStage];
5381 // Image layout transition in srcStage are now available in dstStage
5382 m_unavailableLayoutTransition[dstStage] &= oldUnavailableLayoutTransition[srcStage];
5384 for (vk::VkPipelineStageFlags sharedStage_ = 1; sharedStage_ <= m_allowedStages; sharedStage_ <<= 1)
5386 const PipelineStage sharedStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)sharedStage_);
5388 if ((sharedStage_ & m_allowedStages) == 0)
5391 // Writes that are available in srcStage are also available in dstStage
5392 m_unavailableWriteOperations[dstStage][sharedStage] &= oldUnavailableWriteOperations[srcStage][sharedStage];
5399 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
5401 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5402 bool allWritesAvailable = true;
5404 if ((dstStage_ & dstStages) == 0)
5407 // Operations in srcStages have completed before any stage in dstStages
5408 m_incompleteOperations[dstStage] &= ~srcStages;
5410 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5412 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5414 if ((srcStage_ & m_allowedStages) == 0)
5417 // Make srcAccesses from srcStage available in dstStage
5418 if ((srcStage_ & srcStages) != 0)
5419 m_unavailableWriteOperations[dstStage][srcStage] &= ~srcAccesses;
5421 if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
5422 allWritesAvailable = false;
5425 // If all writes are available in dstStage make dstAccesses also visible
5426 if (allWritesAvailable)
5427 m_invisibleOperations[dstStage] &= ~dstAccesses;
// True when no barrier is needed: per allowed stage there are no
// invisible operations, no incomplete operations, no pending layout
// transition, and no unavailable writes from any source stage.
5431 bool CacheState::isClean (void) const
5433 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5435 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5437 if ((dstStage_ & m_allowedStages) == 0)
5440 // Some operations are not visible to some stages
5441 if (m_invisibleOperations[dstStage] != 0)
5444 // There are operation that have not completed yet
5445 if (m_incompleteOperations[dstStage] != 0)
5448 // Layout transition has not completed yet
5449 if (m_unavailableLayoutTransition[dstStage])
5452 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5454 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5456 if ((srcStage_ & m_allowedStages) == 0)
5459 // Some write operations are not available yet
5460 if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
// Mutable test-walk state (fragment): where the generator currently is
// (host / command buffer / render pass), the tracked CacheState, and
// flags describing resources (buffer/image bound, layout, whether
// memory contents are defined, host flush/invalidate status, etc.).
5470 State (Usage usage, deUint32 seed)
5471 : stage (STAGE_HOST)
5472 , cache (usageToStageFlags(usage), usageToAccessFlags(usage))
5475 , hostInvalidated (true)
5476 , hostFlushed (true)
5477 , memoryDefined (false)
5479 , hasBoundBufferMemory (false)
5481 , hasBoundImageMemory (false)
5482 , imageLayout (vk::VK_IMAGE_LAYOUT_UNDEFINED)
5483 , imageDefined (false)
5486 , commandBufferIsEmpty (true)
// True when host caches have been invalidated since the last device write.
5495 bool hostInvalidated;
5500 bool hasBoundBufferMemory;
5503 bool hasBoundImageMemory;
5504 vk::VkImageLayout imageLayout;
// True until the first command is recorded into the open command buffer.
5510 bool commandBufferIsEmpty;
// Enumerate every Op that is legal from 'state', given the resource
// usage flags and whether buffers/images are supported. The generator
// picks randomly from 'ops'. Validity is gated on the CacheState
// tracker (isValid) so only correctly-synchronized operations are
// offered.
5513 void getAvailableOps (const State& state, bool supportsBuffers, bool supportsImages, Usage usage, vector<Op>& ops)
5515 if (state.stage == STAGE_HOST)
5517 if (usage & (USAGE_HOST_READ | USAGE_HOST_WRITE))
5519 // Host memory operations
5522 ops.push_back(OP_UNMAP);
5524 // Avoid flush and finish if they are not needed
5525 if (!state.hostFlushed)
5526 ops.push_back(OP_MAP_FLUSH);
// Invalidate only when it is actually needed and host access is valid.
5528 if (!state.hostInvalidated
5530 && ((usage & USAGE_HOST_READ) == 0
5531 || state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5532 && ((usage & USAGE_HOST_WRITE) == 0
5533 || state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)))
5535 ops.push_back(OP_MAP_INVALIDATE);
// Read-modify-write requires both host accesses to be valid and
// the memory contents to be defined.
5538 if (usage & USAGE_HOST_READ
5539 && usage & USAGE_HOST_WRITE
5540 && state.memoryDefined
5541 && state.hostInvalidated
5543 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)
5544 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5546 ops.push_back(OP_MAP_MODIFY);
5549 if (usage & USAGE_HOST_READ
5550 && state.memoryDefined
5551 && state.hostInvalidated
5553 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5555 ops.push_back(OP_MAP_READ);
5558 if (usage & USAGE_HOST_WRITE
5559 && state.hostInvalidated
5561 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT))
5563 ops.push_back(OP_MAP_WRITE);
5567 ops.push_back(OP_MAP);
5570 if (state.hasBoundBufferMemory && state.queueIdle)
5572 // \note Destroy only buffers after they have been bound
5573 ops.push_back(OP_BUFFER_DESTROY);
5577 if (state.hasBuffer)
5579 if (!state.hasBoundBufferMemory)
5580 ops.push_back(OP_BUFFER_BINDMEMORY);
5582 else if (!state.hasImage && supportsBuffers) // Avoid creating buffer if there is already image
5583 ops.push_back(OP_BUFFER_CREATE);
5586 if (state.hasBoundImageMemory && state.queueIdle)
5588 // \note Destroy only image after they have been bound
5589 ops.push_back(OP_IMAGE_DESTROY);
5595 if (!state.hasBoundImageMemory)
5596 ops.push_back(OP_IMAGE_BINDMEMORY);
5598 else if (!state.hasBuffer && supportsImages) // Avoid creating image if there is already buffer
5599 ops.push_back(OP_IMAGE_CREATE);
5602 // Host writes must be flushed before GPU commands and there must be
5603 // buffer or image for GPU commands
5604 if (state.hostFlushed
5605 && (state.memoryDefined || supportsDeviceBufferWrites(usage) || state.imageDefined || supportsDeviceImageWrites(usage))
5606 && (state.hasBoundBufferMemory || state.hasBoundImageMemory) // Avoid command buffers if there is no object to use
5607 && (usageToStageFlags(usage) & (~vk::VK_PIPELINE_STAGE_HOST_BIT)) != 0) // Don't start command buffer if there are no ways to use memory from gpu
5609 ops.push_back(OP_COMMAND_BUFFER_BEGIN);
5612 if (!state.deviceIdle)
5613 ops.push_back(OP_DEVICE_WAIT_FOR_IDLE);
5615 if (!state.queueIdle)
5616 ops.push_back(OP_QUEUE_WAIT_FOR_IDLE);
5618 else if (state.stage == STAGE_COMMAND_BUFFER)
// Barriers are only offered while the cache state is dirty.
5620 if (!state.cache.isClean())
5622 ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
5625 ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
5627 if (state.hasBuffer)
5628 ops.push_back(OP_PIPELINE_BARRIER_BUFFER);
5631 if (state.hasBoundBufferMemory)
5633 if (usage & USAGE_TRANSFER_DST
5634 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
5636 ops.push_back(OP_BUFFER_FILL);
5637 ops.push_back(OP_BUFFER_UPDATE);
5638 ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
5639 ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);
// Reads additionally require the buffer contents to be defined.
5642 if (usage & USAGE_TRANSFER_SRC
5643 && state.memoryDefined
5644 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
5646 ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
5647 ops.push_back(OP_BUFFER_COPY_TO_IMAGE);
5651 if (state.hasBoundImageMemory)
5653 ops.push_back(OP_IMAGE_TRANSITION_LAYOUT);
// Image transfer writes require GENERAL or TRANSFER_DST layout.
5656 if (usage & USAGE_TRANSFER_DST
5657 && (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
5658 || state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
5659 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
5661 ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
5662 ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
5663 ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);
// Image transfer reads require GENERAL or TRANSFER_SRC layout
// and defined image contents.
5666 if (usage & USAGE_TRANSFER_SRC
5667 && (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
5668 || state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
5669 && state.imageDefined
5670 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
5672 ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
5673 ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
5674 ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);
5679 // \todo [2016-03-09 mika] Add other usages?
5680 if (((usage & USAGE_VERTEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
5681 || ((usage & USAGE_INDEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT)))
5682 ops.push_back(OP_RENDERPASS_BEGIN);
5684 // \note This depends on previous operations and has to be always the
5685 // last command buffer operation check
5686 if (ops.empty() || !state.commandBufferIsEmpty)
5687 ops.push_back(OP_COMMAND_BUFFER_END);
5689 else if (state.stage == STAGE_RENDER_PASS)
5691 if (usage & USAGE_VERTEX_BUFFER
5692 && state.memoryDefined
5693 && state.hasBoundBufferMemory
5694 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
5696 ops.push_back(OP_RENDER_VERTEX_BUFFER);
5699 if (usage & USAGE_INDEX_BUFFER
5700 && state.memoryDefined
5701 && state.hasBoundBufferMemory
5702 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
5704 ops.push_back(OP_RENDER_INDEX_BUFFER);
// Ending the render pass is always possible inside one.
5707 ops.push_back(OP_RENDERPASS_END);
5710 DE_FATAL("Unknown stage");
// Returns true when an image created with the given 'usage' flags may be
// transitioned to 'layout', i.e. when the layout is compatible with at least
// one of the requested usages.
// NOTE(review): this listing has elided lines (the embedded numbering jumps),
// so the switch scaffolding and some 'return true;' branches are not visible.
5713 bool layoutSupportedByUsage (Usage usage, vk::VkImageLayout layout)
// GENERAL is compatible with every usage (its 'return' is on an elided line).
5717 case vk::VK_IMAGE_LAYOUT_GENERAL:
5720 case vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
5721 return (usage & USAGE_COLOR_ATTACHMENT) != 0;
5723 case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
5724 return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
// Read-only depth/stencil is keyed off the same attachment usage bit.
5726 case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
5727 return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
5729 case vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
5730 // \todo [2016-03-09 mika] Should include input attachment
5731 return (usage & USAGE_TEXTURE_SAMPLED) != 0;
5733 case vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
5734 return (usage & USAGE_TRANSFER_SRC) != 0;
5736 case vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
5737 return (usage & USAGE_TRANSFER_DST) != 0;
// PREINITIALIZED's 'return' also sits on an elided line.
5739 case vk::VK_IMAGE_LAYOUT_PREINITIALIZED:
// default: any layout not listed above is a programming error in the caller.
5743 DE_FATAL("Unknown layout");
// Picks a random image layout that is (a) supported by 'usage' per
// layoutSupportedByUsage() and (b) different from 'previousLayout'.
// The 'usage' parameter sits on an elided line of this listing.
// Implementation: first pass counts the candidates, then a random index in
// [0, count) is consumed by a second, identically-filtered pass.
5748 vk::VkImageLayout getRandomNextLayout (de::Random& rng,
5750 vk::VkImageLayout previousLayout)
5752 const vk::VkImageLayout layouts[] =
5754 vk::VK_IMAGE_LAYOUT_GENERAL,
5755 vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
5756 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
5757 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
5758 vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
5759 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
5760 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
5762 size_t possibleLayoutCount = 0;
// First pass: count layouts that qualify.
5764 for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
5766 const vk::VkImageLayout layout = layouts[layoutNdx];
5768 if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
5769 possibleLayoutCount++;
// NOTE(review): assumes possibleLayoutCount > 0 (modulo by zero otherwise);
// presumably guaranteed because GENERAL is always supported — confirm.
5772 size_t nextLayoutNdx = ((size_t)rng.getUint64()) % possibleLayoutCount;
// Second pass: return the nextLayoutNdx'th qualifying layout (the decrement
// and 'return layout;' sit on elided lines).
5774 for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
5776 const vk::VkImageLayout layout = layouts[layoutNdx];
5778 if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
5780 if (nextLayoutNdx == 0)
// Both passes use the same filter, so falling out of the loop is impossible.
5787 DE_FATAL("Unreachable");
5788 return vk::VK_IMAGE_LAYOUT_UNDEFINED;
// Applies the side effects of executing 'op' to the reference 'state'.
// The DE_ASSERTs encode each op's preconditions; the assignments track host
// mapping, flush/invalidate status, resource lifetime, image layout and the
// cache/barrier model. Crucially, every op must consume state.rng in exactly
// the same order as the matching command factory (createHostCommand /
// createCmdCommand), which callers verify with DE_ASSERT(state.rng == rng).
// NOTE(review): several case labels, 'break's and brace lines sit on elided
// lines of this listing (embedded numbering jumps), so some cases below
// appear label-less; labels named in comments are inferred — confirm.
5791 void applyOp (State& state, const Memory& memory, Op op, Usage usage)
// OP_MAP (label elided): host maps the memory object.
5796 DE_ASSERT(state.stage == STAGE_HOST);
5797 DE_ASSERT(!state.mapped);
5798 state.mapped = true;
// OP_UNMAP (label elided): host unmaps the memory object.
5802 DE_ASSERT(state.stage == STAGE_HOST);
5803 DE_ASSERT(state.mapped);
5804 state.mapped = false;
// OP_MAP_FLUSH (label elided): flush host writes towards the device.
5808 DE_ASSERT(state.stage == STAGE_HOST);
5809 DE_ASSERT(!state.hostFlushed);
5810 state.hostFlushed = true;
// OP_MAP_INVALIDATE: make device writes visible to the host.
5813 case OP_MAP_INVALIDATE:
5814 DE_ASSERT(state.stage == STAGE_HOST);
5815 DE_ASSERT(!state.hostInvalidated);
5816 state.hostInvalidated = true;
// OP_MAP_READ (label elided): host read; consumes one RNG value to stay in
// sync with HostMemoryAccess created in createHostCommand().
5820 DE_ASSERT(state.stage == STAGE_HOST);
5821 DE_ASSERT(state.hostInvalidated);
5822 state.rng.getUint32();
// OP_MAP_WRITE (label elided): host write defines the raw memory contents
// and invalidates any image interpretation of them.
5826 DE_ASSERT(state.stage == STAGE_HOST);
// Non-coherent memory needs an explicit flush after a host write.
5827 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5828 state.hostFlushed = false;
5830 state.memoryDefined = true;
5831 state.imageDefined = false;
5832 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
5833 state.rng.getUint32();
// OP_MAP_MODIFY (label elided): read-modify-write on mapped memory.
5837 DE_ASSERT(state.stage == STAGE_HOST);
5838 DE_ASSERT(state.hostInvalidated);
5840 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5841 state.hostFlushed = false;
5843 state.rng.getUint32();
// Buffer lifetime ops.
5846 case OP_BUFFER_CREATE:
5847 DE_ASSERT(state.stage == STAGE_HOST);
5848 DE_ASSERT(!state.hasBuffer);
5850 state.hasBuffer = true;
5853 case OP_BUFFER_DESTROY:
5854 DE_ASSERT(state.stage == STAGE_HOST);
5855 DE_ASSERT(state.hasBuffer);
5856 DE_ASSERT(state.hasBoundBufferMemory);
5858 state.hasBuffer = false;
5859 state.hasBoundBufferMemory = false;
5862 case OP_BUFFER_BINDMEMORY:
5863 DE_ASSERT(state.stage == STAGE_HOST);
5864 DE_ASSERT(state.hasBuffer);
5865 DE_ASSERT(!state.hasBoundBufferMemory);
5867 state.hasBoundBufferMemory = true;
// Image lifetime ops; note buffer and image never coexist (asserted here).
5870 case OP_IMAGE_CREATE:
5871 DE_ASSERT(state.stage == STAGE_HOST);
5872 DE_ASSERT(!state.hasImage);
5873 DE_ASSERT(!state.hasBuffer);
5875 state.hasImage = true;
5878 case OP_IMAGE_DESTROY:
5879 DE_ASSERT(state.stage == STAGE_HOST);
5880 DE_ASSERT(state.hasImage);
5881 DE_ASSERT(state.hasBoundImageMemory);
5883 state.hasImage = false;
5884 state.hasBoundImageMemory = false;
5885 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
5886 state.imageDefined = false;
5889 case OP_IMAGE_BINDMEMORY:
5890 DE_ASSERT(state.stage == STAGE_HOST);
5891 DE_ASSERT(state.hasImage);
5892 DE_ASSERT(!state.hasBoundImageMemory);
5894 state.hasBoundImageMemory = true;
// Layout transition: mirrors the RNG draws of createCmdCommand() exactly
// (float, then two uint32 masks) so the generators stay in lock-step.
5897 case OP_IMAGE_TRANSITION_LAYOUT:
5899 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5900 DE_ASSERT(state.hasImage);
5901 DE_ASSERT(state.hasBoundImageMemory);
5903 // \todo [2016-03-09 mika] Support linear tiling and predefined data
// 10% of the time transition from UNDEFINED, discarding image contents.
5904 const vk::VkImageLayout srcLayout = state.rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
5905 const vk::VkImageLayout dstLayout = getRandomNextLayout(state.rng, usage, srcLayout);
5907 vk::VkPipelineStageFlags dirtySrcStages;
5908 vk::VkAccessFlags dirtySrcAccesses;
5909 vk::VkPipelineStageFlags dirtyDstStages;
5910 vk::VkAccessFlags dirtyDstAccesses;
5912 vk::VkPipelineStageFlags srcStages;
5913 vk::VkAccessFlags srcAccesses;
5914 vk::VkPipelineStageFlags dstStages;
5915 vk::VkAccessFlags dstAccesses;
5917 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
5919 // Try masking some random bits
5920 srcStages = dirtySrcStages;
5921 srcAccesses = dirtySrcAccesses;
5923 dstStages = state.cache.getAllowedStages() & state.rng.getUint32();
// [sic] "getAllowedAcceses" is the helper's actual (misspelled) name.
5924 dstAccesses = state.cache.getAllowedAcceses() & state.rng.getUint32();
5926 // If there are no bits in dst stage mask use all stages
5927 dstStages = dstStages ? dstStages : state.cache.getAllowedStages();
// Presumably guarded by an elided 'if (!srcStages)' line — confirm.
5930 srcStages = dstStages;
// Transitioning away from UNDEFINED discards previous image contents.
5932 if (srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
5933 state.imageDefined = false;
5935 state.commandBufferIsEmpty = false;
5936 state.imageLayout = dstLayout;
// A layout transition invalidates the raw-buffer view of the memory.
5937 state.memoryDefined = false;
5938 state.cache.imageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
5942 case OP_QUEUE_WAIT_FOR_IDLE:
5943 DE_ASSERT(state.stage == STAGE_HOST);
5944 DE_ASSERT(!state.queueIdle);
5946 state.queueIdle = true;
5948 state.cache.waitForIdle();
5951 case OP_DEVICE_WAIT_FOR_IDLE:
5952 DE_ASSERT(state.stage == STAGE_HOST);
5953 DE_ASSERT(!state.deviceIdle);
// Device idle implies every queue is idle as well.
5955 state.queueIdle = true;
5956 state.deviceIdle = true;
5958 state.cache.waitForIdle();
5961 case OP_COMMAND_BUFFER_BEGIN:
5962 DE_ASSERT(state.stage == STAGE_HOST);
5963 state.stage = STAGE_COMMAND_BUFFER;
5964 state.commandBufferIsEmpty = true;
5965 // Makes host writes visible to command buffer
5966 state.cache.submitCommandBuffer();
5969 case OP_COMMAND_BUFFER_END:
5970 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5971 state.stage = STAGE_HOST;
// Submitting new work means queue/device are no longer idle.
5972 state.queueIdle = false;
5973 state.deviceIdle = false;
// Transfer writes into the buffer: contents become buffer-defined.
5976 case OP_BUFFER_COPY_FROM_BUFFER:
5977 case OP_BUFFER_COPY_FROM_IMAGE:
5978 case OP_BUFFER_UPDATE:
5979 case OP_BUFFER_FILL:
5980 state.rng.getUint32();
5981 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
// Device writes to non-coherent memory require a host invalidate before read.
5983 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5984 state.hostInvalidated = false;
5986 state.commandBufferIsEmpty = false;
5987 state.memoryDefined = true;
5988 state.imageDefined = false;
5989 state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
5990 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
// Transfer reads from the buffer.
5993 case OP_BUFFER_COPY_TO_BUFFER:
5994 case OP_BUFFER_COPY_TO_IMAGE:
5995 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5997 state.commandBufferIsEmpty = false;
5998 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
// Transfer writes into the image. Blit consumes an extra bool (the scale
// choice) before sharing the copy handling — matches createCmdCommand().
6001 case OP_IMAGE_BLIT_FROM_IMAGE:
6002 state.rng.getBool();
// Fall-through intended.
6004 case OP_IMAGE_COPY_FROM_BUFFER:
6005 case OP_IMAGE_COPY_FROM_IMAGE:
6006 state.rng.getUint32();
6007 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6009 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
6010 state.hostInvalidated = false;
6012 state.commandBufferIsEmpty = false;
6013 state.memoryDefined = false;
6014 state.imageDefined = true;
6015 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
// Transfer reads from the image (blit again draws its scale bool first).
6018 case OP_IMAGE_BLIT_TO_IMAGE:
6019 state.rng.getBool();
6021 case OP_IMAGE_COPY_TO_BUFFER:
6022 case OP_IMAGE_COPY_TO_IMAGE:
6023 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6025 state.commandBufferIsEmpty = false;
6026 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
// Pipeline barriers: random mask draws must match createCmdCommand().
6029 case OP_PIPELINE_BARRIER_GLOBAL:
6030 case OP_PIPELINE_BARRIER_BUFFER:
6031 case OP_PIPELINE_BARRIER_IMAGE:
6033 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6035 vk::VkPipelineStageFlags dirtySrcStages;
6036 vk::VkAccessFlags dirtySrcAccesses;
6037 vk::VkPipelineStageFlags dirtyDstStages;
6038 vk::VkAccessFlags dirtyDstAccesses;
6040 vk::VkPipelineStageFlags srcStages;
6041 vk::VkAccessFlags srcAccesses;
6042 vk::VkPipelineStageFlags dstStages;
6043 vk::VkAccessFlags dstAccesses;
6045 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6047 // Try masking some random bits
6048 srcStages = dirtySrcStages & state.rng.getUint32();
6049 srcAccesses = dirtySrcAccesses & state.rng.getUint32();
6051 dstStages = dirtyDstStages & state.rng.getUint32();
6052 dstAccesses = dirtyDstAccesses & state.rng.getUint32();
6054 // If there are no bits in stage mask use the original dirty stages
6055 srcStages = srcStages ? srcStages : dirtySrcStages;
6056 dstStages = dstStages ? dstStages : dirtyDstStages;
// Presumably guarded by an elided 'if (!srcStages)' line — confirm.
6059 srcStages = dstStages;
6061 state.commandBufferIsEmpty = false;
6062 state.cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
// Render pass scope transitions.
6066 case OP_RENDERPASS_BEGIN:
6068 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6070 state.stage = STAGE_RENDER_PASS;
6074 case OP_RENDERPASS_END:
6076 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6078 state.stage = STAGE_COMMAND_BUFFER;
// Draw ops reading the memory through the vertex input stage.
6082 case OP_RENDER_VERTEX_BUFFER:
6084 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6086 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
6090 case OP_RENDER_INDEX_BUFFER:
6092 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
6094 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT);
// default (label elided): unhandled op is a programming error.
6099 DE_FATAL("Unknown op");
// Factory for host-stage commands: maps an Op to a heap-allocated Command.
// The 'rng' and 'usage' parameter lines are elided in this listing; the
// bodies below reference both. RNG draws here must match applyOp() exactly.
6103 de::MovePtr<Command> createHostCommand (Op op,
6106 vk::VkSharingMode sharing)
6110 case OP_MAP: return de::MovePtr<Command>(new Map());
6111 case OP_UNMAP: return de::MovePtr<Command>(new UnMap());
6113 case OP_MAP_FLUSH: return de::MovePtr<Command>(new Flush());
6114 case OP_MAP_INVALIDATE: return de::MovePtr<Command>(new Invalidate());
// HostMemoryAccess(read, write, seed): READ=(true,false), WRITE=(false,true),
// MODIFY=(true,true). Each draws one uint32 seed, mirroring applyOp().
6116 case OP_MAP_READ: return de::MovePtr<Command>(new HostMemoryAccess(true, false, rng.getUint32()));
6117 case OP_MAP_WRITE: return de::MovePtr<Command>(new HostMemoryAccess(false, true, rng.getUint32()));
6118 case OP_MAP_MODIFY: return de::MovePtr<Command>(new HostMemoryAccess(true, true, rng.getUint32()));
// Resource lifetime commands; creation translates the abstract Usage flags
// into Vulkan buffer/image usage flags.
6120 case OP_BUFFER_CREATE: return de::MovePtr<Command>(new CreateBuffer(usageToBufferUsageFlags(usage), sharing));
6121 case OP_BUFFER_DESTROY: return de::MovePtr<Command>(new DestroyBuffer());
6122 case OP_BUFFER_BINDMEMORY: return de::MovePtr<Command>(new BindBufferMemory());
6124 case OP_IMAGE_CREATE: return de::MovePtr<Command>(new CreateImage(usageToImageUsageFlags(usage), sharing));
6125 case OP_IMAGE_DESTROY: return de::MovePtr<Command>(new DestroyImage());
6126 case OP_IMAGE_BINDMEMORY: return de::MovePtr<Command>(new BindImageMemory());
6128 case OP_QUEUE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new QueueWaitIdle());
6129 case OP_DEVICE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new DeviceWaitIdle());
// default (label elided): unknown op — fatal, then a null return to satisfy
// the compiler.
6132 DE_FATAL("Unknown op");
6133 return de::MovePtr<Command>(DE_NULL);
// Factory for command-buffer-stage commands. Parameter lines for 'state',
// 'op' and 'usage' are elided in this listing. Every RNG draw made here must
// be mirrored by applyOp() for the same op, since callers assert that both
// generators stay in sync.
6137 de::MovePtr<CmdCommand> createCmdCommand (de::Random& rng,
// Transfer writes into the buffer; each takes a random data seed.
6144 case OP_BUFFER_FILL: return de::MovePtr<CmdCommand>(new FillBuffer(rng.getUint32()));
6145 case OP_BUFFER_UPDATE: return de::MovePtr<CmdCommand>(new UpdateBuffer(rng.getUint32()));
6146 case OP_BUFFER_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyToBuffer());
6147 case OP_BUFFER_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyFromBuffer(rng.getUint32()));
6149 case OP_BUFFER_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyToImage());
6150 case OP_BUFFER_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyFromImage(rng.getUint32()));
// Image layout transition: duplicates the random src/dst layout and barrier
// mask selection performed in applyOp() — keep the two in lock-step.
6152 case OP_IMAGE_TRANSITION_LAYOUT:
6154 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
6155 DE_ASSERT(state.hasImage);
6156 DE_ASSERT(state.hasBoundImageMemory);
// 10% of the time transition from UNDEFINED, discarding contents.
6158 const vk::VkImageLayout srcLayout = rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
6159 const vk::VkImageLayout dstLayout = getRandomNextLayout(rng, usage, srcLayout);
6161 vk::VkPipelineStageFlags dirtySrcStages;
6162 vk::VkAccessFlags dirtySrcAccesses;
6163 vk::VkPipelineStageFlags dirtyDstStages;
6164 vk::VkAccessFlags dirtyDstAccesses;
6166 vk::VkPipelineStageFlags srcStages;
6167 vk::VkAccessFlags srcAccesses;
6168 vk::VkPipelineStageFlags dstStages;
6169 vk::VkAccessFlags dstAccesses;
6171 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6173 // Try masking some random bits
6174 srcStages = dirtySrcStages;
6175 srcAccesses = dirtySrcAccesses;
6177 dstStages = state.cache.getAllowedStages() & rng.getUint32();
// [sic] "getAllowedAcceses" is the helper's actual (misspelled) name.
6178 dstAccesses = state.cache.getAllowedAcceses() & rng.getUint32();
6180 // If there are no bits in dst stage mask use all stages
6181 dstStages = dstStages ? dstStages : state.cache.getAllowedStages();
// Presumably guarded by an elided 'if (!srcStages)' line — confirm.
6184 srcStages = dstStages;
6186 return de::MovePtr<CmdCommand>(new ImageTransition(srcStages, srcAccesses, dstStages, dstAccesses, srcLayout, dstLayout));
// Image copies record the current layout so the command uses the right one.
6189 case OP_IMAGE_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyToBuffer(state.imageLayout));
6190 case OP_IMAGE_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyFromBuffer(rng.getUint32(), state.imageLayout));
6191 case OP_IMAGE_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyToImage(state.imageLayout));
6192 case OP_IMAGE_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyFromImage(rng.getUint32(), state.imageLayout));
// Blits randomly pick 2x or 1x scaling (the extra getBool() applyOp mirrors).
6193 case OP_IMAGE_BLIT_TO_IMAGE:
6195 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
6196 return de::MovePtr<CmdCommand>(new ImageBlitToImage(scale, state.imageLayout));
6199 case OP_IMAGE_BLIT_FROM_IMAGE:
6201 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
6202 return de::MovePtr<CmdCommand>(new ImageBlitFromImage(rng.getUint32(), scale, state.imageLayout));
// Pipeline barriers: same randomized mask scheme as in applyOp().
6205 case OP_PIPELINE_BARRIER_GLOBAL:
6206 case OP_PIPELINE_BARRIER_BUFFER:
6207 case OP_PIPELINE_BARRIER_IMAGE:
6209 vk::VkPipelineStageFlags dirtySrcStages;
6210 vk::VkAccessFlags dirtySrcAccesses;
6211 vk::VkPipelineStageFlags dirtyDstStages;
6212 vk::VkAccessFlags dirtyDstAccesses;
6214 vk::VkPipelineStageFlags srcStages;
6215 vk::VkAccessFlags srcAccesses;
6216 vk::VkPipelineStageFlags dstStages;
6217 vk::VkAccessFlags dstAccesses;
6219 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
6221 // Try masking some random bits
6222 srcStages = dirtySrcStages & rng.getUint32();
6223 srcAccesses = dirtySrcAccesses & rng.getUint32();
6225 dstStages = dirtyDstStages & rng.getUint32();
6226 dstAccesses = dirtyDstAccesses & rng.getUint32();
6228 // If there are no bits in stage mask use the original dirty stages
6229 srcStages = srcStages ? srcStages : dirtySrcStages;
6230 dstStages = dstStages ? dstStages : dirtyDstStages;
// Presumably guarded by an elided 'if (!srcStages)' line — confirm.
6233 srcStages = dstStages;
6235 PipelineBarrier::Type type;
6237 if (op == OP_PIPELINE_BARRIER_IMAGE)
6238 type = PipelineBarrier::TYPE_IMAGE;
6239 else if (op == OP_PIPELINE_BARRIER_BUFFER)
6240 type = PipelineBarrier::TYPE_BUFFER;
6241 else if (op == OP_PIPELINE_BARRIER_GLOBAL)
6242 type = PipelineBarrier::TYPE_GLOBAL;
6245 type = PipelineBarrier::TYPE_LAST;
6246 DE_FATAL("Unknown op");
// Only image barriers carry a layout (oldLayout == newLayout here).
6249 if (type == PipelineBarrier::TYPE_IMAGE)
6250 return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::just(state.imageLayout)));
6252 return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::nothing<vk::VkImageLayout>()));
// default (label elided): unknown op.
6256 DE_FATAL("Unknown op");
6257 return de::MovePtr<CmdCommand>(DE_NULL);
// Factory for render-pass-stage commands. The unnamed de::Random& is unused;
// the 'state' and 'op' parameter lines are elided in this listing.
6261 de::MovePtr<RenderPassCommand> createRenderPassCommand (de::Random&,
6267 case OP_RENDER_VERTEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_VERTEX_BUFFER));
6268 case OP_RENDER_INDEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_INDEX_BUFFER));
// default (label elided): unknown op.
6271 DE_FATAL("Unknown op");
6272 return de::MovePtr<RenderPassCommand>(DE_NULL);
// Generates random render-pass contents until OP_RENDERPASS_END is chosen,
// then wraps the accumulated commands in a SubmitRenderPass. Parameter lines
// for 'state', 'usage', 'opNdx' and 'opCount' are elided; 'opNdx' is shared
// with the caller's loop so the global operation budget is respected.
6276 de::MovePtr<CmdCommand> createRenderPassCommands (const Memory& memory,
6277 de::Random& nextOpRng,
6283 vector<RenderPassCommand*> commands;
6287 for (; opNdx < opCount; opNdx++)
6291 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6293 DE_ASSERT(!ops.empty());
// 'nextOpRng' only selects which op to execute; data comes from state.rng.
6296 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6298 if (op == OP_RENDERPASS_END)
// Copy the data rng so we can verify command creation and applyOp()
// consumed exactly the same random values.
6304 de::Random rng (state.rng);
6306 commands.push_back(createRenderPassCommand(rng, state, op).release());
6307 applyOp(state, memory, op, usage);
6309 DE_ASSERT(state.rng == rng);
6314 applyOp(state, memory, OP_RENDERPASS_END, usage);
// SubmitRenderPass takes ownership of the raw pointers in 'commands'.
6315 return de::MovePtr<CmdCommand>(new SubmitRenderPass(commands));
// Cleanup on exception (the catch scaffolding sits on elided lines).
6319 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6320 delete commands[commandNdx];
// Generates random command-buffer contents until OP_COMMAND_BUFFER_END is
// chosen, then wraps them in a SubmitCommandBuffer. Parameter lines for
// 'state', 'usage', 'opNdx' and 'opCount' are elided; 'opNdx' is shared with
// the caller so the overall operation budget is respected.
6326 de::MovePtr<Command> createCmdCommands (const Memory& memory,
6327 de::Random& nextOpRng,
6333 vector<CmdCommand*> commands;
6337 for (; opNdx < opCount; opNdx++)
6341 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6343 DE_ASSERT(!ops.empty());
// 'nextOpRng' only selects the op; data randomness comes from state.rng.
6346 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6348 if (op == OP_COMMAND_BUFFER_END)
6354 // \note Command needs to known the state before the operation
// Render passes are recursive: apply the begin, then generate the contents.
6355 if (op == OP_RENDERPASS_BEGIN)
6357 applyOp(state, memory, op, usage);
6358 commands.push_back(createRenderPassCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
// Copy of the data rng used to verify that the command factory and
// applyOp() consume exactly the same random values.
6362 de::Random rng (state.rng);
6364 commands.push_back(createCmdCommand(rng, state, op, usage).release());
6365 applyOp(state, memory, op, usage);
6367 DE_ASSERT(state.rng == rng);
6374 applyOp(state, memory, OP_COMMAND_BUFFER_END, usage);
// SubmitCommandBuffer takes ownership of the raw pointers in 'commands'.
6375 return de::MovePtr<Command>(new SubmitCommandBuffer(commands));
// Cleanup on exception (the catch scaffolding sits on elided lines).
6379 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6380 delete commands[commandNdx];
6386 void createCommands (vector<Command*>& commands,
6388 const Memory& memory,
6390 vk::VkSharingMode sharingMode)
6392 const size_t opCount = 100;
6393 State state (usage, seed);
6394 // Used to select next operation only
6395 de::Random nextOpRng (seed ^ 12930809);
6397 commands.reserve(opCount);
6399 for (size_t opNdx = 0; opNdx < opCount; opNdx++)
6403 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6405 DE_ASSERT(!ops.empty());
6408 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6410 if (op == OP_COMMAND_BUFFER_BEGIN)
6412 applyOp(state, memory, op, usage);
6413 commands.push_back(createCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
6417 de::Random rng (state.rng);
6419 commands.push_back(createHostCommand(op, rng, usage, sharingMode).release());
6420 applyOp(state, memory, op, usage);
6422 // Make sure that random generator is in sync
6423 DE_ASSERT(state.rng == rng);
6428 // Clean up resources
6429 if (state.hasBuffer && state.hasImage)
6431 if (!state.queueIdle)
6432 commands.push_back(new QueueWaitIdle());
6434 if (state.hasBuffer)
6435 commands.push_back(new DestroyBuffer());
6438 commands.push_back(new DestroyImage());
// Runs one complete test pass for a single memory type: generates the random
// command sequence with a fixed seed, logs it, then drives each command
// through its prepare / execute / verify phases, reporting failures to
// 'resultCollector'. The 'usage' parameter line is elided in this listing;
// try/catch scaffolding around the phases also sits on elided lines.
6442 void testCommand (TestLog& log,
6443 tcu::ResultCollector& resultCollector,
6444 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection,
6445 const vk::InstanceInterface& vki,
6446 const vk::DeviceInterface& vkd,
6447 vk::VkPhysicalDevice physicalDevice,
6448 vk::VkDevice device,
6449 vk::VkDeviceSize size,
6450 deUint32 memoryTypeIndex,
6452 vk::VkSharingMode sharingMode,
6453 vk::VkQueue executionQueue,
6454 deUint32 executionQueueFamily,
6455 const vector<deUint32>& queueFamilies,
6456 const vk::VkDeviceSize maxBufferSize,
6457 const IVec2 maxImageSize)
// Fixed seed: the generated command sequence is fully deterministic.
6459 const deUint32 seed = 2830980989u;
6460 Memory memory (vki, vkd, physicalDevice, device, size, memoryTypeIndex, maxBufferSize, maxImageSize[0], maxImageSize[1]);
6461 vector<Command*> commands;
6462 vector<pair<deUint32, vk::VkQueue> > queues;
6466 log << TestLog::Message << "Create commands" << TestLog::EndMessage;
6467 createCommands(commands, seed, memory, usage, sharingMode);
// Collect one queue (index 0) from every requested family.
6469 for (size_t queueNdx = 0; queueNdx < queueFamilies.size(); queueNdx++)
6473 vkd.getDeviceQueue(device, queueFamilies[queueNdx], 0, &queue);
6474 queues.push_back(std::make_pair(queueFamilies[queueNdx], queue));
// Log what will be prepared and executed before doing any work.
6478 const tcu::ScopedLogSection section (log, "LogPrepare", "LogPrepare");
6480 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6481 commands[cmdNdx]->logPrepare(log, cmdNdx);
6485 const tcu::ScopedLogSection section (log, "LogExecute", "LogExecute");
6487 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6488 commands[cmdNdx]->logExecute(log, cmdNdx);
6492 const Context context (vki, vkd, physicalDevice, device, executionQueue, executionQueueFamily, queues, binaryCollection);
// Phase 1: prepare (resource creation etc.).
6497 PrepareContext prepareContext (context, memory);
6499 log << TestLog::Message << "Begin prepare" << TestLog::EndMessage;
6501 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6503 Command& command = *commands[cmdNdx];
6507 command.prepare(prepareContext);
// Reached via an elided catch: re-reported with the failing command's index.
6511 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to prepare for execution");
// Phase 2: execute the commands against the device.
6516 ExecuteContext executeContext (context);
6518 log << TestLog::Message << "Begin execution" << TestLog::EndMessage;
6520 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6522 Command& command = *commands[cmdNdx];
6526 command.execute(executeContext);
6530 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to execute");
// All submitted work must finish before verification reads results back.
6535 VK_CHECK(vkd.deviceWaitIdle(device));
// Phase 3: verify results against the reference model.
6539 const tcu::ScopedLogSection section (log, "Verify", "Verify");
6540 VerifyContext verifyContext (log, resultCollector, context, size);
6542 log << TestLog::Message << "Begin verify" << TestLog::EndMessage;
6544 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6546 Command& command = *commands[cmdNdx];
6550 command.verify(verifyContext, cmdNdx);
6554 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed verification");
// The three identical delete loops below are the normal-path cleanup plus
// the cleanup bodies of elided catch handlers.
6560 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6562 delete commands[commandNdx];
6563 commands[commandNdx] = DE_NULL;
6568 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6570 delete commands[commandNdx];
6571 commands[commandNdx] = DE_NULL;
6580 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6582 delete commands[commandNdx];
6583 commands[commandNdx] = DE_NULL;
// Test instance that iterates over all memory types of the physical device,
// running the randomized command sequence on each one. Access specifiers
// (public:/private:) sit on elided lines of this listing.
6589 class MemoryTestInstance : public TestInstance
6593 MemoryTestInstance (::vkt::Context& context, const TestConfig& config);
// Runs one memory type per call; returns incomplete() until all are done.
6595 tcu::TestStatus iterate (void);
6598 const TestConfig m_config;
6599 const vk::VkPhysicalDeviceMemoryProperties m_memoryProperties;
// Index of the next memory type to test; advanced by iterate().
6600 deUint32 m_memoryTypeNdx;
// Aggregates pass/fail results across all memory types.
6601 tcu::ResultCollector m_resultCollector;
// Constructor: captures the device's memory properties and logs the test
// configuration plus all memory heaps/types up front. The m_config
// initializer sits on an elided line of this listing.
6604 MemoryTestInstance::MemoryTestInstance (::vkt::Context& context, const TestConfig& config)
6605 : TestInstance (context)
6607 , m_memoryProperties (vk::getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
6608 , m_memoryTypeNdx (0)
6609 , m_resultCollector (context.getTestContext().getLog())
6611 TestLog& log = context.getTestContext().getLog();
// Log the configuration once so failures can be reproduced from the log.
6613 const tcu::ScopedLogSection section (log, "TestCaseInfo", "Test Case Info");
6615 log << TestLog::Message << "Buffer size: " << config.size << TestLog::EndMessage;
6616 log << TestLog::Message << "Sharing: " << config.sharing << TestLog::EndMessage;
6617 log << TestLog::Message << "Access: " << config.usage << TestLog::EndMessage;
// Dump every heap and memory type for diagnostics.
6621 const tcu::ScopedLogSection section (log, "MemoryProperties", "Memory Properties");
6623 for (deUint32 heapNdx = 0; heapNdx < m_memoryProperties.memoryHeapCount; heapNdx++)
6625 const tcu::ScopedLogSection heapSection (log, "Heap" + de::toString(heapNdx), "Heap " + de::toString(heapNdx));
6627 log << TestLog::Message << "Size: " << m_memoryProperties.memoryHeaps[heapNdx].size << TestLog::EndMessage;
6628 log << TestLog::Message << "Flags: " << m_memoryProperties.memoryHeaps[heapNdx].flags << TestLog::EndMessage;
6631 for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < m_memoryProperties.memoryTypeCount; memoryTypeNdx++)
6633 const tcu::ScopedLogSection memoryTypeSection (log, "MemoryType" + de::toString(memoryTypeNdx), "Memory type " + de::toString(memoryTypeNdx));
6635 log << TestLog::Message << "Properties: " << m_memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags << TestLog::EndMessage;
6636 log << TestLog::Message << "Heap: " << m_memoryProperties.memoryTypes[memoryTypeNdx].heapIndex << TestLog::EndMessage;
// One iteration tests one memory type, then advances m_memoryTypeNdx (the
// increment sits on an elided line). Returns incomplete() while types remain
// and the collected result once every type has been visited.
6641 tcu::TestStatus MemoryTestInstance::iterate (void)
6643 // \todo [2016-03-09 mika] Split different stages over multiple iterations
6644 if (m_memoryTypeNdx < m_memoryProperties.memoryTypeCount)
6646 TestLog& log = m_context.getTestContext().getLog();
6647 const tcu::ScopedLogSection section (log, "MemoryType" + de::toString(m_memoryTypeNdx), "Memory type " + de::toString(m_memoryTypeNdx));
6648 const vk::InstanceInterface& vki = m_context.getInstanceInterface();
6649 const vk::VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
6650 const vk::DeviceInterface& vkd = m_context.getDeviceInterface();
6651 const vk::VkDevice device = m_context.getDevice();
6652 const vk::VkQueue queue = m_context.getUniversalQueue();
6653 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
6654 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
6655 vector<deUint32> queues;
6657 queues.push_back(queueFamilyIndex);
// Host access requires a host-visible memory type; otherwise skip this type.
6659 if (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)
6660 && !(memoryProperties.memoryTypes[m_memoryTypeNdx].propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
6662 log << TestLog::Message << "Memory type not supported" << TestLog::EndMessage;
// incomplete() re-enters iterate() for the next memory type.
6665 return tcu::TestStatus::incomplete();
// Probe the largest buffer / RGBA8 image this memory type can back; the
// ': 0' / fallback arms of these conditionals sit on elided lines.
6671 const vk::VkBufferUsageFlags bufferUsage = usageToBufferUsageFlags(m_config.usage);
6672 const vk::VkImageUsageFlags imageUsage = usageToImageUsageFlags(m_config.usage);
6673 const vk::VkDeviceSize maxBufferSize = bufferUsage != 0
6674 ? roundBufferSizeToWxHx4(findMaxBufferSize(vkd, device, bufferUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx))
6676 const IVec2 maxImageSize = imageUsage != 0
6677 ? findMaxRGBA8ImageSize(vkd, device, imageUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx)
6680 log << TestLog::Message << "Max buffer size: " << maxBufferSize << TestLog::EndMessage;
6681 log << TestLog::Message << "Max RGBA8 image size: " << maxImageSize << TestLog::EndMessage;
6683 // Skip tests if there are no supported operations
6684 if (maxBufferSize == 0
6685 && maxImageSize[0] == 0
6686 && (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)) == 0)
6688 log << TestLog::Message << "Skipping memory type. None of the usages are supported." << TestLog::EndMessage;
6692 testCommand(log, m_resultCollector, m_context.getBinaryCollection(), vki, vkd, physicalDevice, device, m_config.size, m_memoryTypeNdx, m_config.usage, m_config.sharing, queue, queueFamilyIndex, queues, maxBufferSize, maxImageSize);
// TestError failures are collected rather than aborting the whole case.
6695 catch (const tcu::TestError& e)
6697 m_resultCollector.fail("Failed, got exception: " + string(e.getMessage()));
6701 return tcu::TestStatus::incomplete();
// All memory types done: report the aggregated verdict.
6705 return tcu::TestStatus(m_resultCollector.getResult(), m_resultCollector.getMessage());
// Registers the GLSL programs needed by the configured usages: point-based
// vertex/index rendering shaders plus a constant-white fragment shader.
// Shader version pragmas and closing "}" string lines are elided in this
// listing; string literals must not be altered — the test refers to the
// programs by these exact names.
6710 void init (vk::SourceCollections& sources, TestConfig config) const
6712 // Vertex buffer rendering
6713 if (config.usage & USAGE_VERTEX_BUFFER)
6715 const char* const vertexShader =
// Maps a_position in [0,1]^2 to clip space just inside [-1,1]^2 so every
// point lands on a distinct pixel of a 256x256 target.
6717 "layout(location = 0) in highp vec2 a_position;\n"
6718 "void main (void) {\n"
6719 "\tgl_PointSize = 1.0;\n"
6720 "\tgl_Position = vec4(1.998 * a_position - vec2(0.999), 0.0, 1.0);\n"
6723 sources.glslSources.add("vertex-buffer.vert")
6724 << glu::VertexSource(vertexShader);
6727 // Index buffer rendering
6728 if (config.usage & USAGE_INDEX_BUFFER)
6730 const char* const vertexShader =
// Derives the 2D position from gl_VertexIndex (row-major over a 256x256
// grid), so indices alone determine which pixel is written.
6733 "void main (void) {\n"
6734 "\tgl_PointSize = 1.0;\n"
6735 "\thighp vec2 pos = vec2(gl_VertexIndex % 256, gl_VertexIndex / 256) / vec2(255.0);\n"
6736 "\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
6739 sources.glslSources.add("index-buffer.vert")
6740 << glu::VertexSource(vertexShader);
// Shared fragment shader: writes opaque white to every covered pixel.
6744 const char* const fragmentShader =
6746 "layout(location = 0) out highp vec4 o_color;\n"
6747 "void main (void) {\n"
6748 "\to_color = vec4(1.0);\n"
6751 sources.glslSources.add("render-white.frag")
6752 << glu::FragmentSource(fragmentShader);
// Builds the "pipeline_barrier" test group: one sub-group per
// write-usage x read-usage combination, plus "all" (every usage) and
// "all_device" (every usage minus host read/write), each with one test case
// per memory size. Array element lines (sizes, most usages) and TestConfig
// field initializers are elided in this listing.
6759 tcu::TestCaseGroup* createPipelineBarrierTests (tcu::TestContext& testCtx)
6761 de::MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "pipeline_barrier", "Pipeline barrier tests."));
6762 const vk::VkDeviceSize sizes[] =
6769 const Usage usages[] =
6775 USAGE_VERTEX_BUFFER,
6778 const Usage readUsages[] =
6782 USAGE_VERTEX_BUFFER,
6786 const Usage writeUsages[] =
// Cross product of write and read usages; each pair becomes a sub-group
// named after the combined usage flags.
6792 for (size_t writeUsageNdx = 0; writeUsageNdx < DE_LENGTH_OF_ARRAY(writeUsages); writeUsageNdx++)
6794 const Usage writeUsage = writeUsages[writeUsageNdx];
6796 for (size_t readUsageNdx = 0; readUsageNdx < DE_LENGTH_OF_ARRAY(readUsages); readUsageNdx++)
6798 const Usage readUsage = readUsages[readUsageNdx];
6799 const Usage usage = writeUsage | readUsage;
6800 const string usageGroupName (usageToName(usage));
6801 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6803 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6805 const vk::VkDeviceSize size = sizes[sizeNdx];
// Test case is named after the byte size, e.g. "1024".
6806 const string testName (de::toString((deUint64)(size)));
6807 const TestConfig config =
6811 vk::VK_SHARING_MODE_EXCLUSIVE
6814 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
// get() + release(): ownership passed to 'group', MovePtr must not delete.
6817 group->addChild(usageGroup.get());
6818 usageGroup.release();
// Union of every usage flag, used by the "all" and "all_device" groups.
6823 Usage all = (Usage)0;
6825 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usages); usageNdx++)
6826 all = all | usages[usageNdx];
// "all": every usage including host read/write.
6829 const string usageGroupName ("all");
6830 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6832 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6834 const vk::VkDeviceSize size = sizes[sizeNdx];
6835 const string testName (de::toString((deUint64)(size)));
6836 const TestConfig config =
6840 vk::VK_SHARING_MODE_EXCLUSIVE
6843 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
6846 group->addChild(usageGroup.get());
6847 usageGroup.release();
// "all_device": every usage except host access.
6851 const string usageGroupName ("all_device");
6852 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6854 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6856 const vk::VkDeviceSize size = sizes[sizeNdx];
6857 const string testName (de::toString((deUint64)(size)));
6858 const TestConfig config =
6860 (Usage)(all & (~(USAGE_HOST_READ|USAGE_HOST_WRITE))),
6862 vk::VK_SHARING_MODE_EXCLUSIVE
6865 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
6868 group->addChild(usageGroup.get());
6869 usageGroup.release();
6873 return group.release();