1 /*-------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2015 Google Inc.
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and/or associated documentation files (the
9 * "Materials"), to deal in the Materials without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sublicense, and/or sell copies of the Materials, and to
12 * permit persons to whom the Materials are furnished to do so, subject to
13 * the following conditions:
15 * The above copyright notice(s) and this permission notice shall be
16 * included in all copies or substantial portions of the Materials.
18 * The Materials are Confidential Information as defined by the
19 * Khronos Membership Agreement until designated non-confidential by
20 * Khronos, at which point this condition clause shall be removed.
22 * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
23 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
24 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
25 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
26 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
27 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
28 * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
32 * \brief Pipeline barrier tests
33 *//*--------------------------------------------------------------------*/
35 #include "vktMemoryPipelineBarrierTests.hpp"
37 #include "vktTestCaseUtil.hpp"
40 #include "vkPlatform.hpp"
41 #include "vkRefUtil.hpp"
42 #include "vkQueryUtil.hpp"
43 #include "vkMemUtil.hpp"
44 #include "vkTypeUtil.hpp"
45 #include "vkPrograms.hpp"
47 #include "tcuMaybe.hpp"
48 #include "tcuTextureUtil.hpp"
49 #include "tcuTestLog.hpp"
50 #include "tcuResultCollector.hpp"
51 #include "tcuTexture.hpp"
52 #include "tcuImageCompare.hpp"
54 #include "deUniquePtr.hpp"
55 #include "deStringUtil.hpp"
56 #include "deRandom.hpp"
67 // \todo Check bufferImageGranularity
81 using tcu::ConstPixelBufferAccess;
82 using tcu::PixelBufferAccess;
83 using tcu::TextureFormat;
84 using tcu::TextureLevel;
94 ALL_PIPELINE_STAGES = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
95 | vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT
96 | vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
97 | vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
98 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
99 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
100 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
101 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
102 | vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
103 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
104 | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
105 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
106 | vk::VK_PIPELINE_STAGE_TRANSFER_BIT
107 | vk::VK_PIPELINE_STAGE_HOST_BIT
112 ALL_ACCESSES = vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT
113 | vk::VK_ACCESS_INDEX_READ_BIT
114 | vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
115 | vk::VK_ACCESS_UNIFORM_READ_BIT
116 | vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
117 | vk::VK_ACCESS_SHADER_READ_BIT
118 | vk::VK_ACCESS_SHADER_WRITE_BIT
119 | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
120 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
121 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
122 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
123 | vk::VK_ACCESS_TRANSFER_READ_BIT
124 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
125 | vk::VK_ACCESS_HOST_READ_BIT
126 | vk::VK_ACCESS_HOST_WRITE_BIT
127 | vk::VK_ACCESS_MEMORY_READ_BIT
128 | vk::VK_ACCESS_MEMORY_WRITE_BIT
133 // Mapped host read and write
134 USAGE_HOST_READ = (0x1u<<0),
135 USAGE_HOST_WRITE = (0x1u<<1),
137 // Copy and other transfer operations
138 USAGE_TRANSFER_SRC = (0x1u<<2),
139 USAGE_TRANSFER_DST = (0x1u<<3),
141 // Buffer usage flags
142 USAGE_INDEX_BUFFER = (0x1u<<4),
143 USAGE_VERTEX_BUFFER = (0x1u<<5),
145 USAGE_UNIFORM_BUFFER = (0x1u<<6),
146 USAGE_STORAGE_BUFFER = (0x1u<<7),
148 USAGE_UNIFORM_TEXEL_BUFFER = (0x1u<<8),
149 USAGE_STORAGE_TEXEL_BUFFER = (0x1u<<9),
151 // \todo This is probably almost impossible to do
152 USAGE_INDIRECT_BUFFER = (0x1u<<10),
154 // Texture usage flags
155 USAGE_TEXTURE_SAMPLED = (0x1u<<11),
156 USAGE_TEXTURE_STORAGE = (0x1u<<12),
157 USAGE_COLOR_ATTACHMENT = (0x1u<<13),
158 USAGE_INPUT_ATTACHMENT = (0x1u<<14),
159 USAGE_DEPTH_STENCIL_ATTACHMENT = (0x1u<<15),
162 bool supportsDeviceBufferWrites (Usage usage)
164 if (usage & USAGE_TRANSFER_DST)
167 if (usage & USAGE_STORAGE_BUFFER)
170 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
176 bool supportsDeviceImageWrites (Usage usage)
178 if (usage & USAGE_TRANSFER_DST)
181 if (usage & USAGE_TEXTURE_STORAGE)
184 if (usage & USAGE_COLOR_ATTACHMENT)
190 // Sequential access enums
193 ACCESS_INDIRECT_COMMAND_READ_BIT = 0,
194 ACCESS_INDEX_READ_BIT,
195 ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
196 ACCESS_UNIFORM_READ_BIT,
197 ACCESS_INPUT_ATTACHMENT_READ_BIT,
198 ACCESS_SHADER_READ_BIT,
199 ACCESS_SHADER_WRITE_BIT,
200 ACCESS_COLOR_ATTACHMENT_READ_BIT,
201 ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
202 ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
203 ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
204 ACCESS_TRANSFER_READ_BIT,
205 ACCESS_TRANSFER_WRITE_BIT,
206 ACCESS_HOST_READ_BIT,
207 ACCESS_HOST_WRITE_BIT,
208 ACCESS_MEMORY_READ_BIT,
209 ACCESS_MEMORY_WRITE_BIT,
214 // Sequential stage enums
217 PIPELINESTAGE_TOP_OF_PIPE_BIT = 0,
218 PIPELINESTAGE_DRAW_INDIRECT_BIT,
219 PIPELINESTAGE_VERTEX_INPUT_BIT,
220 PIPELINESTAGE_VERTEX_SHADER_BIT,
221 PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT,
222 PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT,
223 PIPELINESTAGE_GEOMETRY_SHADER_BIT,
224 PIPELINESTAGE_FRAGMENT_SHADER_BIT,
225 PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT,
226 PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT,
227 PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
228 PIPELINESTAGE_COMPUTE_SHADER_BIT,
229 PIPELINESTAGE_TRANSFER_BIT,
230 PIPELINESTAGE_HOST_BIT,
235 PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flags)
239 case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT: return PIPELINESTAGE_TOP_OF_PIPE_BIT;
240 case vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT: return PIPELINESTAGE_DRAW_INDIRECT_BIT;
241 case vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT: return PIPELINESTAGE_VERTEX_INPUT_BIT;
242 case vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT: return PIPELINESTAGE_VERTEX_SHADER_BIT;
243 case vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT;
244 case vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT: return PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT;
245 case vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT: return PIPELINESTAGE_GEOMETRY_SHADER_BIT;
246 case vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT: return PIPELINESTAGE_FRAGMENT_SHADER_BIT;
247 case vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT;
248 case vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT: return PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT;
249 case vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT: return PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
250 case vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT: return PIPELINESTAGE_COMPUTE_SHADER_BIT;
251 case vk::VK_PIPELINE_STAGE_TRANSFER_BIT: return PIPELINESTAGE_TRANSFER_BIT;
252 case vk::VK_PIPELINE_STAGE_HOST_BIT: return PIPELINESTAGE_HOST_BIT;
255 DE_FATAL("Unknown pipeline stage flags");
256 return PIPELINESTAGE_LAST;
260 Usage operator| (Usage a, Usage b)
262 return (Usage)((deUint32)a | (deUint32)b);
265 Usage operator& (Usage a, Usage b)
267 return (Usage)((deUint32)a & (deUint32)b);
270 string usageToName (Usage usage)
275 const char* const name;
278 { USAGE_HOST_READ, "host_read" },
279 { USAGE_HOST_WRITE, "host_write" },
281 { USAGE_TRANSFER_SRC, "transfer_src" },
282 { USAGE_TRANSFER_DST, "transfer_dst" },
284 { USAGE_INDEX_BUFFER, "index_buffer" },
285 { USAGE_VERTEX_BUFFER, "vertex_buffer" },
286 { USAGE_UNIFORM_BUFFER, "uniform_buffer" },
287 { USAGE_STORAGE_BUFFER, "storage_buffer" },
288 { USAGE_UNIFORM_TEXEL_BUFFER, "uniform_texel_buffer" },
289 { USAGE_STORAGE_TEXEL_BUFFER, "storage_texel_buffer" },
290 { USAGE_INDIRECT_BUFFER, "indirect_buffer" },
291 { USAGE_TEXTURE_SAMPLED, "sampled_texture" },
292 { USAGE_TEXTURE_STORAGE, "texture_storage" },
293 { USAGE_COLOR_ATTACHMENT, "color_attachment" },
294 { USAGE_INPUT_ATTACHMENT, "input_attachment" },
295 { USAGE_DEPTH_STENCIL_ATTACHMENT, "depth_stencil_attachment" },
298 std::ostringstream stream;
301 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usageNames); usageNdx++)
303 if (usage & usageNames[usageNdx].usage)
310 stream << usageNames[usageNdx].name;
317 vk::VkBufferUsageFlags usageToBufferUsageFlags (Usage usage)
319 vk::VkBufferUsageFlags flags = 0;
321 if (usage & USAGE_TRANSFER_SRC)
322 flags |= vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
324 if (usage & USAGE_TRANSFER_DST)
325 flags |= vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT;
327 if (usage & USAGE_INDEX_BUFFER)
328 flags |= vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
330 if (usage & USAGE_VERTEX_BUFFER)
331 flags |= vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
333 if (usage & USAGE_INDIRECT_BUFFER)
334 flags |= vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
336 if (usage & USAGE_UNIFORM_BUFFER)
337 flags |= vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
339 if (usage & USAGE_STORAGE_BUFFER)
340 flags |= vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
342 if (usage & USAGE_UNIFORM_TEXEL_BUFFER)
343 flags |= vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
345 if (usage & USAGE_STORAGE_TEXEL_BUFFER)
346 flags |= vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
351 vk::VkImageUsageFlags usageToImageUsageFlags (Usage usage)
353 vk::VkImageUsageFlags flags = 0;
355 if (usage & USAGE_TRANSFER_SRC)
356 flags |= vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
358 if (usage & USAGE_TRANSFER_DST)
359 flags |= vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT;
361 if (usage & USAGE_TEXTURE_SAMPLED)
362 flags |= vk::VK_IMAGE_USAGE_SAMPLED_BIT;
364 if (usage & USAGE_TEXTURE_STORAGE)
365 flags |= vk::VK_IMAGE_USAGE_STORAGE_BIT;
367 if (usage & USAGE_COLOR_ATTACHMENT)
368 flags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
370 if (usage & USAGE_INPUT_ATTACHMENT)
371 flags |= vk::VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
373 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
374 flags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
379 vk::VkPipelineStageFlags usageToStageFlags (Usage usage)
381 vk::VkPipelineStageFlags flags = 0;
383 if (usage & (USAGE_HOST_READ|USAGE_HOST_WRITE))
384 flags |= vk::VK_PIPELINE_STAGE_HOST_BIT;
386 if (usage & (USAGE_TRANSFER_SRC|USAGE_TRANSFER_DST))
387 flags |= vk::VK_PIPELINE_STAGE_TRANSFER_BIT;
389 if (usage & (USAGE_VERTEX_BUFFER|USAGE_INDEX_BUFFER))
390 flags |= vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
392 if (usage & USAGE_INDIRECT_BUFFER)
393 flags |= vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
396 (USAGE_UNIFORM_BUFFER
397 | USAGE_STORAGE_BUFFER
398 | USAGE_UNIFORM_TEXEL_BUFFER
399 | USAGE_STORAGE_TEXEL_BUFFER
400 | USAGE_TEXTURE_SAMPLED
401 | USAGE_TEXTURE_STORAGE))
403 flags |= (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
404 | vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
405 | vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
406 | vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
407 | vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
408 | vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
411 if (usage & USAGE_INPUT_ATTACHMENT)
412 flags |= vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
414 if (usage & USAGE_COLOR_ATTACHMENT)
415 flags |= vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
417 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
419 flags |= vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
420 | vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
426 vk::VkAccessFlags usageToAccessFlags (Usage usage)
428 vk::VkAccessFlags flags = 0;
430 if (usage & USAGE_HOST_READ)
431 flags |= vk::VK_ACCESS_HOST_READ_BIT;
433 if (usage & USAGE_HOST_WRITE)
434 flags |= vk::VK_ACCESS_HOST_WRITE_BIT;
436 if (usage & USAGE_TRANSFER_SRC)
437 flags |= vk::VK_ACCESS_TRANSFER_READ_BIT;
439 if (usage & USAGE_TRANSFER_DST)
440 flags |= vk::VK_ACCESS_TRANSFER_WRITE_BIT;
442 if (usage & USAGE_INDEX_BUFFER)
443 flags |= vk::VK_ACCESS_INDEX_READ_BIT;
445 if (usage & USAGE_VERTEX_BUFFER)
446 flags |= vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
448 if (usage & (USAGE_UNIFORM_BUFFER | USAGE_UNIFORM_TEXEL_BUFFER))
449 flags |= vk::VK_ACCESS_UNIFORM_READ_BIT;
451 if (usage & (USAGE_STORAGE_BUFFER
452 | USAGE_STORAGE_TEXEL_BUFFER
453 | USAGE_TEXTURE_SAMPLED
454 | USAGE_TEXTURE_STORAGE))
455 flags |= vk::VK_ACCESS_SHADER_READ_BIT | vk::VK_ACCESS_SHADER_WRITE_BIT;
457 if (usage & USAGE_INDIRECT_BUFFER)
458 flags |= vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
460 if (usage & USAGE_COLOR_ATTACHMENT)
461 flags |= vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
463 if (usage & USAGE_INPUT_ATTACHMENT)
464 flags |= vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
466 if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
467 flags |= vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
468 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
476 vk::VkDeviceSize size;
477 vk::VkSharingMode sharing;
480 vk::Move<vk::VkCommandBuffer> createCommandBuffer (const vk::DeviceInterface& vkd,
482 vk::VkCommandPool pool,
483 vk::VkCommandBufferLevel level)
485 const vk::VkCommandBufferAllocateInfo bufferInfo =
487 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
495 return vk::allocateCommandBuffer(vkd, device, &bufferInfo);
498 vk::Move<vk::VkCommandBuffer> createBeginCommandBuffer (const vk::DeviceInterface& vkd,
500 vk::VkCommandPool pool,
501 vk::VkCommandBufferLevel level)
503 const vk::VkCommandBufferBeginInfo beginInfo =
505 vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
516 vk::Move<vk::VkCommandBuffer> commandBuffer (createCommandBuffer(vkd, device, pool, level));
518 vkd.beginCommandBuffer(*commandBuffer, &beginInfo);
520 return commandBuffer;
523 vk::Move<vk::VkCommandPool> createCommandPool (const vk::DeviceInterface& vkd,
525 deUint32 queueFamilyIndex)
527 const vk::VkCommandPoolCreateInfo poolInfo =
529 vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
532 vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
536 return vk::createCommandPool(vkd, device, &poolInfo);
539 vk::Move<vk::VkBuffer> createBuffer (const vk::DeviceInterface& vkd,
541 vk::VkDeviceSize size,
542 vk::VkBufferUsageFlags usage,
543 vk::VkSharingMode sharingMode,
544 const vector<deUint32>& queueFamilies)
546 const vk::VkBufferCreateInfo createInfo =
548 vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
555 (deUint32)queueFamilies.size(),
559 return vk::createBuffer(vkd, device, &createInfo);
562 vk::Move<vk::VkDeviceMemory> allocMemory (const vk::DeviceInterface& vkd,
564 vk::VkDeviceSize size,
565 deUint32 memoryTypeIndex)
567 const vk::VkMemoryAllocateInfo alloc =
569 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // sType
576 return vk::allocateMemory(vkd, device, &alloc);
579 vk::Move<vk::VkDeviceMemory> bindBufferMemory (const vk::InstanceInterface& vki,
580 const vk::DeviceInterface& vkd,
581 vk::VkPhysicalDevice physicalDevice,
584 vk::VkMemoryPropertyFlags properties)
586 const vk::VkMemoryRequirements memoryRequirements = vk::getBufferMemoryRequirements(vkd, device, buffer);
587 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
588 deUint32 memoryTypeIndex;
590 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
592 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
593 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
597 const vk::VkMemoryAllocateInfo allocationInfo =
599 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
601 memoryRequirements.size,
604 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
606 VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0));
610 catch (const vk::Error& error)
612 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
613 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
615 // Try next memory type/heap if out of memory
619 // Throw all other errors forward
626 TCU_FAIL("Failed to allocate memory for buffer");
629 vk::Move<vk::VkDeviceMemory> bindImageMemory (const vk::InstanceInterface& vki,
630 const vk::DeviceInterface& vkd,
631 vk::VkPhysicalDevice physicalDevice,
634 vk::VkMemoryPropertyFlags properties)
636 const vk::VkMemoryRequirements memoryRequirements = vk::getImageMemoryRequirements(vkd, device, image);
637 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
638 deUint32 memoryTypeIndex;
640 for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
642 if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
643 && (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
647 const vk::VkMemoryAllocateInfo allocationInfo =
649 vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
651 memoryRequirements.size,
654 vk::Move<vk::VkDeviceMemory> memory (vk::allocateMemory(vkd, device, &allocationInfo));
656 VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0));
660 catch (const vk::Error& error)
662 if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
663 || error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
665 // Try next memory type/heap if out of memory
669 // Throw all other errors forward
676 TCU_FAIL("Failed to allocate memory for image");
679 void queueRun (const vk::DeviceInterface& vkd,
681 vk::VkCommandBuffer commandBuffer)
683 const vk::VkSubmitInfo submitInfo =
685 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
698 VK_CHECK(vkd.queueSubmit(queue, 1, &submitInfo, 0));
699 VK_CHECK(vkd.queueWaitIdle(queue));
702 void* mapMemory (const vk::DeviceInterface& vkd,
704 vk::VkDeviceMemory memory,
705 vk::VkDeviceSize size)
709 VK_CHECK(vkd.mapMemory(device, memory, 0, size, 0, &ptr));
714 class ReferenceMemory
717 ReferenceMemory (size_t size);
719 void set (size_t pos, deUint8 val);
720 deUint8 get (size_t pos) const;
721 bool isDefined (size_t pos) const;
723 void setDefined (size_t offset, size_t size, const void* data);
724 void setUndefined (size_t offset, size_t size);
725 void setData (size_t offset, size_t size, const void* data);
727 size_t getSize (void) const { return m_data.size(); }
730 vector<deUint8> m_data;
731 vector<deUint64> m_defined;
734 ReferenceMemory::ReferenceMemory (size_t size)
736 , m_defined (size / 64 + (size % 64 == 0 ? 0 : 1), 0ull)
740 void ReferenceMemory::set (size_t pos, deUint8 val)
743 m_defined[pos / 64] |= 0x1ull << (pos % 64);
746 void ReferenceMemory::setData (size_t offset, size_t size, const void* data_)
748 const deUint8* data = (const deUint8*)data_;
751 for (size_t pos = 0; pos < size; pos++)
753 m_data[offset + pos] = data[pos];
754 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
758 void ReferenceMemory::setUndefined (size_t offset, size_t size)
761 for (size_t pos = 0; pos < size; pos++)
762 m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
765 deUint8 ReferenceMemory::get (size_t pos) const
767 DE_ASSERT(isDefined(pos));
771 bool ReferenceMemory::isDefined (size_t pos) const
773 return (m_defined[pos / 64] & (0x1ull << (pos % 64))) != 0;
779 Memory (const vk::InstanceInterface& vki,
780 const vk::DeviceInterface& vkd,
781 vk::VkPhysicalDevice physicalDevice,
783 vk::VkDeviceSize size,
784 deUint32 memoryTypeIndex,
785 vk::VkDeviceSize maxBufferSize,
786 deInt32 maxImageWidth,
787 deInt32 maxImageHeight);
789 vk::VkDeviceSize getSize (void) const { return m_size; }
790 vk::VkDeviceSize getMaxBufferSize (void) const { return m_maxBufferSize; }
791 bool getSupportBuffers (void) const { return m_maxBufferSize > 0; }
793 deInt32 getMaxImageWidth (void) const { return m_maxImageWidth; }
794 deInt32 getMaxImageHeight (void) const { return m_maxImageHeight; }
795 bool getSupportImages (void) const { return m_maxImageWidth > 0; }
797 const vk::VkMemoryType& getMemoryType (void) const { return m_memoryType; }
798 deUint32 getMemoryTypeIndex (void) const { return m_memoryTypeIndex; }
799 vk::VkDeviceMemory getMemory (void) const { return *m_memory; }
802 const vk::VkDeviceSize m_size;
803 const deUint32 m_memoryTypeIndex;
804 const vk::VkMemoryType m_memoryType;
805 const vk::Unique<vk::VkDeviceMemory> m_memory;
806 const vk::VkDeviceSize m_maxBufferSize;
807 const deInt32 m_maxImageWidth;
808 const deInt32 m_maxImageHeight;
811 vk::VkMemoryType getMemoryTypeInfo (const vk::InstanceInterface& vki,
812 vk::VkPhysicalDevice device,
813 deUint32 memoryTypeIndex)
815 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, device);
817 DE_ASSERT(memoryTypeIndex < memoryProperties.memoryTypeCount);
819 return memoryProperties.memoryTypes[memoryTypeIndex];
822 vk::VkDeviceSize findMaxBufferSize (const vk::DeviceInterface& vkd,
825 vk::VkBufferUsageFlags usage,
826 vk::VkSharingMode sharingMode,
827 const vector<deUint32>& queueFamilies,
829 vk::VkDeviceSize memorySize,
830 deUint32 memoryTypeIndex)
832 vk::VkDeviceSize lastSuccess = 0;
833 vk::VkDeviceSize currentSize = memorySize / 2;
836 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, memorySize, usage, sharingMode, queueFamilies));
837 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
839 if (requirements.size == memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
843 for (vk::VkDeviceSize stepSize = memorySize / 4; currentSize > 0; stepSize /= 2)
845 const vk::Unique<vk::VkBuffer> buffer (createBuffer(vkd, device, currentSize, usage, sharingMode, queueFamilies));
846 const vk::VkMemoryRequirements requirements (vk::getBufferMemoryRequirements(vkd, device, *buffer));
848 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
850 lastSuccess = currentSize;
851 currentSize += stepSize;
854 currentSize -= stepSize;
863 // Round size down maximum W * H * 4, where W and H < 4096
864 vk::VkDeviceSize roundBufferSizeToWxHx4 (vk::VkDeviceSize size)
866 const vk::VkDeviceSize maxTextureSize = 4096;
867 vk::VkDeviceSize maxTexelCount = size / 4;
868 vk::VkDeviceSize bestW = de::max(maxTexelCount, maxTextureSize);
869 vk::VkDeviceSize bestH = maxTexelCount / bestW;
871 // \todo Could probably be faster?
872 for (vk::VkDeviceSize w = 1; w * w < maxTexelCount && w < maxTextureSize && bestW * bestH * 4 < size; w++)
874 const vk::VkDeviceSize h = maxTexelCount / w;
876 if (bestW * bestH < w * h)
883 return bestW * bestH * 4;
886 // Find RGBA8 image size that has exactly "size" of number of bytes.
887 // "size" must be W * H * 4 where W and H < 4096
888 IVec2 findImageSizeWxHx4 (vk::VkDeviceSize size)
890 const vk::VkDeviceSize maxTextureSize = 4096;
891 vk::VkDeviceSize texelCount = size / 4;
893 DE_ASSERT((size % 4) == 0);
895 // \todo Could probably be faster?
896 for (vk::VkDeviceSize w = 1; w < maxTextureSize && w < texelCount; w++)
898 const vk::VkDeviceSize h = texelCount / w;
900 if ((texelCount % w) == 0 && h < maxTextureSize)
901 return IVec2((int)w, (int)h);
904 DE_FATAL("Invalid size");
905 return IVec2(-1, -1);
908 IVec2 findMaxRGBA8ImageSize (const vk::DeviceInterface& vkd,
911 vk::VkBufferUsageFlags usage,
912 vk::VkSharingMode sharingMode,
913 const vector<deUint32>& queueFamilies,
915 vk::VkDeviceSize memorySize,
916 deUint32 memoryTypeIndex)
918 IVec2 lastSuccess (0);
922 const deUint32 texelCount = (deUint32)(memorySize / 4);
923 const deUint32 width = (deUint32)deFloatSqrt((float)texelCount);
924 const deUint32 height = texelCount / width;
926 currentSize[0] = deMaxu32(width, height);
927 currentSize[1] = deMinu32(width, height);
930 for (deInt32 stepSize = currentSize[0] / 2; currentSize[0] > 0; stepSize /= 2)
932 const vk::VkImageCreateInfo createInfo =
934 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
938 vk::VK_IMAGE_TYPE_2D,
939 vk::VK_FORMAT_R8G8B8A8_UNORM,
946 vk::VK_SAMPLE_COUNT_1_BIT,
947 vk::VK_IMAGE_TILING_OPTIMAL,
950 (deUint32)queueFamilies.size(),
952 vk::VK_IMAGE_LAYOUT_UNDEFINED
954 const vk::Unique<vk::VkImage> image (vk::createImage(vkd, device, &createInfo));
955 const vk::VkMemoryRequirements requirements (vk::getImageMemoryRequirements(vkd, device, *image));
957 if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
959 lastSuccess = currentSize;
960 currentSize[0] += stepSize;
961 currentSize[1] += stepSize;
965 currentSize[0] -= stepSize;
966 currentSize[1] -= stepSize;
976 Memory::Memory (const vk::InstanceInterface& vki,
977 const vk::DeviceInterface& vkd,
978 vk::VkPhysicalDevice physicalDevice,
980 vk::VkDeviceSize size,
981 deUint32 memoryTypeIndex,
982 vk::VkDeviceSize maxBufferSize,
983 deInt32 maxImageWidth,
984 deInt32 maxImageHeight)
986 , m_memoryTypeIndex (memoryTypeIndex)
987 , m_memoryType (getMemoryTypeInfo(vki, physicalDevice, memoryTypeIndex))
988 , m_memory (allocMemory(vkd, device, size, memoryTypeIndex))
989 , m_maxBufferSize (maxBufferSize)
990 , m_maxImageWidth (maxImageWidth)
991 , m_maxImageHeight (maxImageHeight)
998 Context (const vk::InstanceInterface& vki,
999 const vk::DeviceInterface& vkd,
1000 vk::VkPhysicalDevice physicalDevice,
1001 vk::VkDevice device,
1003 deUint32 queueFamilyIndex,
1004 const vector<pair<deUint32, vk::VkQueue> >& queues,
1005 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection)
1008 , m_physicalDevice (physicalDevice)
1011 , m_queueFamilyIndex (queueFamilyIndex)
1013 , m_commandPool (createCommandPool(vkd, device, queueFamilyIndex))
1014 , m_binaryCollection (binaryCollection)
1016 for (size_t queueNdx = 0; queueNdx < m_queues.size(); queueNdx++)
1017 m_queueFamilies.push_back(m_queues[queueNdx].first);
1020 const vk::InstanceInterface& getInstanceInterface (void) const { return m_vki; }
1021 vk::VkPhysicalDevice getPhysicalDevice (void) const { return m_physicalDevice; }
1022 vk::VkDevice getDevice (void) const { return m_device; }
1023 const vk::DeviceInterface& getDeviceInterface (void) const { return m_vkd; }
1024 vk::VkQueue getQueue (void) const { return m_queue; }
1025 deUint32 getQueueFamily (void) const { return m_queueFamilyIndex; }
1026 const vector<pair<deUint32, vk::VkQueue> >& getQueues (void) const { return m_queues; }
1027 const vector<deUint32> getQueueFamilies (void) const { return m_queueFamilies; }
1028 vk::VkCommandPool getCommandPool (void) const { return *m_commandPool; }
1029 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_binaryCollection; }
1032 const vk::InstanceInterface& m_vki;
1033 const vk::DeviceInterface& m_vkd;
1034 const vk::VkPhysicalDevice m_physicalDevice;
1035 const vk::VkDevice m_device;
1036 const vk::VkQueue m_queue;
1037 const deUint32 m_queueFamilyIndex;
1038 const vector<pair<deUint32, vk::VkQueue> >& m_queues;
1039 const vk::Unique<vk::VkCommandPool> m_commandPool;
1040 const vk::ProgramCollection<vk::ProgramBinary>& m_binaryCollection;
1041 vector<deUint32> m_queueFamilies;
1044 class PrepareContext
1047 PrepareContext (const Context& context,
1048 const Memory& memory)
1049 : m_context (context)
1054 const Memory& getMemory (void) const { return m_memory; }
1055 const Context& getContext (void) const { return m_context; }
1056 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_context.getBinaryCollection(); }
1058 void setBuffer (vk::Move<vk::VkBuffer> buffer,
1059 vk::VkDeviceSize size)
1061 DE_ASSERT(!m_currentImage);
1062 DE_ASSERT(!m_currentBuffer);
1064 m_currentBuffer = buffer;
1065 m_currentBufferSize = size;
1068 vk::VkBuffer getBuffer (void) const { return *m_currentBuffer; }
1069 vk::VkDeviceSize getBufferSize (void) const
1071 DE_ASSERT(m_currentBuffer);
1072 return m_currentBufferSize;
1075 void releaseBuffer (void) { m_currentBuffer.disown(); }
1077 void setImage (vk::Move<vk::VkImage> image,
1078 vk::VkImageLayout layout,
1079 vk::VkDeviceSize memorySize,
1083 DE_ASSERT(!m_currentImage);
1084 DE_ASSERT(!m_currentBuffer);
1086 m_currentImage = image;
1087 m_currentImageMemorySize = memorySize;
1088 m_currentImageLayout = layout;
1089 m_currentImageWidth = width;
1090 m_currentImageHeight = height;
1093 void setImageLayout (vk::VkImageLayout layout)
1095 DE_ASSERT(m_currentImage);
1096 m_currentImageLayout = layout;
1099 vk::VkImage getImage (void) const { return *m_currentImage; }
1100 deInt32 getImageWidth (void) const
1102 DE_ASSERT(m_currentImage);
1103 return m_currentImageWidth;
1105 deInt32 getImageHeight (void) const
1107 DE_ASSERT(m_currentImage);
1108 return m_currentImageHeight;
1110 vk::VkDeviceSize getImageMemorySize (void) const
1112 DE_ASSERT(m_currentImage);
1113 return m_currentImageMemorySize;
1116 void releaseImage (void) { m_currentImage.disown(); }
1118 vk::VkImageLayout getImageLayout (void) const
1120 DE_ASSERT(m_currentImage);
1121 return m_currentImageLayout;
1125 const Context& m_context;
1126 const Memory& m_memory;
1128 vk::Move<vk::VkBuffer> m_currentBuffer;
1129 vk::VkDeviceSize m_currentBufferSize;
1131 vk::Move<vk::VkImage> m_currentImage;
1132 vk::VkDeviceSize m_currentImageMemorySize;
1133 vk::VkImageLayout m_currentImageLayout;
1134 deInt32 m_currentImageWidth;
1135 deInt32 m_currentImageHeight;
1138 class ExecuteContext
1141 ExecuteContext (const Context& context)
1142 : m_context (context)
1146 const Context& getContext (void) const { return m_context; }
1147 void setMapping (void* ptr) { m_mapping = ptr; }
1148 void* getMapping (void) const { return m_mapping; }
1151 const Context& m_context;
1158 VerifyContext (TestLog& log,
1159 tcu::ResultCollector& resultCollector,
1160 const Context& context,
1161 vk::VkDeviceSize size)
1163 , m_resultCollector (resultCollector)
1164 , m_context (context)
1165 , m_reference ((size_t)size)
1169 const Context& getContext (void) const { return m_context; }
1170 TestLog& getLog (void) const { return m_log; }
1171 tcu::ResultCollector& getResultCollector (void) const { return m_resultCollector; }
1173 ReferenceMemory& getReference (void) { return m_reference; }
1174 TextureLevel& getReferenceImage (void) { return m_referenceImage;}
1178 tcu::ResultCollector& m_resultCollector;
1179 const Context& m_context;
1180 ReferenceMemory m_reference;
1181 TextureLevel m_referenceImage;
1187 // Constructor should allocate all non-vulkan resources.
1188 virtual ~Command (void) {}
1190 // Get name of the command
1191 virtual const char* getName (void) const = 0;
1193 // Log prepare operations
1194 virtual void logPrepare (TestLog&, size_t) const {}
1195 // Log executed operations
1196 virtual void logExecute (TestLog&, size_t) const {}
1198 // Prepare should allocate all vulkan resources and resources that require
1199 // that buffer or memory has been already allocated. This should build all
1200 // command buffers etc.
1201 virtual void prepare (PrepareContext&) {}
1203 // Execute command. Write or read mapped memory, submit commands to queue
1205 virtual void execute (ExecuteContext&) {}
1207 // Verify that results are correct.
1208 virtual void verify (VerifyContext&, size_t) {}
1211 // Allow only inheritance
1216 Command (const Command&);
1217 Command& operator& (const Command&);
1220 class Map : public Command
1225 const char* getName (void) const { return "Map"; }
1228 void logExecute (TestLog& log, size_t commandIndex) const
1230 log << TestLog::Message << commandIndex << ":" << getName() << " Map memory" << TestLog::EndMessage;
1233 void prepare (PrepareContext& context)
1235 m_memory = context.getMemory().getMemory();
1236 m_size = context.getMemory().getSize();
1239 void execute (ExecuteContext& context)
1241 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1242 const vk::VkDevice device = context.getContext().getDevice();
1244 context.setMapping(mapMemory(vkd, device, m_memory, m_size));
1248 vk::VkDeviceMemory m_memory;
1249 vk::VkDeviceSize m_size;
1252 class UnMap : public Command
1257 const char* getName (void) const { return "UnMap"; }
1259 void logExecute (TestLog& log, size_t commandIndex) const
1261 log << TestLog::Message << commandIndex << ": Unmap memory" << TestLog::EndMessage;
1264 void prepare (PrepareContext& context)
1266 m_memory = context.getMemory().getMemory();
1269 void execute (ExecuteContext& context)
1271 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1272 const vk::VkDevice device = context.getContext().getDevice();
1274 vkd.unmapMemory(device, m_memory);
1275 context.setMapping(DE_NULL);
1279 vk::VkDeviceMemory m_memory;
1282 class Invalidate : public Command
1285 Invalidate (void) {}
1286 ~Invalidate (void) {}
1287 const char* getName (void) const { return "Invalidate"; }
1289 void logExecute (TestLog& log, size_t commandIndex) const
1291 log << TestLog::Message << commandIndex << ": Invalidate mapped memory" << TestLog::EndMessage;
1294 void prepare (PrepareContext& context)
1296 m_memory = context.getMemory().getMemory();
1297 m_size = context.getMemory().getSize();
1300 void execute (ExecuteContext& context)
1302 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1303 const vk::VkDevice device = context.getContext().getDevice();
1305 vk::invalidateMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1309 vk::VkDeviceMemory m_memory;
1310 vk::VkDeviceSize m_size;
1313 class Flush : public Command
1318 const char* getName (void) const { return "Flush"; }
1320 void logExecute (TestLog& log, size_t commandIndex) const
1322 log << TestLog::Message << commandIndex << ": Flush mapped memory" << TestLog::EndMessage;
1325 void prepare (PrepareContext& context)
1327 m_memory = context.getMemory().getMemory();
1328 m_size = context.getMemory().getSize();
1331 void execute (ExecuteContext& context)
1333 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1334 const vk::VkDevice device = context.getContext().getDevice();
1336 vk::flushMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1340 vk::VkDeviceMemory m_memory;
1341 vk::VkDeviceSize m_size;
1344 // Host memory reads and writes
1345 class HostMemoryAccess : public Command
1348 HostMemoryAccess (bool read, bool write, deUint32 seed);
1349 ~HostMemoryAccess (void) {}
1350 const char* getName (void) const { return "HostMemoryAccess"; }
1352 void logExecute (TestLog& log, size_t commandIndex) const;
1353 void prepare (PrepareContext& context);
1354 void execute (ExecuteContext& context);
1356 void verify (VerifyContext& context, size_t commandIndex);
1361 const deUint32 m_seed;
1364 vector<deUint8> m_readData;
1367 HostMemoryAccess::HostMemoryAccess (bool read, bool write, deUint32 seed)
1374 void HostMemoryAccess::logExecute (TestLog& log, size_t commandIndex) const
1376 log << TestLog::Message << commandIndex << ": Host memory access:" << (m_read ? " read" : "") << (m_write ? " write" : "") << ", seed: " << m_seed << TestLog::EndMessage;
1379 void HostMemoryAccess::prepare (PrepareContext& context)
1381 m_size = (size_t)context.getMemory().getSize();
1384 m_readData.resize(m_size, 0);
1387 void HostMemoryAccess::execute (ExecuteContext& context)
1389 de::Random rng (m_seed);
1390 deUint8* const ptr = (deUint8*)context.getMapping();
1392 if (m_read && m_write)
1394 for (size_t pos = 0; pos < m_size; pos++)
1396 const deUint8 mask = rng.getUint8();
1397 const deUint8 value = ptr[pos];
1399 m_readData[pos] = value;
1400 ptr[pos] = value ^ mask;
1405 for (size_t pos = 0; pos < m_size; pos++)
1407 const deUint8 value = ptr[pos];
1409 m_readData[pos] = value;
1414 for (size_t pos = 0; pos < m_size; pos++)
1416 const deUint8 value = rng.getUint8();
1422 DE_FATAL("Host memory access without read or write.");
1425 void HostMemoryAccess::verify (VerifyContext& context, size_t commandIndex)
1427 tcu::ResultCollector& resultCollector = context.getResultCollector();
1428 ReferenceMemory& reference = context.getReference();
1429 de::Random rng (m_seed);
1431 if (m_read && m_write)
1433 for (size_t pos = 0; pos < m_size; pos++)
1435 const deUint8 mask = rng.getUint8();
1436 const deUint8 value = m_readData[pos];
1438 if (reference.isDefined(pos))
1440 if (value != reference.get(pos))
1442 resultCollector.fail(
1443 de::toString(commandIndex) + ":" + getName()
1444 + " Result differs from reference, Expected: "
1445 + de::toString(tcu::toHex<8>(reference.get(pos)))
1447 + de::toString(tcu::toHex<8>(value))
1449 + de::toString(pos));
1454 reference.set(pos, value ^ mask);
1459 for (size_t pos = 0; pos < m_size; pos++)
1461 const deUint8 value = m_readData[pos];
1463 if (reference.isDefined(pos))
1465 if (value != reference.get(pos))
1467 resultCollector.fail(
1468 de::toString(commandIndex) + ":" + getName()
1469 + " Result differs from reference, Expected: "
1470 + de::toString(tcu::toHex<8>(reference.get(pos)))
1472 + de::toString(tcu::toHex<8>(value))
1474 + de::toString(pos));
1479 reference.set(pos, value);
1484 for (size_t pos = 0; pos < m_size; pos++)
1486 const deUint8 value = rng.getUint8();
1488 reference.set(pos, value);
1492 DE_FATAL("Host memory access without read or write.");
1495 class CreateBuffer : public Command
1498 CreateBuffer (vk::VkBufferUsageFlags usage,
1499 vk::VkSharingMode sharing);
1500 ~CreateBuffer (void) {}
1501 const char* getName (void) const { return "CreateBuffer"; }
1503 void logPrepare (TestLog& log, size_t commandIndex) const;
1504 void prepare (PrepareContext& context);
1507 const vk::VkBufferUsageFlags m_usage;
1508 const vk::VkSharingMode m_sharing;
1511 CreateBuffer::CreateBuffer (vk::VkBufferUsageFlags usage,
1512 vk::VkSharingMode sharing)
1514 , m_sharing (sharing)
1518 void CreateBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1520 log << TestLog::Message << commandIndex << ":" << getName() << " Create buffer, Sharing mode: " << m_sharing << ", Usage: " << vk::getBufferUsageFlagsStr(m_usage) << TestLog::EndMessage;
1523 void CreateBuffer::prepare (PrepareContext& context)
1525 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1526 const vk::VkDevice device = context.getContext().getDevice();
1527 const vk::VkDeviceSize bufferSize = context.getMemory().getMaxBufferSize();
1528 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
1530 context.setBuffer(createBuffer(vkd, device, bufferSize, m_usage, m_sharing, queueFamilies), bufferSize);
1533 class DestroyBuffer : public Command
1536 DestroyBuffer (void);
1537 ~DestroyBuffer (void) {}
1538 const char* getName (void) const { return "DestroyBuffer"; }
1540 void logExecute (TestLog& log, size_t commandIndex) const;
1541 void prepare (PrepareContext& context);
1542 void execute (ExecuteContext& context);
1545 vk::Move<vk::VkBuffer> m_buffer;
1548 DestroyBuffer::DestroyBuffer (void)
1552 void DestroyBuffer::prepare (PrepareContext& context)
1554 m_buffer = vk::Move<vk::VkBuffer>(vk::check(context.getBuffer()), vk::Deleter<vk::VkBuffer>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1555 context.releaseBuffer();
1558 void DestroyBuffer::logExecute (TestLog& log, size_t commandIndex) const
1560 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy buffer" << TestLog::EndMessage;
1563 void DestroyBuffer::execute (ExecuteContext& context)
1565 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1566 const vk::VkDevice device = context.getContext().getDevice();
1568 vkd.destroyBuffer(device, m_buffer.disown(), DE_NULL);
1571 class BindBufferMemory : public Command
1574 BindBufferMemory (void) {}
1575 ~BindBufferMemory (void) {}
1576 const char* getName (void) const { return "BindBufferMemory"; }
1578 void logPrepare (TestLog& log, size_t commandIndex) const;
1579 void prepare (PrepareContext& context);
1582 void BindBufferMemory::logPrepare (TestLog& log, size_t commandIndex) const
1584 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to buffer" << TestLog::EndMessage;
1587 void BindBufferMemory::prepare (PrepareContext& context)
1589 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1590 const vk::VkDevice device = context.getContext().getDevice();
1592 VK_CHECK(vkd.bindBufferMemory(device, context.getBuffer(), context.getMemory().getMemory(), 0));
1595 class CreateImage : public Command
1598 CreateImage (vk::VkImageUsageFlags usage,
1599 vk::VkSharingMode sharing);
1600 ~CreateImage (void) {}
1601 const char* getName (void) const { return "CreateImage"; }
1603 void logPrepare (TestLog& log, size_t commandIndex) const;
1604 void prepare (PrepareContext& context);
1605 void verify (VerifyContext& context, size_t commandIndex);
1608 const vk::VkImageUsageFlags m_usage;
1609 const vk::VkSharingMode m_sharing;
1610 deInt32 m_imageWidth;
1611 deInt32 m_imageHeight;
1614 CreateImage::CreateImage (vk::VkImageUsageFlags usage,
1615 vk::VkSharingMode sharing)
1617 , m_sharing (sharing)
1621 void CreateImage::logPrepare (TestLog& log, size_t commandIndex) const
1623 log << TestLog::Message << commandIndex << ":" << getName() << " Create image, sharing: " << m_sharing << ", usage: " << vk::getImageUsageFlagsStr(m_usage) << TestLog::EndMessage;
1626 void CreateImage::prepare (PrepareContext& context)
1628 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1629 const vk::VkDevice device = context.getContext().getDevice();
1630 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
1632 m_imageWidth = context.getMemory().getMaxImageWidth();
1633 m_imageHeight = context.getMemory().getMaxImageHeight();
1636 const vk::VkImageCreateInfo createInfo =
1638 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
1642 vk::VK_IMAGE_TYPE_2D,
1643 vk::VK_FORMAT_R8G8B8A8_UNORM,
1650 vk::VK_SAMPLE_COUNT_1_BIT,
1651 vk::VK_IMAGE_TILING_OPTIMAL,
1654 (deUint32)queueFamilies.size(),
1656 vk::VK_IMAGE_LAYOUT_UNDEFINED
1658 vk::Move<vk::VkImage> image (createImage(vkd, device, &createInfo));
1659 const vk::VkMemoryRequirements requirements = vk::getImageMemoryRequirements(vkd, device, *image);
1661 context.setImage(image, vk::VK_IMAGE_LAYOUT_UNDEFINED, requirements.size, m_imageWidth, m_imageHeight);
1665 void CreateImage::verify (VerifyContext& context, size_t)
1667 context.getReferenceImage() = TextureLevel(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight);
1670 class DestroyImage : public Command
1673 DestroyImage (void);
1674 ~DestroyImage (void) {}
1675 const char* getName (void) const { return "DestroyImage"; }
1677 void logExecute (TestLog& log, size_t commandIndex) const;
1678 void prepare (PrepareContext& context);
1679 void execute (ExecuteContext& context);
1682 vk::Move<vk::VkImage> m_image;
1685 DestroyImage::DestroyImage (void)
1689 void DestroyImage::prepare (PrepareContext& context)
1691 m_image = vk::Move<vk::VkImage>(vk::check(context.getImage()), vk::Deleter<vk::VkImage>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1692 context.releaseImage();
1696 void DestroyImage::logExecute (TestLog& log, size_t commandIndex) const
1698 log << TestLog::Message << commandIndex << ":" << getName() << " Destroy image" << TestLog::EndMessage;
1701 void DestroyImage::execute (ExecuteContext& context)
1703 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1704 const vk::VkDevice device = context.getContext().getDevice();
1706 vkd.destroyImage(device, m_image.disown(), DE_NULL);
1709 class BindImageMemory : public Command
1712 BindImageMemory (void) {}
1713 ~BindImageMemory (void) {}
1714 const char* getName (void) const { return "BindImageMemory"; }
1716 void logPrepare (TestLog& log, size_t commandIndex) const;
1717 void prepare (PrepareContext& context);
1720 void BindImageMemory::logPrepare (TestLog& log, size_t commandIndex) const
1722 log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to image" << TestLog::EndMessage;
1725 void BindImageMemory::prepare (PrepareContext& context)
1727 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1728 const vk::VkDevice device = context.getContext().getDevice();
1730 VK_CHECK(vkd.bindImageMemory(device, context.getImage(), context.getMemory().getMemory(), 0));
1733 class QueueWaitIdle : public Command
1736 QueueWaitIdle (void) {}
1737 ~QueueWaitIdle (void) {}
1738 const char* getName (void) const { return "QueuetWaitIdle"; }
1740 void logExecute (TestLog& log, size_t commandIndex) const;
1741 void execute (ExecuteContext& context);
1744 void QueueWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1746 log << TestLog::Message << commandIndex << ":" << getName() << " Queue wait idle" << TestLog::EndMessage;
1749 void QueueWaitIdle::execute (ExecuteContext& context)
1751 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1752 const vk::VkQueue queue = context.getContext().getQueue();
1754 VK_CHECK(vkd.queueWaitIdle(queue));
1757 class DeviceWaitIdle : public Command
1760 DeviceWaitIdle (void) {}
1761 ~DeviceWaitIdle (void) {}
1762 const char* getName (void) const { return "DeviceWaitIdle"; }
1764 void logExecute (TestLog& log, size_t commandIndex) const;
1765 void execute (ExecuteContext& context);
1768 void DeviceWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1770 log << TestLog::Message << commandIndex << ":" << getName() << " Device wait idle" << TestLog::EndMessage;
1773 void DeviceWaitIdle::execute (ExecuteContext& context)
1775 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1776 const vk::VkDevice device = context.getContext().getDevice();
1778 VK_CHECK(vkd.deviceWaitIdle(device));
1784 SubmitContext (const PrepareContext& context,
1785 const vk::VkCommandBuffer commandBuffer)
1786 : m_context (context)
1787 , m_commandBuffer (commandBuffer)
1791 const Memory& getMemory (void) const { return m_context.getMemory(); }
1792 const Context& getContext (void) const { return m_context.getContext(); }
1793 vk::VkCommandBuffer getCommandBuffer (void) const { return m_commandBuffer; }
1795 vk::VkBuffer getBuffer (void) const { return m_context.getBuffer(); }
1796 vk::VkDeviceSize getBufferSize (void) const { return m_context.getBufferSize(); }
1798 vk::VkImage getImage (void) const { return m_context.getImage(); }
1799 deInt32 getImageWidth (void) const { return m_context.getImageWidth(); }
1800 deInt32 getImageHeight (void) const { return m_context.getImageHeight(); }
1801 vk::VkImageLayout getImageLayout (void) const { return m_context.getImageLayout(); }
1804 const PrepareContext& m_context;
1805 const vk::VkCommandBuffer m_commandBuffer;
1811 virtual ~CmdCommand (void) {}
1812 virtual const char* getName (void) const = 0;
1814 // Log things that are done during prepare
1815 virtual void logPrepare (TestLog&, size_t) const {}
1816 // Log submitted calls etc.
1817 virtual void logSubmit (TestLog&, size_t) const {}
1819 // Allocate vulkan resources and prepare for submit.
1820 virtual void prepare (PrepareContext&) {}
1822 // Submit commands to command buffer.
1823 virtual void submit (SubmitContext&) {}
1826 virtual void verify (VerifyContext&, size_t) {}
1829 class SubmitCommandBuffer : public Command
1832 SubmitCommandBuffer (const vector<CmdCommand*>& commands);
1833 ~SubmitCommandBuffer (void);
1835 const char* getName (void) const { return "SubmitCommandBuffer"; }
1836 void logExecute (TestLog& log, size_t commandIndex) const;
1837 void logPrepare (TestLog& log, size_t commandIndex) const;
1839 // Allocate command buffer and submit commands to command buffer
1840 void prepare (PrepareContext& context);
1841 void execute (ExecuteContext& context);
1843 // Verify that results are correct.
1844 void verify (VerifyContext& context, size_t commandIndex);
1847 vector<CmdCommand*> m_commands;
1848 vk::Move<vk::VkCommandBuffer> m_commandBuffer;
1851 SubmitCommandBuffer::SubmitCommandBuffer (const vector<CmdCommand*>& commands)
1852 : m_commands (commands)
1856 SubmitCommandBuffer::~SubmitCommandBuffer (void)
1858 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1859 delete m_commands[cmdNdx];
1862 void SubmitCommandBuffer::prepare (PrepareContext& context)
1864 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1865 const vk::VkDevice device = context.getContext().getDevice();
1866 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
1868 m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);
1870 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1872 CmdCommand& command = *m_commands[cmdNdx];
1874 command.prepare(context);
1878 SubmitContext submitContext (context, *m_commandBuffer);
1880 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1882 CmdCommand& command = *m_commands[cmdNdx];
1884 command.submit(submitContext);
1887 VK_CHECK(vkd.endCommandBuffer(*m_commandBuffer));
1891 void SubmitCommandBuffer::execute (ExecuteContext& context)
1893 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1894 const vk::VkCommandBuffer cmd = *m_commandBuffer;
1895 const vk::VkQueue queue = context.getContext().getQueue();
1896 const vk::VkSubmitInfo submit =
1898 vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
1911 vkd.queueSubmit(queue, 1, &submit, 0);
1914 void SubmitCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
1916 const string sectionName (de::toString(commandIndex) + ":" + getName());
1917 const tcu::ScopedLogSection section (context.getLog(), sectionName, sectionName);
1919 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1920 m_commands[cmdNdx]->verify(context, cmdNdx);
1923 void SubmitCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1925 const string sectionName (de::toString(commandIndex) + ":" + getName());
1926 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1928 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1929 m_commands[cmdNdx]->logPrepare(log, cmdNdx);
1932 void SubmitCommandBuffer::logExecute (TestLog& log, size_t commandIndex) const
1934 const string sectionName (de::toString(commandIndex) + ":" + getName());
1935 const tcu::ScopedLogSection section (log, sectionName, sectionName);
1937 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1938 m_commands[cmdNdx]->logSubmit(log, cmdNdx);
1941 class PipelineBarrier : public CmdCommand
1951 PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1952 const vk::VkAccessFlags srcAccesses,
1953 const vk::VkPipelineStageFlags dstStages,
1954 const vk::VkAccessFlags dstAccesses,
1956 ~PipelineBarrier (void) {}
1957 const char* getName (void) const { return "PipelineBarrier"; }
1959 void logSubmit (TestLog& log, size_t commandIndex) const;
1960 void submit (SubmitContext& context);
1963 const vk::VkPipelineStageFlags m_srcStages;
1964 const vk::VkAccessFlags m_srcAccesses;
1965 const vk::VkPipelineStageFlags m_dstStages;
1966 const vk::VkAccessFlags m_dstAccesses;
1970 PipelineBarrier::PipelineBarrier (const vk::VkPipelineStageFlags srcStages,
1971 const vk::VkAccessFlags srcAccesses,
1972 const vk::VkPipelineStageFlags dstStages,
1973 const vk::VkAccessFlags dstAccesses,
1975 : m_srcStages (srcStages)
1976 , m_srcAccesses (srcAccesses)
1977 , m_dstStages (dstStages)
1978 , m_dstAccesses (dstAccesses)
1983 void PipelineBarrier::logSubmit (TestLog& log, size_t commandIndex) const
1985 log << TestLog::Message << commandIndex << ":" << getName()
1986 << " " << (m_type == TYPE_GLOBAL ? "Global pipeline barrier"
1987 : m_type == TYPE_BUFFER ? "Buffer pipeline barrier"
1988 : "Image pipeline barrier")
1989 << ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
1990 << ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses) << TestLog::EndMessage;
1993 void PipelineBarrier::submit (SubmitContext& context)
1995 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
1996 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2002 const vk::VkMemoryBarrier barrier =
2004 vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,
2010 const void* const barriers[] =
2015 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, 0, 1, &barriers[0]);
2021 const vk::VkBufferMemoryBarrier barrier =
2023 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2029 vk::VK_QUEUE_FAMILY_IGNORED,
2030 vk::VK_QUEUE_FAMILY_IGNORED,
2032 context.getBuffer(),
2036 const void* const barriers[] =
2041 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, 0, 1, &barriers[0]);
2047 const vk::VkImageMemoryBarrier barrier =
2049 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2055 context.getImageLayout(),
2056 context.getImageLayout(),
2058 vk::VK_QUEUE_FAMILY_IGNORED,
2059 vk::VK_QUEUE_FAMILY_IGNORED,
2063 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2068 const void* const barriers[] =
2073 vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, 0, 1, &barriers[0]);
2078 DE_FATAL("Unknown pipeline barrier type");
2082 class ImageTransition : public CmdCommand
2085 ImageTransition (void) {}
2086 ~ImageTransition (void) {}
2087 const char* getName (void) const { return "ImageTransition"; }
2089 void logSubmit (TestLog& log, size_t commandIndex) const;
2090 void submit (SubmitContext& context);
2093 void ImageTransition::logSubmit (TestLog& log, size_t commandIndex) const
2095 log << TestLog::Message << commandIndex << ":" << getName() << " Use pipeline barrier to trasition to VK_IMAGE_LAYOUT_GENERAL." << TestLog::EndMessage;
2098 void ImageTransition::submit (SubmitContext& context)
2100 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2101 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2102 const vk::VkImageMemoryBarrier barrier =
2104 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2110 context.getImageLayout(),
2111 vk::VK_IMAGE_LAYOUT_GENERAL,
2113 vk::VK_QUEUE_FAMILY_IGNORED,
2114 vk::VK_QUEUE_FAMILY_IGNORED,
2118 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2123 const void* const barriers[] =
2128 vkd.cmdPipelineBarrier(cmd, ALL_PIPELINE_STAGES, ALL_PIPELINE_STAGES, 0, 1, &barriers[0]);
2131 class FillBuffer : public CmdCommand
2134 FillBuffer (deUint32 value) : m_value(value) {}
2135 ~FillBuffer (void) {}
2136 const char* getName (void) const { return "FillBuffer"; }
2138 void logSubmit (TestLog& log, size_t commandIndex) const;
2139 void submit (SubmitContext& context);
2140 void verify (VerifyContext& context, size_t commandIndex);
2143 const deUint32 m_value;
2144 vk::VkDeviceSize m_bufferSize;
2147 void FillBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2149 log << TestLog::Message << commandIndex << ":" << getName() << " Fill value: " << m_value << TestLog::EndMessage;
2152 void FillBuffer::submit (SubmitContext& context)
2154 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2155 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2156 const vk::VkBuffer buffer = context.getBuffer();
2157 const vk::VkDeviceSize sizeMask = ~(0x3ull); // \note Round down to multiple of 4
2159 m_bufferSize = sizeMask & context.getBufferSize();
2160 vkd.cmdFillBuffer(cmd, buffer, 0, m_bufferSize, m_value);
2163 void FillBuffer::verify (VerifyContext& context, size_t)
2165 ReferenceMemory& reference = context.getReference();
2167 for (size_t ndx = 0; ndx < m_bufferSize; ndx++)
2169 #if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
2170 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(ndx % 4)))));
2172 reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(3 - (ndx % 4))))));
2177 class UpdateBuffer : public CmdCommand
2180 UpdateBuffer (deUint32 seed) : m_seed(seed) {}
2181 ~UpdateBuffer (void) {}
2182 const char* getName (void) const { return "UpdateBuffer"; }
2184 void logSubmit (TestLog& log, size_t commandIndex) const;
2185 void submit (SubmitContext& context);
2186 void verify (VerifyContext& context, size_t commandIndex);
2189 const deUint32 m_seed;
2190 vk::VkDeviceSize m_bufferSize;
2193 void UpdateBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2195 log << TestLog::Message << commandIndex << ":" << getName() << " Update buffer, seed: " << m_seed << TestLog::EndMessage;
2198 void UpdateBuffer::submit (SubmitContext& context)
2200 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2201 const vk::VkCommandBuffer cmd = context.getCommandBuffer();
2202 const vk::VkBuffer buffer = context.getBuffer();
2203 const size_t blockSize = 65536;
2204 std::vector<deUint8> data (blockSize, 0);
2205 de::Random rng (m_seed);
2207 m_bufferSize = context.getBufferSize();
2209 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2211 for (size_t ndx = 0; ndx < data.size(); ndx++)
2212 data[ndx] = rng.getUint8();
2214 if (m_bufferSize - updated > blockSize)
2215 vkd.cmdUpdateBuffer(cmd, buffer, updated, blockSize, (const deUint32*)(&data[0]));
2217 vkd.cmdUpdateBuffer(cmd, buffer, updated, m_bufferSize - updated, (const deUint32*)(&data[0]));
2221 void UpdateBuffer::verify (VerifyContext& context, size_t)
2223 ReferenceMemory& reference = context.getReference();
2224 const size_t blockSize = 65536;
2225 vector<deUint8> data (blockSize, 0);
2226 de::Random rng (m_seed);
2228 for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2230 for (size_t ndx = 0; ndx < data.size(); ndx++)
2231 data[ndx] = rng.getUint8();
2233 if (m_bufferSize - updated > blockSize)
2234 reference.setData(updated, blockSize, &data[0]);
2236 reference.setData(updated, (size_t)(m_bufferSize - updated), &data[0]);
2240 class BufferCopyToBuffer : public CmdCommand
2243 BufferCopyToBuffer (void) {}
2244 ~BufferCopyToBuffer (void) {}
2245 const char* getName (void) const { return "BufferCopyToBuffer"; }
2247 void logPrepare (TestLog& log, size_t commandIndex) const;
2248 void prepare (PrepareContext& context);
2249 void logSubmit (TestLog& log, size_t commandIndex) const;
2250 void submit (SubmitContext& context);
2251 void verify (VerifyContext& context, size_t commandIndex);
2254 vk::VkDeviceSize m_bufferSize;
2255 vk::Move<vk::VkBuffer> m_dstBuffer;
2256 vk::Move<vk::VkDeviceMemory> m_memory;
2259 void BufferCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2261 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for buffer to buffer copy." << TestLog::EndMessage;
2264 void BufferCopyToBuffer::prepare (PrepareContext& context)
2266 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2267 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2268 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2269 const vk::VkDevice device = context.getContext().getDevice();
2270 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2272 m_bufferSize = context.getBufferSize();
2274 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2275 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2278 void BufferCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2280 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to another buffer" << TestLog::EndMessage;
2283 void BufferCopyToBuffer::submit (SubmitContext& context)
2285 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2286 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2287 const vk::VkBufferCopy range =
2293 vkd.cmdCopyBuffer(commandBuffer, context.getBuffer(), *m_dstBuffer, 1, &range);
2296 void BufferCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2298 tcu::ResultCollector& resultCollector (context.getResultCollector());
2299 ReferenceMemory& reference (context.getReference());
2300 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2301 const vk::VkDevice device = context.getContext().getDevice();
2302 const vk::VkQueue queue = context.getContext().getQueue();
2303 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2304 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2305 const vk::VkBufferMemoryBarrier barrier =
2307 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2310 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2311 vk::VK_ACCESS_HOST_READ_BIT,
2313 vk::VK_QUEUE_FAMILY_IGNORED,
2314 vk::VK_QUEUE_FAMILY_IGNORED,
2320 const void* barriers[] =
2325 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &barriers[0]);
2327 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2328 queueRun(vkd, queue, *commandBuffer);
2331 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2334 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2337 const deUint8* const data = (const deUint8*)ptr;
2339 for (size_t pos = 0; pos < (size_t)m_bufferSize; pos++)
2341 if (reference.isDefined(pos))
2343 if (data[pos] != reference.get(pos))
2345 resultCollector.fail(
2346 de::toString(commandIndex) + ":" + getName()
2347 + " Result differs from reference, Expected: "
2348 + de::toString(tcu::toHex<8>(reference.get(pos)))
2350 + de::toString(tcu::toHex<8>(data[pos]))
2352 + de::toString(pos));
2357 reference.set(pos, data[pos]);
2361 vkd.unmapMemory(device, *m_memory);
2364 context.getLog() << TestLog::Message << commandIndex << ": Buffer copy to buffer verification failed" << TestLog::EndMessage;
2368 class BufferCopyFromBuffer : public CmdCommand
2371 BufferCopyFromBuffer (deUint32 seed) : m_seed(seed) {}
2372 ~BufferCopyFromBuffer (void) {}
2373 const char* getName (void) const { return "BufferCopyFromBuffer"; }
2375 void logPrepare (TestLog& log, size_t commandIndex) const;
2376 void prepare (PrepareContext& context);
2377 void logSubmit (TestLog& log, size_t commandIndex) const;
2378 void submit (SubmitContext& context);
2379 void verify (VerifyContext& context, size_t commandIndex);
2382 const deUint32 m_seed;
2383 vk::VkDeviceSize m_bufferSize;
2384 vk::Move<vk::VkBuffer> m_srcBuffer;
2385 vk::Move<vk::VkDeviceMemory> m_memory;
2388 void BufferCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2390 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to buffer copy. Seed: " << m_seed << TestLog::EndMessage;
2393 void BufferCopyFromBuffer::prepare (PrepareContext& context)
2395 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2396 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2397 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2398 const vk::VkDevice device = context.getContext().getDevice();
2399 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2401 m_bufferSize = context.getBufferSize();
2402 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2403 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2406 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
2407 de::Random rng (m_seed);
2410 deUint8* const data = (deUint8*)ptr;
2412 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2413 data[ndx] = rng.getUint8();
2416 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2417 vkd.unmapMemory(device, *m_memory);
2421 void BufferCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2423 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from another buffer" << TestLog::EndMessage;
2426 void BufferCopyFromBuffer::submit (SubmitContext& context)
2428 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2429 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2430 const vk::VkBufferCopy range =
2436 vkd.cmdCopyBuffer(commandBuffer, *m_srcBuffer, context.getBuffer(), 1, &range);
2439 void BufferCopyFromBuffer::verify (VerifyContext& context, size_t)
2441 ReferenceMemory& reference (context.getReference());
2442 de::Random rng (m_seed);
2444 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2445 reference.set(ndx, rng.getUint8());
2448 class BufferCopyToImage : public CmdCommand
2451 BufferCopyToImage (void) {}
2452 ~BufferCopyToImage (void) {}
2453 const char* getName (void) const { return "BufferCopyToImage"; }
2455 void logPrepare (TestLog& log, size_t commandIndex) const;
2456 void prepare (PrepareContext& context);
2457 void logSubmit (TestLog& log, size_t commandIndex) const;
2458 void submit (SubmitContext& context);
2459 void verify (VerifyContext& context, size_t commandIndex);
2462 deInt32 m_imageWidth;
2463 deInt32 m_imageHeight;
2464 vk::Move<vk::VkImage> m_dstImage;
2465 vk::Move<vk::VkDeviceMemory> m_memory;
2468 void BufferCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
2470 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for buffer to image copy." << TestLog::EndMessage;
2473 void BufferCopyToImage::prepare (PrepareContext& context)
2475 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2476 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2477 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2478 const vk::VkDevice device = context.getContext().getDevice();
2479 const vk::VkQueue queue = context.getContext().getQueue();
2480 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2481 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2482 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2484 m_imageWidth = imageSize[0];
2485 m_imageHeight = imageSize[1];
2488 const vk::VkImageCreateInfo createInfo =
2490 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2494 vk::VK_IMAGE_TYPE_2D,
2495 vk::VK_FORMAT_R8G8B8A8_UNORM,
2501 1, 1, // mipLevels, arrayLayers
2502 vk::VK_SAMPLE_COUNT_1_BIT,
2504 vk::VK_IMAGE_TILING_OPTIMAL,
2505 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2506 vk::VK_SHARING_MODE_EXCLUSIVE,
2508 (deUint32)queueFamilies.size(),
2510 vk::VK_IMAGE_LAYOUT_UNDEFINED
2513 m_dstImage = vk::createImage(vkd, device, &createInfo);
2516 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
2519 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2520 const vk::VkImageMemoryBarrier barrier =
2522 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2526 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2528 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2529 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2531 vk::VK_QUEUE_FAMILY_IGNORED,
2532 vk::VK_QUEUE_FAMILY_IGNORED,
2536 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2538 1, // Mip level count
2543 const void* barriers[] =
2548 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &barriers[0]);
2550 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2551 queueRun(vkd, queue, *commandBuffer);
2555 void BufferCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
2557 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to image" << TestLog::EndMessage;
2560 void BufferCopyToImage::submit (SubmitContext& context)
2562 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2563 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2564 const vk::VkBufferImageCopy region =
2569 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2582 vkd.cmdCopyBufferToImage(commandBuffer, context.getBuffer(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
2585 void BufferCopyToImage::verify (VerifyContext& context, size_t commandIndex)
2587 tcu::ResultCollector& resultCollector (context.getResultCollector());
2588 ReferenceMemory& reference (context.getReference());
2589 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2590 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2591 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2592 const vk::VkDevice device = context.getContext().getDevice();
2593 const vk::VkQueue queue = context.getContext().getQueue();
2594 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2595 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2596 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2597 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2598 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2600 const vk::VkImageMemoryBarrier imageBarrier =
2602 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2605 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2606 vk::VK_ACCESS_TRANSFER_READ_BIT,
2608 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2609 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2611 vk::VK_QUEUE_FAMILY_IGNORED,
2612 vk::VK_QUEUE_FAMILY_IGNORED,
2616 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2618 1, // Mip level count
2623 const vk::VkBufferMemoryBarrier bufferBarrier =
2625 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2628 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2629 vk::VK_ACCESS_HOST_READ_BIT,
2631 vk::VK_QUEUE_FAMILY_IGNORED,
2632 vk::VK_QUEUE_FAMILY_IGNORED,
2638 const void* preBarriers[] =
2642 const vk::VkBufferImageCopy region =
2647 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2659 const void* postBarriers[] =
2664 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &preBarriers[0]);
2665 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
2666 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &postBarriers[0]);
2669 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2670 queueRun(vkd, queue, *commandBuffer);
2673 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2675 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2678 const deUint8* const data = (const deUint8*)ptr;
2680 for (size_t pos = 0; pos < (size_t)( 4 * m_imageWidth * m_imageHeight); pos++)
2682 if (reference.isDefined(pos))
2684 if (data[pos] != reference.get(pos))
2686 resultCollector.fail(
2687 de::toString(commandIndex) + ":" + getName()
2688 + " Result differs from reference, Expected: "
2689 + de::toString(tcu::toHex<8>(reference.get(pos)))
2691 + de::toString(tcu::toHex<8>(data[pos]))
2693 + de::toString(pos));
2698 reference.set(pos, data[pos]);
2702 vkd.unmapMemory(device, *memory);
2706 class BufferCopyFromImage : public CmdCommand
2709 BufferCopyFromImage (deUint32 seed) : m_seed(seed) {}
2710 ~BufferCopyFromImage (void) {}
2711 const char* getName (void) const { return "BufferCopyFromImage"; }
2713 void logPrepare (TestLog& log, size_t commandIndex) const;
2714 void prepare (PrepareContext& context);
2715 void logSubmit (TestLog& log, size_t commandIndex) const;
2716 void submit (SubmitContext& context);
2717 void verify (VerifyContext& context, size_t commandIndex);
2720 const deUint32 m_seed;
2721 deInt32 m_imageWidth;
2722 deInt32 m_imageHeight;
2723 vk::Move<vk::VkImage> m_srcImage;
2724 vk::Move<vk::VkDeviceMemory> m_memory;
2727 void BufferCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
2729 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to buffer copy." << TestLog::EndMessage;
2732 void BufferCopyFromImage::prepare (PrepareContext& context)
2734 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2735 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2736 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2737 const vk::VkDevice device = context.getContext().getDevice();
2738 const vk::VkQueue queue = context.getContext().getQueue();
2739 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2740 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2741 const IVec2 imageSize = findImageSizeWxHx4(context.getBufferSize());
2743 m_imageWidth = imageSize[0];
2744 m_imageHeight = imageSize[1];
2747 const vk::VkImageCreateInfo createInfo =
2749 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2753 vk::VK_IMAGE_TYPE_2D,
2754 vk::VK_FORMAT_R8G8B8A8_UNORM,
2760 1, 1, // mipLevels, arrayLayers
2761 vk::VK_SAMPLE_COUNT_1_BIT,
2763 vk::VK_IMAGE_TILING_OPTIMAL,
2764 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2765 vk::VK_SHARING_MODE_EXCLUSIVE,
2767 (deUint32)queueFamilies.size(),
2769 vk::VK_IMAGE_LAYOUT_UNDEFINED
2772 m_srcImage = vk::createImage(vkd, device, &createInfo);
2775 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
2778 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2779 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2780 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2781 const vk::VkImageMemoryBarrier preImageBarrier =
2783 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2787 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2789 vk::VK_IMAGE_LAYOUT_UNDEFINED,
2790 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2792 vk::VK_QUEUE_FAMILY_IGNORED,
2793 vk::VK_QUEUE_FAMILY_IGNORED,
2797 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2799 1, // Mip level count
2804 const vk::VkImageMemoryBarrier postImageBarrier =
2806 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2810 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2812 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2813 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2815 vk::VK_QUEUE_FAMILY_IGNORED,
2816 vk::VK_QUEUE_FAMILY_IGNORED,
2820 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2822 1, // Mip level count
2827 const void* preBarriers[] =
2831 const void* postBarriers[] =
2835 const vk::VkBufferImageCopy region =
2840 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2854 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2855 de::Random rng (m_seed);
2858 deUint8* const data = (deUint8*)ptr;
2860 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2861 data[ndx] = rng.getUint8();
2864 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2865 vkd.unmapMemory(device, *memory);
2868 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &preBarriers[0]);
2869 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
2870 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &postBarriers[0]);
2872 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
2873 queueRun(vkd, queue, *commandBuffer);
2877 void BufferCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
2879 log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from image" << TestLog::EndMessage;
2882 void BufferCopyFromImage::submit (SubmitContext& context)
2884 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2885 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2886 const vk::VkBufferImageCopy region =
2891 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2904 vkd.cmdCopyImageToBuffer(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getBuffer(), 1, ®ion);
2907 void BufferCopyFromImage::verify (VerifyContext& context, size_t)
2909 ReferenceMemory& reference (context.getReference());
2910 de::Random rng (m_seed);
2912 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2913 reference.set(ndx, rng.getUint8());
2916 class ImageCopyToBuffer : public CmdCommand
2919 ImageCopyToBuffer (void) {}
2920 ~ImageCopyToBuffer (void) {}
2921 const char* getName (void) const { return "BufferCopyToImage"; }
2923 void logPrepare (TestLog& log, size_t commandIndex) const;
2924 void prepare (PrepareContext& context);
2925 void logSubmit (TestLog& log, size_t commandIndex) const;
2926 void submit (SubmitContext& context);
2927 void verify (VerifyContext& context, size_t commandIndex);
2930 vk::VkDeviceSize m_bufferSize;
2931 vk::Move<vk::VkBuffer> m_dstBuffer;
2932 vk::Move<vk::VkDeviceMemory> m_memory;
2933 vk::VkDeviceSize m_imageMemorySize;
2934 deInt32 m_imageWidth;
2935 deInt32 m_imageHeight;
2938 void ImageCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2940 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for image to buffer copy." << TestLog::EndMessage;
2943 void ImageCopyToBuffer::prepare (PrepareContext& context)
2945 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
2946 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2947 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
2948 const vk::VkDevice device = context.getContext().getDevice();
2949 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
2951 m_imageWidth = context.getImageWidth();
2952 m_imageHeight = context.getImageHeight();
2953 m_bufferSize = 4 * m_imageWidth * m_imageHeight;
2954 m_imageMemorySize = context.getImageMemorySize();
2955 m_dstBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2956 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2959 void ImageCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2961 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to buffer" << TestLog::EndMessage;
2964 void ImageCopyToBuffer::submit (SubmitContext& context)
2966 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2967 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
2968 const vk::VkBufferImageCopy region =
2973 vk::VK_IMAGE_ASPECT_COLOR_BIT,
2986 vkd.cmdCopyImageToBuffer(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstBuffer, 1, ®ion);
2989 void ImageCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2991 tcu::ResultCollector& resultCollector (context.getResultCollector());
2992 ReferenceMemory& reference (context.getReference());
2993 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
2994 const vk::VkDevice device = context.getContext().getDevice();
2995 const vk::VkQueue queue = context.getContext().getQueue();
2996 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
2997 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2998 const vk::VkBufferMemoryBarrier barrier =
3000 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3003 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3004 vk::VK_ACCESS_HOST_READ_BIT,
3006 vk::VK_QUEUE_FAMILY_IGNORED,
3007 vk::VK_QUEUE_FAMILY_IGNORED,
3013 const void* barriers[] =
3018 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &barriers[0]);
3020 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3021 queueRun(vkd, queue, *commandBuffer);
3023 reference.setUndefined(0, (size_t)m_imageMemorySize);
3025 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3026 const ConstPixelBufferAccess referenceImage (context.getReferenceImage().getAccess());
3027 const ConstPixelBufferAccess resultImage (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, ptr);
3029 vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3031 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), referenceImage, resultImage, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3032 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3034 vkd.unmapMemory(device, *m_memory);
3038 class ImageCopyFromBuffer : public CmdCommand
3041 ImageCopyFromBuffer (deUint32 seed) : m_seed(seed) {}
3042 ~ImageCopyFromBuffer (void) {}
3043 const char* getName (void) const { return "ImageCopyFromBuffer"; }
3045 void logPrepare (TestLog& log, size_t commandIndex) const;
3046 void prepare (PrepareContext& context);
3047 void logSubmit (TestLog& log, size_t commandIndex) const;
3048 void submit (SubmitContext& context);
3049 void verify (VerifyContext& context, size_t commandIndex);
3052 const deUint32 m_seed;
3053 deInt32 m_imageWidth;
3054 deInt32 m_imageHeight;
3055 vk::VkDeviceSize m_imageMemorySize;
3056 vk::VkDeviceSize m_bufferSize;
3057 vk::Move<vk::VkBuffer> m_srcBuffer;
3058 vk::Move<vk::VkDeviceMemory> m_memory;
3061 void ImageCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
3063 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to image copy. Seed: " << m_seed << TestLog::EndMessage;
3066 void ImageCopyFromBuffer::prepare (PrepareContext& context)
3068 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3069 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3070 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3071 const vk::VkDevice device = context.getContext().getDevice();
3072 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3074 m_imageWidth = context.getImageHeight();
3075 m_imageHeight = context.getImageWidth();
3076 m_imageMemorySize = context.getImageMemorySize();
3077 m_bufferSize = m_imageWidth * m_imageHeight * 4;
3078 m_srcBuffer = createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
3079 m_memory = bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
3082 void* const ptr = mapMemory(vkd, device, *m_memory, m_bufferSize);
3083 de::Random rng (m_seed);
3086 deUint8* const data = (deUint8*)ptr;
3088 for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
3089 data[ndx] = rng.getUint8();
3092 vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3093 vkd.unmapMemory(device, *m_memory);
3097 void ImageCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
3099 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from buffer" << TestLog::EndMessage;
3102 void ImageCopyFromBuffer::submit (SubmitContext& context)
3104 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3105 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3106 const vk::VkBufferImageCopy region =
3111 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3124 vkd.cmdCopyBufferToImage(commandBuffer, *m_srcBuffer, context.getImage(), context.getImageLayout(), 1, ®ion);
3127 void ImageCopyFromBuffer::verify (VerifyContext& context, size_t)
3129 ReferenceMemory& reference (context.getReference());
3130 de::Random rng (m_seed);
3132 reference.setUndefined(0, (size_t)m_imageMemorySize);
3135 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3137 for (deInt32 y = 0; y < m_imageHeight; y++)
3138 for (deInt32 x = 0; x < m_imageWidth; x++)
3140 const deUint8 r8 = rng.getUint8();
3141 const deUint8 g8 = rng.getUint8();
3142 const deUint8 b8 = rng.getUint8();
3143 const deUint8 a8 = rng.getUint8();
3145 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3150 class ImageCopyFromImage : public CmdCommand
3153 ImageCopyFromImage (deUint32 seed) : m_seed(seed) {}
3154 ~ImageCopyFromImage (void) {}
3155 const char* getName (void) const { return "ImageCopyFromImage"; }
3157 void logPrepare (TestLog& log, size_t commandIndex) const;
3158 void prepare (PrepareContext& context);
3159 void logSubmit (TestLog& log, size_t commandIndex) const;
3160 void submit (SubmitContext& context);
3161 void verify (VerifyContext& context, size_t commandIndex);
3164 const deUint32 m_seed;
3165 deInt32 m_imageWidth;
3166 deInt32 m_imageHeight;
3167 vk::VkDeviceSize m_imageMemorySize;
3168 vk::Move<vk::VkImage> m_srcImage;
3169 vk::Move<vk::VkDeviceMemory> m_memory;
3172 void ImageCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3174 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image copy." << TestLog::EndMessage;
3177 void ImageCopyFromImage::prepare (PrepareContext& context)
3179 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3180 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3181 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3182 const vk::VkDevice device = context.getContext().getDevice();
3183 const vk::VkQueue queue = context.getContext().getQueue();
3184 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3185 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3187 m_imageWidth = context.getImageWidth();
3188 m_imageHeight = context.getImageHeight();
3189 m_imageMemorySize = context.getImageMemorySize();
3192 const vk::VkImageCreateInfo createInfo =
3194 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3198 vk::VK_IMAGE_TYPE_2D,
3199 vk::VK_FORMAT_R8G8B8A8_UNORM,
3205 1, 1, // mipLevels, arrayLayers
3206 vk::VK_SAMPLE_COUNT_1_BIT,
3208 vk::VK_IMAGE_TILING_OPTIMAL,
3209 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3210 vk::VK_SHARING_MODE_EXCLUSIVE,
3212 (deUint32)queueFamilies.size(),
3214 vk::VK_IMAGE_LAYOUT_UNDEFINED
3217 m_srcImage = vk::createImage(vkd, device, &createInfo);
3220 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
3223 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3224 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3225 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3226 const vk::VkImageMemoryBarrier preImageBarrier =
3228 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3232 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3234 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3235 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3237 vk::VK_QUEUE_FAMILY_IGNORED,
3238 vk::VK_QUEUE_FAMILY_IGNORED,
3242 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3244 1, // Mip level count
3249 const vk::VkImageMemoryBarrier postImageBarrier =
3251 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3255 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3257 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3258 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3260 vk::VK_QUEUE_FAMILY_IGNORED,
3261 vk::VK_QUEUE_FAMILY_IGNORED,
3265 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3267 1, // Mip level count
3272 const void* preBarriers[] =
3276 const void* postBarriers[] =
3280 const vk::VkBufferImageCopy region =
3285 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3299 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3300 de::Random rng (m_seed);
3303 deUint8* const data = (deUint8*)ptr;
3305 for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
3306 data[ndx] = rng.getUint8();
3309 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3310 vkd.unmapMemory(device, *memory);
3313 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &preBarriers[0]);
3314 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
3315 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &postBarriers[0]);
3317 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3318 queueRun(vkd, queue, *commandBuffer);
3322 void ImageCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3324 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from another image" << TestLog::EndMessage;
3327 void ImageCopyFromImage::submit (SubmitContext& context)
3329 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3330 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3331 const vk::VkImageCopy region =
3334 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3342 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3355 vkd.cmdCopyImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), context.getImageLayout(), 1, ®ion);
3358 void ImageCopyFromImage::verify (VerifyContext& context, size_t)
3360 ReferenceMemory& reference (context.getReference());
3361 de::Random rng (m_seed);
3363 reference.setUndefined(0, (size_t)m_imageMemorySize);
3366 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3368 for (deInt32 y = 0; y < m_imageHeight; y++)
3369 for (deInt32 x = 0; x < m_imageWidth; x++)
3371 const deUint8 r8 = rng.getUint8();
3372 const deUint8 g8 = rng.getUint8();
3373 const deUint8 b8 = rng.getUint8();
3374 const deUint8 a8 = rng.getUint8();
3376 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3381 class ImageCopyToImage : public CmdCommand
3384 ImageCopyToImage (void) {}
3385 ~ImageCopyToImage (void) {}
3386 const char* getName (void) const { return "ImageCopyToImage"; }
3388 void logPrepare (TestLog& log, size_t commandIndex) const;
3389 void prepare (PrepareContext& context);
3390 void logSubmit (TestLog& log, size_t commandIndex) const;
3391 void submit (SubmitContext& context);
3392 void verify (VerifyContext& context, size_t commandIndex);
3395 deInt32 m_imageWidth;
3396 deInt32 m_imageHeight;
3397 vk::VkDeviceSize m_imageMemorySize;
3398 vk::Move<vk::VkImage> m_dstImage;
3399 vk::Move<vk::VkDeviceMemory> m_memory;
3402 void ImageCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
3404 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image copy." << TestLog::EndMessage;
3407 void ImageCopyToImage::prepare (PrepareContext& context)
3409 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3410 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3411 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3412 const vk::VkDevice device = context.getContext().getDevice();
3413 const vk::VkQueue queue = context.getContext().getQueue();
3414 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3415 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3417 m_imageWidth = context.getImageWidth();
3418 m_imageHeight = context.getImageHeight();
3419 m_imageMemorySize = context.getImageMemorySize();
3422 const vk::VkImageCreateInfo createInfo =
3424 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3428 vk::VK_IMAGE_TYPE_2D,
3429 vk::VK_FORMAT_R8G8B8A8_UNORM,
3435 1, 1, // mipLevels, arrayLayers
3436 vk::VK_SAMPLE_COUNT_1_BIT,
3438 vk::VK_IMAGE_TILING_OPTIMAL,
3439 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3440 vk::VK_SHARING_MODE_EXCLUSIVE,
3442 (deUint32)queueFamilies.size(),
3444 vk::VK_IMAGE_LAYOUT_UNDEFINED
3447 m_dstImage = vk::createImage(vkd, device, &createInfo);
3450 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
3453 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3454 const vk::VkImageMemoryBarrier barrier =
3456 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3460 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3462 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3463 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3465 vk::VK_QUEUE_FAMILY_IGNORED,
3466 vk::VK_QUEUE_FAMILY_IGNORED,
3470 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3472 1, // Mip level count
3477 const void* barriers[] =
3482 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &barriers[0]);
3484 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3485 queueRun(vkd, queue, *commandBuffer);
3489 void ImageCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
3491 log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to another image" << TestLog::EndMessage;
3494 void ImageCopyToImage::submit (SubmitContext& context)
3496 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3497 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3498 const vk::VkImageCopy region =
3501 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3509 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3522 vkd.cmdCopyImage(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, ®ion);
3525 void ImageCopyToImage::verify (VerifyContext& context, size_t commandIndex)
3527 tcu::ResultCollector& resultCollector (context.getResultCollector());
3528 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3529 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3530 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3531 const vk::VkDevice device = context.getContext().getDevice();
3532 const vk::VkQueue queue = context.getContext().getQueue();
3533 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3534 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3535 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3536 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3537 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3539 const vk::VkImageMemoryBarrier imageBarrier =
3541 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3544 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3545 vk::VK_ACCESS_TRANSFER_READ_BIT,
3547 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3548 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3550 vk::VK_QUEUE_FAMILY_IGNORED,
3551 vk::VK_QUEUE_FAMILY_IGNORED,
3555 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3557 1, // Mip level count
3562 const vk::VkBufferMemoryBarrier bufferBarrier =
3564 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3567 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3568 vk::VK_ACCESS_HOST_READ_BIT,
3570 vk::VK_QUEUE_FAMILY_IGNORED,
3571 vk::VK_QUEUE_FAMILY_IGNORED,
3577 const void* preBarriers[] =
3581 const vk::VkBufferImageCopy region =
3586 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3598 const void* postBarriers[] =
3603 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &preBarriers[0]);
3604 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
3605 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &postBarriers[0]);
3608 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3609 queueRun(vkd, queue, *commandBuffer);
3612 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3614 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3617 const deUint8* const data = (const deUint8*)ptr;
3618 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, data);
3619 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3621 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3622 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3625 vkd.unmapMemory(device, *memory);
3635 class ImageBlitFromImage : public CmdCommand
3638 ImageBlitFromImage (deUint32 seed, BlitScale scale) : m_seed(seed), m_scale(scale) {}
3639 ~ImageBlitFromImage (void) {}
3640 const char* getName (void) const { return "ImageBlitFromImage"; }
3642 void logPrepare (TestLog& log, size_t commandIndex) const;
3643 void prepare (PrepareContext& context);
3644 void logSubmit (TestLog& log, size_t commandIndex) const;
3645 void submit (SubmitContext& context);
3646 void verify (VerifyContext& context, size_t commandIndex);
3649 const deUint32 m_seed;
3650 const BlitScale m_scale;
3651 deInt32 m_imageWidth;
3652 deInt32 m_imageHeight;
3653 vk::VkDeviceSize m_imageMemorySize;
3654 deInt32 m_srcImageWidth;
3655 deInt32 m_srcImageHeight;
3656 vk::Move<vk::VkImage> m_srcImage;
3657 vk::Move<vk::VkDeviceMemory> m_memory;
3660 void ImageBlitFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3662 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image blit." << TestLog::EndMessage;
3665 void ImageBlitFromImage::prepare (PrepareContext& context)
3667 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3668 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3669 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3670 const vk::VkDevice device = context.getContext().getDevice();
3671 const vk::VkQueue queue = context.getContext().getQueue();
3672 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3673 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3675 m_imageWidth = context.getImageWidth();
3676 m_imageHeight = context.getImageHeight();
3677 m_imageMemorySize = context.getImageMemorySize();
3679 if (m_scale == BLIT_SCALE_10)
3681 m_srcImageWidth = m_imageWidth;
3682 m_srcImageHeight = m_imageHeight;
3684 else if (m_scale == BLIT_SCALE_20)
3686 m_srcImageWidth = m_imageWidth / 2;
3687 m_srcImageHeight = m_imageHeight / 2;
3690 DE_FATAL("Unsupported scale");
3693 const vk::VkImageCreateInfo createInfo =
3695 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3699 vk::VK_IMAGE_TYPE_2D,
3700 vk::VK_FORMAT_R8G8B8A8_UNORM,
3706 1, 1, // mipLevels, arrayLayers
3707 vk::VK_SAMPLE_COUNT_1_BIT,
3709 vk::VK_IMAGE_TILING_OPTIMAL,
3710 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3711 vk::VK_SHARING_MODE_EXCLUSIVE,
3713 (deUint32)queueFamilies.size(),
3715 vk::VK_IMAGE_LAYOUT_UNDEFINED
3718 m_srcImage = vk::createImage(vkd, device, &createInfo);
3721 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
3724 const vk::Unique<vk::VkBuffer> srcBuffer (createBuffer(vkd, device, 4 * m_srcImageWidth * m_srcImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3725 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3726 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3727 const vk::VkImageMemoryBarrier preImageBarrier =
3729 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3733 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3735 vk::VK_IMAGE_LAYOUT_UNDEFINED,
3736 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3738 vk::VK_QUEUE_FAMILY_IGNORED,
3739 vk::VK_QUEUE_FAMILY_IGNORED,
3743 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3745 1, // Mip level count
3750 const vk::VkImageMemoryBarrier postImageBarrier =
3752 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3756 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3758 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3759 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3761 vk::VK_QUEUE_FAMILY_IGNORED,
3762 vk::VK_QUEUE_FAMILY_IGNORED,
3766 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3768 1, // Mip level count
3773 const void* preBarriers[] =
3777 const void* postBarriers[] =
3781 const vk::VkBufferImageCopy region =
3786 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3800 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_srcImageWidth * m_srcImageHeight);
3801 de::Random rng (m_seed);
3804 deUint8* const data = (deUint8*)ptr;
3806 for (size_t ndx = 0; ndx < (size_t)(4 * m_srcImageWidth * m_srcImageHeight); ndx++)
3807 data[ndx] = rng.getUint8();
3810 vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_srcImageWidth * m_srcImageHeight);
3811 vkd.unmapMemory(device, *memory);
3814 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &preBarriers[0]);
3815 vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
3816 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &postBarriers[0]);
3818 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
3819 queueRun(vkd, queue, *commandBuffer);
3823 void ImageBlitFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3825 log << TestLog::Message << commandIndex << ":" << getName() << " Blit from another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "") << TestLog::EndMessage;
3828 void ImageBlitFromImage::submit (SubmitContext& context)
3830 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3831 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
3832 const vk::VkImageBlit region =
3836 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3850 vk::VK_IMAGE_ASPECT_COLOR_BIT,
3862 vkd.cmdBlitImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, context.getImage(), context.getImageLayout(), 1, ®ion, vk::VK_FILTER_NEAREST);
3865 void ImageBlitFromImage::verify (VerifyContext& context, size_t)
3867 ReferenceMemory& reference (context.getReference());
3868 de::Random rng (m_seed);
3870 reference.setUndefined(0, (size_t)m_imageMemorySize);
3873 const PixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
3875 if (m_scale == BLIT_SCALE_10)
3877 for (deInt32 y = 0; y < m_imageHeight; y++)
3878 for (deInt32 x = 0; x < m_imageWidth; x++)
3880 const deUint8 r8 = rng.getUint8();
3881 const deUint8 g8 = rng.getUint8();
3882 const deUint8 b8 = rng.getUint8();
3883 const deUint8 a8 = rng.getUint8();
3885 refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3888 else if (m_scale == BLIT_SCALE_20)
3890 tcu::TextureLevel source (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_srcImageWidth, m_srcImageHeight);
3892 for (deInt32 y = 0; y < m_srcImageHeight; y++)
3893 for (deInt32 x = 0; x < m_srcImageWidth; x++)
3895 const deUint8 r8 = rng.getUint8();
3896 const deUint8 g8 = rng.getUint8();
3897 const deUint8 b8 = rng.getUint8();
3898 const deUint8 a8 = rng.getUint8();
3900 source.getAccess().setPixel(UVec4(r8, g8, b8, a8), x, y);
3903 for (deInt32 y = 0; y < m_imageHeight; y++)
3904 for (deInt32 x = 0; x < m_imageWidth; x++)
3905 refAccess.setPixel(source.getAccess().getPixelUint(x / 2, y / 2), x, y);
3908 DE_FATAL("Unsupported scale");
3912 class ImageBlitToImage : public CmdCommand
3915 ImageBlitToImage (BlitScale scale) : m_scale(scale) {}
3916 ~ImageBlitToImage (void) {}
3917 const char* getName (void) const { return "ImageBlitToImage"; }
3919 void logPrepare (TestLog& log, size_t commandIndex) const;
3920 void prepare (PrepareContext& context);
3921 void logSubmit (TestLog& log, size_t commandIndex) const;
3922 void submit (SubmitContext& context);
3923 void verify (VerifyContext& context, size_t commandIndex);
3926 const BlitScale m_scale;
3927 deInt32 m_imageWidth;
3928 deInt32 m_imageHeight;
3929 vk::VkDeviceSize m_imageMemorySize;
3930 deInt32 m_dstImageWidth;
3931 deInt32 m_dstImageHeight;
3932 vk::Move<vk::VkImage> m_dstImage;
3933 vk::Move<vk::VkDeviceMemory> m_memory;
3936 void ImageBlitToImage::logPrepare (TestLog& log, size_t commandIndex) const
3938 log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image blit." << TestLog::EndMessage;
3941 void ImageBlitToImage::prepare (PrepareContext& context)
3943 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
3944 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
3945 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
3946 const vk::VkDevice device = context.getContext().getDevice();
3947 const vk::VkQueue queue = context.getContext().getQueue();
3948 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
3949 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
3951 m_imageWidth = context.getImageWidth();
3952 m_imageHeight = context.getImageHeight();
3953 m_imageMemorySize = context.getImageMemorySize();
3955 if (m_scale == BLIT_SCALE_10)
3957 m_dstImageWidth = context.getImageWidth();
3958 m_dstImageHeight = context.getImageHeight();
3960 else if (m_scale == BLIT_SCALE_20)
3962 m_dstImageWidth = context.getImageWidth() * 2;
3963 m_dstImageHeight = context.getImageHeight() * 2;
3966 DE_FATAL("Unsupportd blit scale");
3969 const vk::VkImageCreateInfo createInfo =
3971 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3975 vk::VK_IMAGE_TYPE_2D,
3976 vk::VK_FORMAT_R8G8B8A8_UNORM,
3982 1, 1, // mipLevels, arrayLayers
3983 vk::VK_SAMPLE_COUNT_1_BIT,
3985 vk::VK_IMAGE_TILING_OPTIMAL,
3986 vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3987 vk::VK_SHARING_MODE_EXCLUSIVE,
3989 (deUint32)queueFamilies.size(),
3991 vk::VK_IMAGE_LAYOUT_UNDEFINED
3994 m_dstImage = vk::createImage(vkd, device, &createInfo);
3997 m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
4000 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4001 const vk::VkImageMemoryBarrier barrier =
4003 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4007 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4009 vk::VK_IMAGE_LAYOUT_UNDEFINED,
4010 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4012 vk::VK_QUEUE_FAMILY_IGNORED,
4013 vk::VK_QUEUE_FAMILY_IGNORED,
4017 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4019 1, // Mip level count
4024 const void* barriers[] =
4029 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &barriers[0]);
4031 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4032 queueRun(vkd, queue, *commandBuffer);
4036 void ImageBlitToImage::logSubmit (TestLog& log, size_t commandIndex) const
4038 log << TestLog::Message << commandIndex << ":" << getName() << " Blit image to another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "") << TestLog::EndMessage;
4041 void ImageBlitToImage::submit (SubmitContext& context)
4043 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4044 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4045 const vk::VkImageBlit region =
4049 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4063 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4075 vkd.cmdBlitImage(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion, vk::VK_FILTER_NEAREST);
4078 void ImageBlitToImage::verify (VerifyContext& context, size_t commandIndex)
4080 tcu::ResultCollector& resultCollector (context.getResultCollector());
4081 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4082 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4083 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4084 const vk::VkDevice device = context.getContext().getDevice();
4085 const vk::VkQueue queue = context.getContext().getQueue();
4086 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
4087 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4088 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
4089 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_dstImageWidth * m_dstImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4090 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
4092 const vk::VkImageMemoryBarrier imageBarrier =
4094 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4097 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4098 vk::VK_ACCESS_TRANSFER_READ_BIT,
4100 vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4101 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4103 vk::VK_QUEUE_FAMILY_IGNORED,
4104 vk::VK_QUEUE_FAMILY_IGNORED,
4108 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4110 1, // Mip level count
4115 const vk::VkBufferMemoryBarrier bufferBarrier =
4117 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4120 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4121 vk::VK_ACCESS_HOST_READ_BIT,
4123 vk::VK_QUEUE_FAMILY_IGNORED,
4124 vk::VK_QUEUE_FAMILY_IGNORED,
4130 const void* preBarriers[] =
4134 const vk::VkBufferImageCopy region =
4139 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4151 const void* postBarriers[] =
4156 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &preBarriers[0]);
4157 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
4158 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &postBarriers[0]);
4161 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4162 queueRun(vkd, queue, *commandBuffer);
4165 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_dstImageWidth * m_dstImageHeight);
4167 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_dstImageWidth * m_dstImageHeight);
4169 if (m_scale == BLIT_SCALE_10)
4171 const deUint8* const data = (const deUint8*)ptr;
4172 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4173 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
4175 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4176 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4178 else if (m_scale == BLIT_SCALE_20)
4180 const deUint8* const data = (const deUint8*)ptr;
4181 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4182 tcu::TextureLevel reference (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1);
4185 const ConstPixelBufferAccess& refAccess (context.getReferenceImage().getAccess());
4187 for (deInt32 y = 0; y < m_dstImageHeight; y++)
4188 for (deInt32 x = 0; x < m_dstImageWidth; x++)
4190 reference.getAccess().setPixel(refAccess.getPixel(x/2, y/2), x, y);
4194 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), reference.getAccess(), resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4195 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4198 DE_FATAL("Unknown scale");
4200 vkd.unmapMemory(device, *memory);
4204 class PrepareRenderPassContext
4207 PrepareRenderPassContext (PrepareContext& context,
4208 vk::VkRenderPass renderPass,
4209 vk::VkFramebuffer framebuffer,
4210 deInt32 targetWidth,
4211 deInt32 targetHeight)
4212 : m_context (context)
4213 , m_renderPass (renderPass)
4214 , m_framebuffer (framebuffer)
4215 , m_targetWidth (targetWidth)
4216 , m_targetHeight (targetHeight)
4220 const Memory& getMemory (void) const { return m_context.getMemory(); }
4221 const Context& getContext (void) const { return m_context.getContext(); }
4222 const vk::ProgramCollection<vk::ProgramBinary>& getBinaryCollection (void) const { return m_context.getBinaryCollection(); }
4224 vk::VkBuffer getBuffer (void) const { return m_context.getBuffer(); }
4225 vk::VkDeviceSize getBufferSize (void) const { return m_context.getBufferSize(); }
4227 vk::VkImage getImage (void) const { return m_context.getImage(); }
4228 deInt32 getImageWidth (void) const { return m_context.getImageWidth(); }
4229 deInt32 getImageHeight (void) const { return m_context.getImageHeight(); }
4230 vk::VkImageLayout getImageLayout (void) const { return m_context.getImageLayout(); }
4232 deInt32 getTargetWidth (void) const { return m_targetWidth; }
4233 deInt32 getTargetHeight (void) const { return m_targetHeight; }
4235 vk::VkRenderPass getRenderPass (void) const { return m_renderPass; }
4238 PrepareContext& m_context;
4239 const vk::VkRenderPass m_renderPass;
4240 const vk::VkFramebuffer m_framebuffer;
4241 const deInt32 m_targetWidth;
4242 const deInt32 m_targetHeight;
4245 class VerifyRenderPassContext
4248 VerifyRenderPassContext (VerifyContext& context,
4249 deInt32 targetWidth,
4250 deInt32 targetHeight)
4251 : m_context (context)
4252 , m_referenceTarget (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), targetWidth, targetHeight)
4256 const Context& getContext (void) const { return m_context.getContext(); }
4257 TestLog& getLog (void) const { return m_context.getLog(); }
4258 tcu::ResultCollector& getResultCollector (void) const { return m_context.getResultCollector(); }
4260 TextureLevel& getReferenceTarget (void) { return m_referenceTarget; }
4262 ReferenceMemory& getReference (void) { return m_context.getReference(); }
4263 TextureLevel& getReferenceImage (void) { return m_context.getReferenceImage();}
4266 VerifyContext& m_context;
4267 TextureLevel m_referenceTarget;
4271 class RenderPassCommand
4274 virtual ~RenderPassCommand (void) {}
4275 virtual const char* getName (void) const = 0;
4277 // Log things that are done during prepare
4278 virtual void logPrepare (TestLog&, size_t) const {}
4279 // Log submitted calls etc.
4280 virtual void logSubmit (TestLog&, size_t) const {}
4282 // Allocate vulkan resources and prepare for submit.
4283 virtual void prepare (PrepareRenderPassContext&) {}
4285 // Submit commands to command buffer.
4286 virtual void submit (SubmitContext&) {}
4289 virtual void verify (VerifyRenderPassContext&, size_t) {}
4292 class SubmitRenderPass : public CmdCommand
4295 SubmitRenderPass (const vector<RenderPassCommand*>& commands);
4296 ~SubmitRenderPass (void) {}
4297 const char* getName (void) const { return "SubmitRenderPass"; }
4299 void logPrepare (TestLog&, size_t) const;
4300 void logSubmit (TestLog&, size_t) const;
4302 void prepare (PrepareContext&);
4303 void submit (SubmitContext&);
4305 void verify (VerifyContext&, size_t);
4308 const deInt32 m_targetWidth;
4309 const deInt32 m_targetHeight;
4310 vk::Move<vk::VkRenderPass> m_renderPass;
4311 vk::Move<vk::VkDeviceMemory> m_colorTargetMemory;
4312 de::MovePtr<vk::Allocation> m_colorTargetMemory2;
4313 vk::Move<vk::VkImage> m_colorTarget;
4314 vk::Move<vk::VkImageView> m_colorTargetView;
4315 vk::Move<vk::VkFramebuffer> m_framebuffer;
4316 vector<RenderPassCommand*> m_commands;
4319 SubmitRenderPass::SubmitRenderPass (const vector<RenderPassCommand*>& commands)
4320 : m_targetWidth (256)
4321 , m_targetHeight (256)
4322 , m_commands (commands)
4326 void SubmitRenderPass::logPrepare (TestLog& log, size_t commandIndex) const
4328 const string sectionName (de::toString(commandIndex) + ":" + getName());
4329 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4331 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4333 RenderPassCommand& command = *m_commands[cmdNdx];
4334 command.logPrepare(log, cmdNdx);
4338 void SubmitRenderPass::logSubmit (TestLog& log, size_t commandIndex) const
4340 const string sectionName (de::toString(commandIndex) + ":" + getName());
4341 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4343 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4345 RenderPassCommand& command = *m_commands[cmdNdx];
4346 command.logSubmit(log, cmdNdx);
4350 void SubmitRenderPass::prepare (PrepareContext& context)
4352 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4353 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4354 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4355 const vk::VkDevice device = context.getContext().getDevice();
4356 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
4358 const vk::VkAttachmentReference colorAttachments[] =
4360 { 0, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }
4362 const vk::VkSubpassDescription subpass =
4365 vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
4370 DE_LENGTH_OF_ARRAY(colorAttachments),
4377 const vk::VkAttachmentDescription attachment =
4380 vk::VK_FORMAT_R8G8B8A8_UNORM,
4381 vk::VK_SAMPLE_COUNT_1_BIT,
4383 vk::VK_ATTACHMENT_LOAD_OP_CLEAR,
4384 vk::VK_ATTACHMENT_STORE_OP_STORE,
4386 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
4387 vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,
4389 vk::VK_IMAGE_LAYOUT_UNDEFINED,
4390 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
4393 const vk::VkImageCreateInfo createInfo =
4395 vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
4399 vk::VK_IMAGE_TYPE_2D,
4400 vk::VK_FORMAT_R8G8B8A8_UNORM,
4401 { m_targetWidth, m_targetHeight, 1 },
4404 vk::VK_SAMPLE_COUNT_1_BIT,
4405 vk::VK_IMAGE_TILING_OPTIMAL,
4406 vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
4407 vk::VK_SHARING_MODE_EXCLUSIVE,
4408 (deUint32)queueFamilies.size(),
4410 vk::VK_IMAGE_LAYOUT_UNDEFINED
4413 m_colorTarget = vk::createImage(vkd, device, &createInfo);
4416 m_colorTargetMemory = bindImageMemory(vki, vkd, physicalDevice, device, *m_colorTarget, 0);
4419 const vk::VkImageViewCreateInfo createInfo =
4421 vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
4426 vk::VK_IMAGE_VIEW_TYPE_2D,
4427 vk::VK_FORMAT_R8G8B8A8_UNORM,
4429 vk::VK_COMPONENT_SWIZZLE_R,
4430 vk::VK_COMPONENT_SWIZZLE_G,
4431 vk::VK_COMPONENT_SWIZZLE_B,
4432 vk::VK_COMPONENT_SWIZZLE_A
4435 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4443 m_colorTargetView = vk::createImageView(vkd, device, &createInfo);
4446 const vk::VkRenderPassCreateInfo createInfo =
4448 vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
4462 m_renderPass = vk::createRenderPass(vkd, device, &createInfo);
4466 const vk::VkImageView imageViews[] =
4470 const vk::VkFramebufferCreateInfo createInfo =
4472 vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
4477 DE_LENGTH_OF_ARRAY(imageViews),
4479 (deUint32)m_targetWidth,
4480 (deUint32)m_targetHeight,
4484 m_framebuffer = vk::createFramebuffer(vkd, device, &createInfo);
4488 PrepareRenderPassContext renderpassContext (context, *m_renderPass, *m_framebuffer, m_targetWidth, m_targetHeight);
4490 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4492 RenderPassCommand& command = *m_commands[cmdNdx];
4493 command.prepare(renderpassContext);
4498 void SubmitRenderPass::submit (SubmitContext& context)
4500 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4501 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4502 const vk::VkClearValue clearValue = vk::makeClearValueColorF32(0.0f, 0.0f, 0.0f, 1.0f);
4504 const vk::VkRenderPassBeginInfo beginInfo =
4506 vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
4512 { { 0, 0 }, { m_targetWidth, m_targetHeight } },
4517 vkd.cmdBeginRenderPass(commandBuffer, &beginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
4519 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4521 RenderPassCommand& command = *m_commands[cmdNdx];
4523 command.submit(context);
4526 vkd.cmdEndRenderPass(commandBuffer);
4529 void SubmitRenderPass::verify (VerifyContext& context, size_t commandIndex)
4531 TestLog& log (context.getLog());
4532 tcu::ResultCollector& resultCollector (context.getResultCollector());
4533 const string sectionName (de::toString(commandIndex) + ":" + getName());
4534 const tcu::ScopedLogSection section (log, sectionName, sectionName);
4535 VerifyRenderPassContext verifyContext (context, m_targetWidth, m_targetHeight);
4537 tcu::clear(verifyContext.getReferenceTarget().getAccess(), Vec4(0.0f, 0.0f, 0.0f, 1.0f));
4539 for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4541 RenderPassCommand& command = *m_commands[cmdNdx];
4542 command.verify(verifyContext, cmdNdx);
4546 const vk::InstanceInterface& vki = context.getContext().getInstanceInterface();
4547 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4548 const vk::VkPhysicalDevice physicalDevice = context.getContext().getPhysicalDevice();
4549 const vk::VkDevice device = context.getContext().getDevice();
4550 const vk::VkQueue queue = context.getContext().getQueue();
4551 const vk::VkCommandPool commandPool = context.getContext().getCommandPool();
4552 const vk::Unique<vk::VkCommandBuffer> commandBuffer (createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4553 const vector<deUint32>& queueFamilies = context.getContext().getQueueFamilies();
4554 const vk::Unique<vk::VkBuffer> dstBuffer (createBuffer(vkd, device, 4 * m_targetWidth * m_targetHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4555 const vk::Unique<vk::VkDeviceMemory> memory (bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
4557 const vk::VkImageMemoryBarrier imageBarrier =
4559 vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4562 vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
4563 vk::VK_ACCESS_TRANSFER_READ_BIT,
4565 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4566 vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4568 vk::VK_QUEUE_FAMILY_IGNORED,
4569 vk::VK_QUEUE_FAMILY_IGNORED,
4573 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4575 1, // Mip level count
4580 const vk::VkBufferMemoryBarrier bufferBarrier =
4582 vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4585 vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4586 vk::VK_ACCESS_HOST_READ_BIT,
4588 vk::VK_QUEUE_FAMILY_IGNORED,
4589 vk::VK_QUEUE_FAMILY_IGNORED,
4595 const void* preBarriers[] =
4599 const vk::VkBufferImageCopy region =
4604 vk::VK_IMAGE_ASPECT_COLOR_BIT,
4616 const void* postBarriers[] =
4621 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &preBarriers[0]);
4622 vkd.cmdCopyImageToBuffer(*commandBuffer, *m_colorTarget, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, ®ion);
4623 vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &postBarriers[0]);
4626 VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
4627 queueRun(vkd, queue, *commandBuffer);
4630 void* const ptr = mapMemory(vkd, device, *memory, 4 * m_targetWidth * m_targetHeight);
4632 vk::invalidateMappedMemoryRange(vkd, device, *memory, 0, 4 * m_targetWidth * m_targetHeight);
4635 const deUint8* const data = (const deUint8*)ptr;
4636 const ConstPixelBufferAccess resAccess (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_targetWidth, m_targetHeight, 1, data);
4637 const ConstPixelBufferAccess& refAccess (verifyContext.getReferenceTarget().getAccess());
4639 if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4640 resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4643 vkd.unmapMemory(device, *memory);
4648 class RenderBuffer : public RenderPassCommand
4653 RENDERAS_VERTEX_BUFFER,
4654 RENDERAS_INDEX_BUFFER,
4656 RenderBuffer (RenderAs renderAs) : m_renderAs(renderAs) {}
4657 ~RenderBuffer (void) {}
4659 const char* getName (void) const { return "RenderBuffer"; }
4660 void logPrepare (TestLog&, size_t) const;
4661 void logSubmit (TestLog&, size_t) const;
4662 void prepare (PrepareRenderPassContext&);
4663 void submit (SubmitContext& context);
4664 void verify (VerifyRenderPassContext&, size_t);
4667 const RenderAs m_renderAs;
4668 vk::Move<vk::VkPipeline> m_pipeline;
4669 vk::Move<vk::VkPipelineLayout> m_pipelineLayout;
4670 vk::VkDeviceSize m_bufferSize;
4672 static const vk::ProgramBinary& getVertexShader (const vk::ProgramCollection<vk::ProgramBinary>& collections, RenderAs renderAs)
4676 case RENDERAS_VERTEX_BUFFER:
4677 return collections.get("vertex-buffer.vert");
4679 case RENDERAS_INDEX_BUFFER:
4680 return collections.get("index-buffer.vert");
4683 DE_FATAL("Unknown renderAs");
4684 return collections.get("");
4689 void RenderBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4691 log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
4694 void RenderBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4696 log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
4699 void RenderBuffer::prepare (PrepareRenderPassContext& context)
4701 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4702 const vk::VkDevice device = context.getContext().getDevice();
4703 const vk::VkRenderPass renderPass = context.getRenderPass();
4704 const deUint32 subpass = 0;
4705 const vk::Unique<vk::VkShaderModule> vertexShaderModule (vk::createShaderModule(vkd, device, getVertexShader(context.getBinaryCollection(), m_renderAs), 0));
4706 const vk::Unique<vk::VkShaderModule> fragmentShaderModule (vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
4708 m_bufferSize = context.getBufferSize();
4711 const vk::VkPipelineLayoutCreateInfo createInfo =
4713 vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
4722 m_pipelineLayout = vk::createPipelineLayout(vkd, device, &createInfo);
4726 const vk::VkPipelineShaderStageCreateInfo shaderStages[] =
4729 vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4732 vk::VK_SHADER_STAGE_VERTEX_BIT,
4733 *vertexShaderModule,
4738 vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
4741 vk::VK_SHADER_STAGE_FRAGMENT_BIT,
4742 *fragmentShaderModule,
4747 const vk::VkPipelineDepthStencilStateCreateInfo depthStencilState =
4749 vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
4754 vk::VK_COMPARE_OP_ALWAYS,
4758 vk::VK_STENCIL_OP_KEEP,
4759 vk::VK_STENCIL_OP_KEEP,
4760 vk::VK_STENCIL_OP_KEEP,
4761 vk::VK_COMPARE_OP_ALWAYS,
4767 vk::VK_STENCIL_OP_KEEP,
4768 vk::VK_STENCIL_OP_KEEP,
4769 vk::VK_STENCIL_OP_KEEP,
4770 vk::VK_COMPARE_OP_ALWAYS,
4778 const vk::VkVertexInputBindingDescription vertexBindingDescriptions[] =
4783 vk::VK_VERTEX_INPUT_RATE_VERTEX
4786 const vk::VkVertexInputAttributeDescription vertexAttributeDescriptions[] =
4791 vk::VK_FORMAT_R8G8_UNORM,
4795 const vk::VkPipelineVertexInputStateCreateInfo vertexInputState =
4797 vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
4801 m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexBindingDescriptions) : 0u,
4802 m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexBindingDescriptions : DE_NULL,
4804 m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexAttributeDescriptions) : 0u,
4805 m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexAttributeDescriptions : DE_NULL,
4807 const vk::VkPipelineInputAssemblyStateCreateInfo inputAssemblyState =
4809 vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
4812 vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
4815 const vk::VkViewport viewports[] =
4817 { 0.0f, 0.0f, (float)context.getTargetWidth(), (float)context.getTargetHeight(), 0.0f, 1.0f }
4819 const vk::VkRect2D scissors[] =
4821 { { 0, 0 }, { context.getTargetWidth(), context.getTargetHeight() } }
4823 const vk::VkPipelineViewportStateCreateInfo viewportState =
4825 vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
4828 DE_LENGTH_OF_ARRAY(viewports),
4830 DE_LENGTH_OF_ARRAY(scissors),
4833 const vk::VkPipelineRasterizationStateCreateInfo rasterState =
4835 vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
4841 vk::VK_POLYGON_MODE_FILL,
4842 vk::VK_CULL_MODE_NONE,
4843 vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,
4850 const vk::VkSampleMask sampleMask = ~0u;
4851 const vk::VkPipelineMultisampleStateCreateInfo multisampleState =
4853 vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
4857 vk::VK_SAMPLE_COUNT_1_BIT,
4864 const vk::VkPipelineColorBlendAttachmentState attachments[] =
4868 vk::VK_BLEND_FACTOR_ONE,
4869 vk::VK_BLEND_FACTOR_ZERO,
4870 vk::VK_BLEND_OP_ADD,
4871 vk::VK_BLEND_FACTOR_ONE,
4872 vk::VK_BLEND_FACTOR_ZERO,
4873 vk::VK_BLEND_OP_ADD,
4874 (vk::VK_COLOR_COMPONENT_R_BIT|
4875 vk::VK_COLOR_COMPONENT_G_BIT|
4876 vk::VK_COLOR_COMPONENT_B_BIT|
4877 vk::VK_COLOR_COMPONENT_A_BIT)
4880 const vk::VkPipelineColorBlendStateCreateInfo colorBlendState =
4882 vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
4887 vk::VK_LOGIC_OP_COPY,
4888 DE_LENGTH_OF_ARRAY(attachments),
4890 { 0.0f, 0.0f, 0.0f, 0.0f }
4892 const vk::VkGraphicsPipelineCreateInfo createInfo =
4894 vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
4898 DE_LENGTH_OF_ARRAY(shaderStages),
4902 &inputAssemblyState,
4917 m_pipeline = vk::createGraphicsPipeline(vkd, device, 0, &createInfo);
4921 void RenderBuffer::submit (SubmitContext& context)
4923 const vk::DeviceInterface& vkd = context.getContext().getDeviceInterface();
4924 const vk::VkCommandBuffer commandBuffer = context.getCommandBuffer();
4925 const vk::VkDeviceSize offset = 0;
4926 const vk::VkBuffer buffer = context.getBuffer();
4928 vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4930 if (m_renderAs == RENDERAS_VERTEX_BUFFER)
4932 vkd.cmdBindVertexBuffers(commandBuffer, 0, 1, &buffer, &offset);
4933 vkd.cmdDraw(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0);
4935 else if (m_renderAs == RENDERAS_INDEX_BUFFER)
4937 vkd.cmdBindIndexBuffer(commandBuffer, context.getBuffer(), 0, vk::VK_INDEX_TYPE_UINT16);
4938 vkd.cmdDrawIndexed(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0, 0);
4941 DE_FATAL("Unknown renderAs");
4944 void RenderBuffer::verify (VerifyRenderPassContext& context, size_t)
4946 for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
4948 const deUint8 x = context.getReference().get(pos * 2);
4949 const deUint8 y = context.getReference().get((pos * 2) + 1);
4951 context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
4969 OP_BUFFER_BINDMEMORY,
4971 OP_QUEUE_WAIT_FOR_IDLE,
4972 OP_DEVICE_WAIT_FOR_IDLE,
4974 OP_COMMAND_BUFFER_BEGIN,
4975 OP_COMMAND_BUFFER_END,
4977 // Buffer transfer operations
4981 OP_BUFFER_COPY_TO_BUFFER,
4982 OP_BUFFER_COPY_FROM_BUFFER,
4984 OP_BUFFER_COPY_TO_IMAGE,
4985 OP_BUFFER_COPY_FROM_IMAGE,
4989 OP_IMAGE_BINDMEMORY,
4991 OP_IMAGE_TRANSITION_TO_GENERAL,
4993 OP_IMAGE_COPY_TO_BUFFER,
4994 OP_IMAGE_COPY_FROM_BUFFER,
4996 OP_IMAGE_COPY_TO_IMAGE,
4997 OP_IMAGE_COPY_FROM_IMAGE,
4999 OP_IMAGE_BLIT_TO_IMAGE,
5000 OP_IMAGE_BLIT_FROM_IMAGE,
5004 OP_PIPELINE_BARRIER_GLOBAL,
5005 OP_PIPELINE_BARRIER_BUFFER,
5006 OP_PIPELINE_BARRIER_IMAGE,
5008 // Renderpass operations
5009 OP_RENDERPASS_BEGIN,
5012 // Commands inside render pass
5013 OP_RENDER_VERTEX_BUFFER,
5014 OP_RENDER_INDEX_BUFFER
5020 STAGE_COMMAND_BUFFER,
5025 bool isWriteAccess (vk::VkAccessFlagBits access)
5030 case vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT: return false;
5031 case vk::VK_ACCESS_INDEX_READ_BIT: return false;
5032 case vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: return false;
5033 case vk::VK_ACCESS_UNIFORM_READ_BIT: return false;
5034 case vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: return false;
5035 case vk::VK_ACCESS_SHADER_READ_BIT: return false;
5036 case vk::VK_ACCESS_SHADER_WRITE_BIT: return true;
5037 case vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: return false;
5038 case vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: return true;
5039 case vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: return false;
5040 case vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: return true;
5041 case vk::VK_ACCESS_TRANSFER_READ_BIT: return false;
5042 case vk::VK_ACCESS_TRANSFER_WRITE_BIT: return true;
5043 case vk::VK_ACCESS_HOST_READ_BIT: return false;
5044 case vk::VK_ACCESS_HOST_WRITE_BIT: return true;
5045 case vk::VK_ACCESS_MEMORY_READ_BIT: return false;
5046 case vk::VK_ACCESS_MEMORY_WRITE_BIT: return true;
5049 DE_FATAL("Unknown access");
5057 CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses);
5059 bool isValid (vk::VkPipelineStageFlagBits stage,
5060 vk::VkAccessFlagBits access) const;
5062 void perform (vk::VkPipelineStageFlagBits stage,
5063 vk::VkAccessFlagBits access);
5065 void submitCommandBuffer (void);
5067 void getFullBarrier (vk::VkPipelineStageFlags& srcStages,
5068 vk::VkAccessFlags& srcAccesses,
5069 vk::VkPipelineStageFlags& dstStages,
5070 vk::VkAccessFlags& dstAccesses) const;
5072 void barrier (vk::VkPipelineStageFlags srcStages,
5073 vk::VkAccessFlags srcAccesses,
5074 vk::VkPipelineStageFlags dstStages,
5075 vk::VkAccessFlags dstAccesses);
5077 void fullBarrier (void);
5079 // Everything is clean and there is no need for barriers
5080 bool isClean (void) const;
5083 // Limit which stages and accesses are used by the CacheState tracker
5084 const vk::VkPipelineStageFlags m_allowedStages;
5085 const vk::VkAccessFlags m_allowedAccesses;
5087 // [dstStage][srcStage] = srcAccesses
5088 // In stage dstStage write srcAccesses from srcStage are not yet available
5089 vk::VkAccessFlags m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
5090 // [dstStage] = dstAccesses
5091 // In stage dstStage ops with dstAccesses are not yet visible
5092 vk::VkAccessFlags m_invisibleOperations[PIPELINESTAGE_LAST];
5094 // [dstStage] = srcStage
5095 // Memory operation in srcStage have not completed before dstStage
5096 vk::VkPipelineStageFlags m_incompleteOperations[PIPELINESTAGE_LAST];
5099 CacheState::CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses)
5100 : m_allowedStages (allowedStages)
5101 , m_allowedAccesses (allowedAccesses)
5103 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5105 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5107 if ((dstStage_ & m_allowedStages) == 0)
5110 // All operations are initially visible
5111 m_invisibleOperations[dstStage] = 0;
5113 // There are no incomplete read operations initially
5114 m_incompleteOperations[dstStage] = 0;
5116 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5118 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5120 if ((srcStage_ & m_allowedStages) == 0)
5123 // There are no write operations that are not yet available
5125 m_unavailableWriteOperations[dstStage][srcStage] = 0;
5130 bool CacheState::isValid (vk::VkPipelineStageFlagBits stage,
5131 vk::VkAccessFlagBits access) const
5133 DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5134 DE_ASSERT((stage & (~m_allowedStages)) == 0);
5136 const PipelineStage dstStage = pipelineStageFlagToPipelineStage(stage);
5138 // Previous operations are not visible to access on stage
5139 if ((m_invisibleOperations[dstStage] & access) != 0)
5142 if (isWriteAccess(access))
5144 // Memory operations from other stages have not completed before
5146 if (m_incompleteOperations[dstStage] != 0)
5153 void CacheState::perform (vk::VkPipelineStageFlagBits stage,
5154 vk::VkAccessFlagBits access)
5156 DE_ASSERT((access & (~m_allowedAccesses)) == 0);
5157 DE_ASSERT((stage & (~m_allowedStages)) == 0);
5159 const PipelineStage srcStage = pipelineStageFlagToPipelineStage(stage);
5161 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5163 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5165 if ((dstStage_ & m_allowedStages) == 0)
5168 // Mark stage as incomplete for all stages
5169 m_incompleteOperations[dstStage] |= stage;
5171 if (isWriteAccess(access))
5173 // Mark all accesses from all stages invisible
5174 m_invisibleOperations[dstStage] |= m_allowedAccesses;
5176 // Mark write access from srcStage unavailable to all stages
5177 m_unavailableWriteOperations[dstStage][srcStage] |= access;
5182 void CacheState::submitCommandBuffer (void)
5184 // Flush all host writes and reads
5185 barrier(m_allowedStages & vk::VK_PIPELINE_STAGE_HOST_BIT,
5186 m_allowedAccesses & (vk::VK_ACCESS_HOST_READ_BIT | vk::VK_ACCESS_HOST_WRITE_BIT),
5191 void CacheState::getFullBarrier (vk::VkPipelineStageFlags& srcStages,
5192 vk::VkAccessFlags& srcAccesses,
5193 vk::VkPipelineStageFlags& dstStages,
5194 vk::VkAccessFlags& dstAccesses) const
5201 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5203 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5205 if ((dstStage_ & m_allowedStages) == 0)
5208 // Make sure all previous operation are complete in all stages
5209 if (m_incompleteOperations[dstStage])
5211 dstStages |= dstStage_;
5212 srcStages |= m_incompleteOperations[dstStage];
5215 // Make sure all read operations are visible in dstStage
5216 if (m_invisibleOperations[dstStage])
5218 dstStages |= dstStage_;
5219 dstAccesses |= m_invisibleOperations[dstStage];
5222 // Make sure all write operations fro mall stages are available
5223 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5225 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5227 if ((srcStage_ & m_allowedStages) == 0)
5230 if (m_unavailableWriteOperations[dstStage][srcStage])
5232 dstStages |= dstStage_;
5233 srcStages |= dstStage_;
5234 srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage];
5239 DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5240 DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5241 DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5242 DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
5245 void CacheState::barrier (vk::VkPipelineStageFlags srcStages,
5246 vk::VkAccessFlags srcAccesses,
5247 vk::VkPipelineStageFlags dstStages,
5248 vk::VkAccessFlags dstAccesses)
5250 DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
5251 DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
5252 DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
5253 DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
5257 vk::VkPipelineStageFlags oldIncompleteOperations[PIPELINESTAGE_LAST];
5258 vk::VkAccessFlags oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
5260 deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
5261 deMemcpy(oldUnavailableWriteOperations, m_unavailableWriteOperations, sizeof(oldUnavailableWriteOperations));
5263 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
5265 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5267 if ((srcStage_ & m_allowedStages) == 0)
5270 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
5272 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5274 if ((dstStage_ & m_allowedStages) == 0)
5277 // Stages that have completed before srcStage have also completed before dstStage
5278 m_incompleteOperations[dstStage] &= ~oldIncompleteOperations[srcStage];
5280 for (vk::VkPipelineStageFlags sharedStage_ = 1; sharedStage_ <= m_allowedStages; sharedStage_ <<= 1)
5282 const PipelineStage sharedStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)sharedStage_);
5284 if ((sharedStage_ & m_allowedStages) == 0)
5287 // Writes that are available in srcStage are also available in dstStage
5288 m_unavailableWriteOperations[dstStage][sharedStage] &= ~oldUnavailableWriteOperations[srcStage][sharedStage];
5295 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
5297 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5298 bool allWritesAvailable = true;
5300 if ((dstStage_ & m_allowedStages) == 0)
5303 // Operations in srcStages have completed before any stage in dstStages
5304 m_incompleteOperations[dstStage] &= ~srcStages;
5306 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
5308 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5310 if ((srcStage_ & m_allowedStages) == 0)
5313 // Make srcAccesses from srcStagees available in dstStage
5314 m_unavailableWriteOperations[dstStage][srcStage] &= ~srcAccesses;
5316 if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
5317 allWritesAvailable = false;
5320 // If all writes are available in dstStage make dstAccesses also visible
5321 if (allWritesAvailable)
5322 m_invisibleOperations[dstStage] &= ~dstAccesses;
5326 bool CacheState::isClean (void) const
5328 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5330 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5332 if ((dstStage_ & m_allowedStages) == 0)
5335 // Some operations are not visible to some stages
5336 if (m_invisibleOperations[dstStage] != 0)
5339 // There are operation that have not completed yet
5340 if (m_incompleteOperations[dstStage] != 0)
5343 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5345 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5347 if ((srcStage_ & m_allowedStages) == 0)
5350 // Some write operations are not available yet
5351 if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
5359 void CacheState::fullBarrier (void)
5361 for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
5363 const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
5365 if ((dstStage_ & m_allowedStages) == 0)
5368 // All stages have completed
5369 m_incompleteOperations[dstStage] = 0;
5371 // All operations are visible
5372 m_invisibleOperations[dstStage] = 0;
5374 for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
5376 const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
5378 if ((srcStage_ & m_allowedStages) == 0)
5381 // All writes are available
5382 m_unavailableWriteOperations[dstStage][srcStage] = 0;
5389 State (Usage usage, deUint32 seed)
5390 : stage (STAGE_HOST)
5391 , cache (usageToStageFlags(usage), usageToAccessFlags(usage))
5394 , hostInvalidated (true)
5395 , hostFlushed (true)
5396 , memoryDefined (false)
5398 , hasBoundBufferMemory (false)
5400 , hasBoundImageMemory (false)
5401 , imageHasGeneralLayout (false)
5402 , imageDefined (false)
5405 , commandBufferIsEmpty (true)
5414 bool hostInvalidated;
5419 bool hasBoundBufferMemory;
5422 bool hasBoundImageMemory;
5423 bool imageHasGeneralLayout;
5429 bool commandBufferIsEmpty;
5432 void getAvailableOps (const State& state, bool supportsBuffers, bool supportsImages, Usage usage, vector<Op>& ops)
5434 if (state.stage == STAGE_HOST)
5436 if (usage & (USAGE_HOST_READ | USAGE_HOST_WRITE))
5438 // Host memory operations
5441 ops.push_back(OP_UNMAP);
5443 // Avoid flush and finish if they are not needed
5444 if (!state.hostFlushed)
5445 ops.push_back(OP_MAP_FLUSH);
5447 if (!state.hostInvalidated
5449 && ((usage & USAGE_HOST_READ) == 0
5450 || state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5451 && ((usage & USAGE_HOST_WRITE) == 0
5452 || state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)))
5454 ops.push_back(OP_MAP_INVALIDATE);
5457 if (usage & USAGE_HOST_READ
5458 && usage & USAGE_HOST_WRITE
5459 && state.memoryDefined
5460 && state.hostInvalidated
5461 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)
5462 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5464 ops.push_back(OP_MAP_MODIFY);
5467 if (usage & USAGE_HOST_READ
5468 && state.memoryDefined
5469 && state.hostInvalidated
5470 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
5472 ops.push_back(OP_MAP_READ);
5475 if (usage & USAGE_HOST_WRITE
5476 && state.hostInvalidated
5477 && state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT))
5479 ops.push_back(OP_MAP_WRITE);
5483 ops.push_back(OP_MAP);
5486 if (state.hasBoundBufferMemory && state.queueIdle)
5488 // \note Destroy only buffers after they have been bound
5489 ops.push_back(OP_BUFFER_DESTROY);
5493 if (state.hasBuffer)
5495 if (!state.hasBoundBufferMemory)
5496 ops.push_back(OP_BUFFER_BINDMEMORY);
5498 else if (!state.hasImage && supportsBuffers) // Avoid creating buffer if there is already image
5499 ops.push_back(OP_BUFFER_CREATE);
5502 if (state.hasBoundImageMemory && state.queueIdle)
5504 // \note Destroy only image after they have been bound
5505 ops.push_back(OP_IMAGE_DESTROY);
5511 if (!state.hasBoundImageMemory)
5512 ops.push_back(OP_IMAGE_BINDMEMORY);
5514 else if (!state.hasBuffer && supportsImages) // Avoid creating image if there is already buffer
5515 ops.push_back(OP_IMAGE_CREATE);
5518 // Host writes must be flushed before GPU commands and there must be
5519 // buffer or image for GPU commands
5520 if (state.hostFlushed
5521 && (state.memoryDefined || supportsDeviceBufferWrites(usage) || state.imageDefined || supportsDeviceImageWrites(usage))
5522 && (state.hasBoundBufferMemory || state.hasBoundImageMemory) // Avoid command buffers if there is no object to use
5523 && (usageToStageFlags(usage) & (~vk::VK_PIPELINE_STAGE_HOST_BIT)) != 0) // Don't start command buffer if there are no ways to use memory from gpu
5525 ops.push_back(OP_COMMAND_BUFFER_BEGIN);
5528 if (!state.deviceIdle)
5529 ops.push_back(OP_DEVICE_WAIT_FOR_IDLE);
5531 if (!state.queueIdle)
5532 ops.push_back(OP_QUEUE_WAIT_FOR_IDLE);
5534 else if (state.stage == STAGE_COMMAND_BUFFER)
5536 if (!state.cache.isClean())
5538 ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
5541 ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
5543 if (state.hasBuffer)
5544 ops.push_back(OP_PIPELINE_BARRIER_BUFFER);
5547 if (state.hasBoundBufferMemory)
5549 if (usage & USAGE_TRANSFER_DST
5550 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
5552 ops.push_back(OP_BUFFER_FILL);
5553 ops.push_back(OP_BUFFER_UPDATE);
5554 ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
5555 ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);
5558 if (usage & USAGE_TRANSFER_SRC
5559 && state.memoryDefined
5560 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
5562 ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
5563 ops.push_back(OP_BUFFER_COPY_TO_IMAGE);
5567 if (state.hasBoundImageMemory)
5569 if (!state.imageHasGeneralLayout)
5571 ops.push_back(OP_IMAGE_TRANSITION_TO_GENERAL);
5575 if (usage & USAGE_TRANSFER_DST
5576 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
5578 ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
5579 ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
5580 ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);
5583 if (usage & USAGE_TRANSFER_SRC
5584 && state.imageDefined
5585 && state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
5587 ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
5588 ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
5589 ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);
5594 // \todo Add other usages?
5595 if (((usage & USAGE_VERTEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
5596 || ((usage & USAGE_INDEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT)))
5597 ops.push_back(OP_RENDERPASS_BEGIN);
5599 // \note This depends on previous operations and has to be always the
5600 // last command buffer operation check
5601 if (ops.empty() || !state.commandBufferIsEmpty)
5602 ops.push_back(OP_COMMAND_BUFFER_END);
5604 else if (state.stage == STAGE_RENDER_PASS)
5606 if (usage & USAGE_VERTEX_BUFFER
5607 && state.memoryDefined
5608 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
5610 ops.push_back(OP_RENDER_VERTEX_BUFFER);
5613 if (usage & USAGE_INDEX_BUFFER
5614 && state.memoryDefined
5615 && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
5617 ops.push_back(OP_RENDER_INDEX_BUFFER);
5620 ops.push_back(OP_RENDERPASS_END);
5623 DE_FATAL("Unknown stage");
5626 void applyOp (State& state, const Memory& memory, Op op)
5631 DE_ASSERT(state.stage == STAGE_HOST);
5632 DE_ASSERT(!state.mapped);
5633 state.mapped = true;
5637 DE_ASSERT(state.stage == STAGE_HOST);
5638 DE_ASSERT(state.mapped);
5639 state.mapped = false;
5643 DE_ASSERT(state.stage == STAGE_HOST);
5644 DE_ASSERT(!state.hostFlushed);
5645 state.hostFlushed = true;
5648 case OP_MAP_INVALIDATE:
5649 DE_ASSERT(state.stage == STAGE_HOST);
5650 DE_ASSERT(!state.hostInvalidated);
5651 state.hostInvalidated = true;
5655 DE_ASSERT(state.stage == STAGE_HOST);
5656 DE_ASSERT(state.hostInvalidated);
5657 state.rng.getUint32();
5661 DE_ASSERT(state.stage == STAGE_HOST);
5662 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5663 state.hostFlushed = false;
5665 state.memoryDefined = true;
5666 state.rng.getUint32();
5670 DE_ASSERT(state.stage == STAGE_HOST);
5671 DE_ASSERT(state.hostInvalidated);
5673 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5674 state.hostFlushed = false;
5676 state.rng.getUint32();
5679 case OP_BUFFER_CREATE:
5680 DE_ASSERT(state.stage == STAGE_HOST);
5681 DE_ASSERT(!state.hasBuffer);
5683 state.hasBuffer = true;
5686 case OP_BUFFER_DESTROY:
5687 DE_ASSERT(state.stage == STAGE_HOST);
5688 DE_ASSERT(state.hasBuffer);
5689 DE_ASSERT(state.hasBoundBufferMemory);
5691 state.hasBuffer = false;
5692 state.hasBoundBufferMemory = false;
5695 case OP_BUFFER_BINDMEMORY:
5696 DE_ASSERT(state.stage == STAGE_HOST);
5697 DE_ASSERT(state.hasBuffer);
5698 DE_ASSERT(!state.hasBoundBufferMemory);
5700 state.hasBoundBufferMemory = true;
5703 case OP_IMAGE_CREATE:
5704 DE_ASSERT(state.stage == STAGE_HOST);
5705 DE_ASSERT(!state.hasImage);
5706 DE_ASSERT(!state.hasBuffer);
5708 state.hasImage = true;
5711 case OP_IMAGE_DESTROY:
5712 DE_ASSERT(state.stage == STAGE_HOST);
5713 DE_ASSERT(state.hasImage);
5714 DE_ASSERT(state.hasBoundImageMemory);
5716 state.hasImage = false;
5717 state.hasBoundImageMemory = false;
5718 state.imageHasGeneralLayout = false;
5719 state.imageDefined = false;
5722 case OP_IMAGE_BINDMEMORY:
5723 DE_ASSERT(state.stage == STAGE_HOST);
5724 DE_ASSERT(state.hasImage);
5725 DE_ASSERT(!state.hasBoundImageMemory);
5727 state.hasBoundImageMemory = true;
5730 case OP_IMAGE_TRANSITION_TO_GENERAL:
5731 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5732 DE_ASSERT(state.hasImage);
5733 DE_ASSERT(state.hasBoundImageMemory);
5735 state.imageHasGeneralLayout = true;
5736 state.memoryDefined = false;
5739 case OP_QUEUE_WAIT_FOR_IDLE:
5740 DE_ASSERT(state.stage == STAGE_HOST);
5741 DE_ASSERT(!state.queueIdle);
5743 state.queueIdle = true;
5746 case OP_DEVICE_WAIT_FOR_IDLE:
5747 DE_ASSERT(state.stage == STAGE_HOST);
5748 DE_ASSERT(!state.deviceIdle);
5750 state.queueIdle = true;
5751 state.deviceIdle = true;
5754 case OP_COMMAND_BUFFER_BEGIN:
5755 DE_ASSERT(state.stage == STAGE_HOST);
5756 state.stage = STAGE_COMMAND_BUFFER;
5757 state.commandBufferIsEmpty = true;
5758 // Makes host writes visible to command buffer
5759 state.cache.submitCommandBuffer();
5762 case OP_COMMAND_BUFFER_END:
5763 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5764 state.stage = STAGE_HOST;
5765 state.queueIdle = false;
5766 state.deviceIdle = false;
5767 // \todo Should this set all device reads ready?
5770 case OP_BUFFER_COPY_FROM_BUFFER:
5771 case OP_BUFFER_COPY_FROM_IMAGE:
5772 case OP_BUFFER_UPDATE:
5773 case OP_BUFFER_FILL:
5774 state.rng.getUint32();
5775 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5777 if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5778 state.hostInvalidated = false;
5780 state.commandBufferIsEmpty = false;
5781 state.memoryDefined = true;
5782 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
5785 case OP_BUFFER_COPY_TO_BUFFER:
5786 case OP_BUFFER_COPY_TO_IMAGE:
5787 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5789 state.commandBufferIsEmpty = false;
5790 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
5793 case OP_IMAGE_BLIT_FROM_IMAGE:
5794 state.rng.getBool();
5796 case OP_IMAGE_COPY_FROM_BUFFER:
5797 case OP_IMAGE_COPY_FROM_IMAGE:
5798 state.rng.getUint32();
5799 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5801 state.commandBufferIsEmpty = false;
5802 state.imageDefined = true;
5803 state.memoryDefined = false;
5804 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
5807 case OP_IMAGE_BLIT_TO_IMAGE:
5808 state.rng.getBool();
5810 case OP_IMAGE_COPY_TO_BUFFER:
5811 case OP_IMAGE_COPY_TO_IMAGE:
5812 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5814 state.commandBufferIsEmpty = false;
5815 state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
5818 case OP_PIPELINE_BARRIER_GLOBAL:
5819 case OP_PIPELINE_BARRIER_BUFFER:
5820 case OP_PIPELINE_BARRIER_IMAGE:
5822 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5824 vk::VkPipelineStageFlags dirtySrcStages;
5825 vk::VkAccessFlags dirtySrcAccesses;
5826 vk::VkPipelineStageFlags dirtyDstStages;
5827 vk::VkAccessFlags dirtyDstAccesses;
5829 vk::VkPipelineStageFlags srcStages;
5830 vk::VkAccessFlags srcAccesses;
5831 vk::VkPipelineStageFlags dstStages;
5832 vk::VkAccessFlags dstAccesses;
5834 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
5836 // Try masking some random bits
5837 srcStages = dirtySrcStages & state.rng.getUint32();
5838 srcAccesses = dirtySrcAccesses & state.rng.getUint32();
5840 dstStages = dirtyDstStages & state.rng.getUint32();
5841 dstAccesses = dirtyDstAccesses & state.rng.getUint32();
5843 // If there are no bits in stage mask use the original dirty stages
5844 srcStages = srcStages ? srcStages : dirtySrcStages;
5845 dstStages = dstStages ? dstStages : dirtyDstStages;
5848 srcStages = dstStages;
5850 state.commandBufferIsEmpty = false;
5851 state.cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
5855 case OP_RENDERPASS_BEGIN:
5857 DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
5859 state.stage = STAGE_RENDER_PASS;
5863 case OP_RENDERPASS_END:
5865 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
5867 state.stage = STAGE_COMMAND_BUFFER;
5871 case OP_RENDER_VERTEX_BUFFER:
5873 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
5875 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
5879 case OP_RENDER_INDEX_BUFFER:
5881 DE_ASSERT(state.stage == STAGE_RENDER_PASS);
5883 state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT);
5888 DE_FATAL("Unknown op");
5892 de::MovePtr<Command> createHostCommand (Op op,
5895 vk::VkSharingMode sharing)
5899 case OP_MAP: return de::MovePtr<Command>(new Map());
5900 case OP_UNMAP: return de::MovePtr<Command>(new UnMap());
5902 case OP_MAP_FLUSH: return de::MovePtr<Command>(new Flush());
5903 case OP_MAP_INVALIDATE: return de::MovePtr<Command>(new Invalidate());
5905 case OP_MAP_READ: return de::MovePtr<Command>(new HostMemoryAccess(true, false, rng.getUint32()));
5906 case OP_MAP_WRITE: return de::MovePtr<Command>(new HostMemoryAccess(false, true, rng.getUint32()));
5907 case OP_MAP_MODIFY: return de::MovePtr<Command>(new HostMemoryAccess(true, true, rng.getUint32()));
5909 case OP_BUFFER_CREATE: return de::MovePtr<Command>(new CreateBuffer(usageToBufferUsageFlags(usage), sharing));
5910 case OP_BUFFER_DESTROY: return de::MovePtr<Command>(new DestroyBuffer());
5911 case OP_BUFFER_BINDMEMORY: return de::MovePtr<Command>(new BindBufferMemory());
5913 case OP_IMAGE_CREATE: return de::MovePtr<Command>(new CreateImage(usageToImageUsageFlags(usage), sharing));
5914 case OP_IMAGE_DESTROY: return de::MovePtr<Command>(new DestroyImage());
5915 case OP_IMAGE_BINDMEMORY: return de::MovePtr<Command>(new BindImageMemory());
5917 case OP_QUEUE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new QueueWaitIdle());
5918 case OP_DEVICE_WAIT_FOR_IDLE: return de::MovePtr<Command>(new DeviceWaitIdle());
5921 DE_FATAL("Unknown op");
5922 return de::MovePtr<Command>(DE_NULL);
5926 de::MovePtr<CmdCommand> createCmdCommand (de::Random& rng,
5932 case OP_BUFFER_FILL: return de::MovePtr<CmdCommand>(new FillBuffer(rng.getUint32()));
5933 case OP_BUFFER_UPDATE: return de::MovePtr<CmdCommand>(new UpdateBuffer(rng.getUint32()));
5934 case OP_BUFFER_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyToBuffer());
5935 case OP_BUFFER_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new BufferCopyFromBuffer(rng.getUint32()));
5937 case OP_BUFFER_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyToImage());
5938 case OP_BUFFER_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new BufferCopyFromImage(rng.getUint32()));
5940 case OP_IMAGE_TRANSITION_TO_GENERAL: return de::MovePtr<CmdCommand>(new ImageTransition());
5942 case OP_IMAGE_COPY_TO_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyToBuffer());
5943 case OP_IMAGE_COPY_FROM_BUFFER: return de::MovePtr<CmdCommand>(new ImageCopyFromBuffer(rng.getUint32()));
5944 case OP_IMAGE_COPY_TO_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyToImage());
5945 case OP_IMAGE_COPY_FROM_IMAGE: return de::MovePtr<CmdCommand>(new ImageCopyFromImage(rng.getUint32()));
5946 case OP_IMAGE_BLIT_TO_IMAGE:
5948 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
5949 return de::MovePtr<CmdCommand>(new ImageBlitToImage(scale));
5952 case OP_IMAGE_BLIT_FROM_IMAGE:
5954 const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
5955 return de::MovePtr<CmdCommand>(new ImageBlitFromImage(rng.getUint32(), scale));
5958 case OP_PIPELINE_BARRIER_GLOBAL:
5959 case OP_PIPELINE_BARRIER_BUFFER:
5960 case OP_PIPELINE_BARRIER_IMAGE:
5962 vk::VkPipelineStageFlags dirtySrcStages;
5963 vk::VkAccessFlags dirtySrcAccesses;
5964 vk::VkPipelineStageFlags dirtyDstStages;
5965 vk::VkAccessFlags dirtyDstAccesses;
5967 vk::VkPipelineStageFlags srcStages;
5968 vk::VkAccessFlags srcAccesses;
5969 vk::VkPipelineStageFlags dstStages;
5970 vk::VkAccessFlags dstAccesses;
5972 state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
5974 // Try masking some random bits
5975 srcStages = dirtySrcStages & rng.getUint32();
5976 srcAccesses = dirtySrcAccesses & rng.getUint32();
5978 dstStages = dirtyDstStages & rng.getUint32();
5979 dstAccesses = dirtyDstAccesses & rng.getUint32();
5981 // If there are no bits in stage mask use the original dirty stages
5982 srcStages = srcStages ? srcStages : dirtySrcStages;
5983 dstStages = dstStages ? dstStages : dirtyDstStages;
5986 srcStages = dstStages;
5988 PipelineBarrier::Type type;
5990 if (op == OP_PIPELINE_BARRIER_IMAGE)
5991 type = PipelineBarrier::TYPE_IMAGE;
5992 else if (op == OP_PIPELINE_BARRIER_BUFFER)
5993 type = PipelineBarrier::TYPE_BUFFER;
5994 else if (op == OP_PIPELINE_BARRIER_GLOBAL)
5995 type = PipelineBarrier::TYPE_GLOBAL;
5998 type = PipelineBarrier::TYPE_LAST;
5999 DE_FATAL("Unknown op");
6002 return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type));
6006 DE_FATAL("Unknown op");
6007 return de::MovePtr<CmdCommand>(DE_NULL);
6011 de::MovePtr<RenderPassCommand> createRenderPassCommand (de::Random&,
6017 case OP_RENDER_VERTEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_VERTEX_BUFFER));
6018 case OP_RENDER_INDEX_BUFFER: return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_INDEX_BUFFER));
6021 DE_FATAL("Unknown op");
6022 return de::MovePtr<RenderPassCommand>(DE_NULL);
6026 de::MovePtr<CmdCommand> createRenderPassCommands (const Memory& memory,
6027 de::Random& nextOpRng,
6033 // \todo Exception safety
6034 vector<RenderPassCommand*> commands;
6036 for (; opNdx < opCount; opNdx++)
6040 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6042 DE_ASSERT(!ops.empty());
6045 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6047 if (op == OP_RENDERPASS_END)
6053 de::Random rng (state.rng);
6055 commands.push_back(createRenderPassCommand(rng, state, op).release());
6056 applyOp(state, memory, op);
6058 DE_ASSERT(state.rng == rng);
6063 applyOp(state, memory, OP_RENDERPASS_END);
6064 return de::MovePtr<CmdCommand>(new SubmitRenderPass(commands));
6067 de::MovePtr<Command> createCmdCommands (const Memory& memory,
6068 de::Random& nextOpRng,
6074 // \todo Exception safety
6075 vector<CmdCommand*> commands;
6077 for (; opNdx < opCount; opNdx++)
6081 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6083 DE_ASSERT(!ops.empty());
6086 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6088 if (op == OP_COMMAND_BUFFER_END)
6094 // \note Command needs to known the state before the operation
6095 if (op == OP_RENDERPASS_BEGIN)
6097 applyOp(state, memory, op);
6098 commands.push_back(createRenderPassCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
6102 de::Random rng (state.rng);
6104 commands.push_back(createCmdCommand(rng, state, op).release());
6105 applyOp(state, memory, op);
6107 DE_ASSERT(state.rng == rng);
6114 applyOp(state, memory, OP_COMMAND_BUFFER_END);
6115 return de::MovePtr<Command>(new SubmitCommandBuffer(commands));
6118 void createCommands (vector<Command*>& commands,
6120 const Memory& memory,
6122 vk::VkSharingMode sharingMode)
6124 const size_t opCount = 100;
6125 State state (usage, seed);
6126 // Used to select next operation only
6127 de::Random nextOpRng (seed ^ 12930809);
6129 commands.reserve(opCount);
6131 for (size_t opNdx = 0; opNdx < opCount; opNdx++)
6135 getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
6137 DE_ASSERT(!ops.empty());
6140 const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
6142 if (op == OP_COMMAND_BUFFER_BEGIN)
6144 applyOp(state, memory, op);
6145 commands.push_back(createCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
6149 de::Random rng (state.rng);
6151 commands.push_back(createHostCommand(op, rng, usage, sharingMode).release());
6152 applyOp(state, memory, op);
6154 // Make sure that random generator is in sync
6155 DE_ASSERT(state.rng == rng);
6160 // Clean up resources
6161 if (state.hasBuffer && state.hasImage)
6163 if (!state.queueIdle)
6164 commands.push_back(new QueueWaitIdle());
6166 if (state.hasBuffer)
6167 commands.push_back(new DestroyBuffer());
6170 commands.push_back(new DestroyImage());
6174 void testCommand (TestLog& log,
6175 tcu::ResultCollector& resultCollector,
6176 const vk::ProgramCollection<vk::ProgramBinary>& binaryCollection,
6177 const vk::InstanceInterface& vki,
6178 const vk::DeviceInterface& vkd,
6179 vk::VkPhysicalDevice physicalDevice,
6180 vk::VkDevice device,
6181 vk::VkDeviceSize size,
6182 deUint32 memoryTypeIndex,
6184 vk::VkSharingMode sharingMode,
6185 vk::VkQueue executionQueue,
6186 deUint32 executionQueueFamily,
6187 const vector<deUint32>& queueFamilies,
6188 const vk::VkDeviceSize maxBufferSize,
6189 const IVec2 maxImageSize)
6191 const deUint32 seed = 2830980989u;
6192 Memory memory (vki, vkd, physicalDevice, device, size, memoryTypeIndex, maxBufferSize, maxImageSize[0], maxImageSize[1]);
6193 vector<Command*> commands;
6194 vector<pair<deUint32, vk::VkQueue> > queues;
6198 log << TestLog::Message << "Create commands" << TestLog::EndMessage;
6199 createCommands(commands, seed, memory, usage, sharingMode);
6201 for (size_t queueNdx = 0; queueNdx < queueFamilies.size(); queueNdx++)
6205 vkd.getDeviceQueue(device, queueFamilies[queueNdx], 0, &queue);
6206 queues.push_back(std::make_pair(queueFamilies[queueNdx], queue));
6210 const tcu::ScopedLogSection section (log, "LogPrepare", "LogPrepare");
6212 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6213 commands[cmdNdx]->logPrepare(log, cmdNdx);
6217 const tcu::ScopedLogSection section (log, "LogExecute", "LogExecute");
6219 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6220 commands[cmdNdx]->logExecute(log, cmdNdx);
6224 const Context context (vki, vkd, physicalDevice, device, executionQueue, executionQueueFamily, queues, binaryCollection);
6229 PrepareContext prepareContext (context, memory);
6231 log << TestLog::Message << "Begin prepare" << TestLog::EndMessage;
6233 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6235 Command& command = *commands[cmdNdx];
6239 command.prepare(prepareContext);
6243 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to prepare for execution");
6248 ExecuteContext executeContext (context);
6250 log << TestLog::Message << "Begin execution" << TestLog::EndMessage;
6252 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6254 Command& command = *commands[cmdNdx];
6258 command.execute(executeContext);
6262 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to execute");
6267 VK_CHECK(vkd.deviceWaitIdle(device));
6271 const tcu::ScopedLogSection section (log, "Verify", "Verify");
6272 VerifyContext verifyContext (log, resultCollector, context, size);
6274 log << TestLog::Message << "Begin verify" << TestLog::EndMessage;
6276 for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
6278 Command& command = *commands[cmdNdx];
6282 command.verify(verifyContext, cmdNdx);
6286 resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed verification");
6292 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6294 delete commands[commandNdx];
6295 commands[commandNdx] = DE_NULL;
6300 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6302 delete commands[commandNdx];
6303 commands[commandNdx] = DE_NULL;
6312 for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
6314 delete commands[commandNdx];
6315 commands[commandNdx] = DE_NULL;
6321 class MemoryTestInstance : public TestInstance
6325 MemoryTestInstance (::vkt::Context& context, const TestConfig& config);
6327 tcu::TestStatus iterate (void);
6330 const TestConfig m_config;
6331 const vk::VkPhysicalDeviceMemoryProperties m_memoryProperties;
6332 deUint32 m_memoryTypeNdx;
6333 tcu::ResultCollector m_resultCollector;
6336 MemoryTestInstance::MemoryTestInstance (::vkt::Context& context, const TestConfig& config)
6337 : TestInstance (context)
6339 , m_memoryProperties (vk::getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
6340 , m_memoryTypeNdx (0)
6341 , m_resultCollector (context.getTestContext().getLog())
6343 TestLog& log = context.getTestContext().getLog();
6345 const tcu::ScopedLogSection section (log, "TestCaseInfo", "Test Case Info");
6347 log << TestLog::Message << "Buffer size: " << config.size << TestLog::EndMessage;
6348 log << TestLog::Message << "Sharing: " << config.sharing << TestLog::EndMessage;
6349 log << TestLog::Message << "Access: " << config.usage << TestLog::EndMessage;
6353 const tcu::ScopedLogSection section (log, "MemoryProperties", "Memory Properties");
6355 for (deUint32 heapNdx = 0; heapNdx < m_memoryProperties.memoryHeapCount; heapNdx++)
6357 const tcu::ScopedLogSection heapSection (log, "Heap" + de::toString(heapNdx), "Heap " + de::toString(heapNdx));
6359 log << TestLog::Message << "Size: " << m_memoryProperties.memoryHeaps[heapNdx].size << TestLog::EndMessage;
6360 log << TestLog::Message << "Flags: " << m_memoryProperties.memoryHeaps[heapNdx].flags << TestLog::EndMessage;
6363 for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < m_memoryProperties.memoryTypeCount; memoryTypeNdx++)
6365 const tcu::ScopedLogSection memoryTypeSection (log, "MemoryType" + de::toString(memoryTypeNdx), "Memory type " + de::toString(memoryTypeNdx));
6367 log << TestLog::Message << "Properties: " << m_memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags << TestLog::EndMessage;
6368 log << TestLog::Message << "Heap: " << m_memoryProperties.memoryTypes[memoryTypeNdx].heapIndex << TestLog::EndMessage;
6373 tcu::TestStatus MemoryTestInstance::iterate (void)
6375 // \todo Split different stages over multiple iterations
6376 if (m_memoryTypeNdx < m_memoryProperties.memoryTypeCount)
6378 TestLog& log = m_context.getTestContext().getLog();
6379 const tcu::ScopedLogSection section (log, "MemoryType" + de::toString(m_memoryTypeNdx), "Memory type " + de::toString(m_memoryTypeNdx));
6380 const vk::InstanceInterface& vki = m_context.getInstanceInterface();
6381 const vk::VkPhysicalDevice physicalDevice = m_context.getPhysicalDevice();
6382 const vk::DeviceInterface& vkd = m_context.getDeviceInterface();
6383 const vk::VkDevice device = m_context.getDevice();
6384 const vk::VkQueue queue = m_context.getUniversalQueue();
6385 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
6386 const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
6387 vector<deUint32> queues;
6389 queues.push_back(queueFamilyIndex);
6391 if (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)
6392 && !(memoryProperties.memoryTypes[m_memoryTypeNdx].propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
6394 log << TestLog::Message << "Memory type not supported" << TestLog::EndMessage;
6397 return tcu::TestStatus::incomplete();
6403 const vk::VkBufferUsageFlags bufferUsage = usageToBufferUsageFlags(m_config.usage);
6404 const vk::VkImageUsageFlags imageUsage = usageToImageUsageFlags(m_config.usage);
6405 const vk::VkDeviceSize maxBufferSize = bufferUsage != 0
6406 ? roundBufferSizeToWxHx4(findMaxBufferSize(vkd, device, bufferUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx))
6408 const IVec2 maxImageSize = imageUsage != 0
6409 ? findMaxRGBA8ImageSize(vkd, device, imageUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx)
6412 log << TestLog::Message << "Max buffer size: " << maxBufferSize << TestLog::EndMessage;
6413 log << TestLog::Message << "Max RGBA8 image size: " << maxImageSize << TestLog::EndMessage;
6414 testCommand(log, m_resultCollector, m_context.getBinaryCollection(), vki, vkd, physicalDevice, device, m_config.size, m_memoryTypeNdx, m_config.usage, m_config.sharing, queue, queueFamilyIndex, queues, maxBufferSize, maxImageSize);
6416 catch (const tcu::TestError& e)
6418 m_resultCollector.fail("Failed, got exception: " + string(e.getMessage()));
6422 return tcu::TestStatus::incomplete();
6426 return tcu::TestStatus(m_resultCollector.getResult(), m_resultCollector.getMessage());
6431 void init (vk::SourceCollections& sources, TestConfig config) const
6433 // Vertex buffer rendering
6434 if (config.usage & USAGE_VERTEX_BUFFER)
6436 const char* const vertexShader =
6438 "layout(location = 0) in highp vec2 a_position;\n"
6439 "void main (void) {\n"
6440 "\tgl_PointSize = 1.0;\n"
6441 "\tgl_Position = vec4(1.999 * a_position - vec2(0.999), 0.0, 1.0);\n"
6444 sources.glslSources.add("vertex-buffer.vert")
6445 << glu::VertexSource(vertexShader);
6448 // Index buffer rendering
6449 if (config.usage & USAGE_INDEX_BUFFER)
6451 const char* const vertexShader =
6454 "void main (void) {\n"
6455 "\tgl_PointSize = 1.0;\n"
6456 "\thighp vec2 pos = vec2(gl_VertexID % 256, gl_VertexID / 256) / vec2(255.0);\n"
6457 "\tgl_Position = vec4(1.999 * pos - vec2(0.999), 0.0, 1.0);\n"
6460 sources.glslSources.add("index-buffer.vert")
6461 << glu::VertexSource(vertexShader);
6465 const char* const fragmentShader =
6467 "layout(location = 0) out highp vec4 o_color;\n"
6468 "void main (void) {\n"
6469 "\to_color = vec4(1.0);\n"
6472 sources.glslSources.add("render-white.frag")
6473 << glu::FragmentSource(fragmentShader);
6480 tcu::TestCaseGroup* createPipelineBarrierTests (tcu::TestContext& testCtx)
6482 de::MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "pipeline_barrier", "Pipeline barrier tests."));
6483 const vk::VkDeviceSize sizes[] =
6488 const Usage usages[] =
6494 USAGE_VERTEX_BUFFER,
6497 const Usage readUsages[] =
6501 USAGE_VERTEX_BUFFER,
6505 const Usage writeUsages[] =
6511 for (size_t writeUsageNdx = 0; writeUsageNdx < DE_LENGTH_OF_ARRAY(writeUsages); writeUsageNdx++)
6513 const Usage writeUsage = writeUsages[writeUsageNdx];
6515 for (size_t readUsageNdx = 0; readUsageNdx < DE_LENGTH_OF_ARRAY(readUsages); readUsageNdx++)
6517 const Usage readUsage = readUsages[readUsageNdx];
6518 const Usage usage = writeUsage | readUsage;
6519 const string usageGroupName (usageToName(usage));
6520 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6522 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6524 const vk::VkDeviceSize size = sizes[sizeNdx];
6525 const string testName (de::toString((deUint64)(size)));
6526 const TestConfig config =
6530 vk::VK_SHARING_MODE_EXCLUSIVE
6533 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
6536 group->addChild(usageGroup.get());
6537 usageGroup.release();
6542 Usage all = (Usage)0;
6544 for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usages); usageNdx++)
6545 all = all | usages[usageNdx];
6548 const string usageGroupName ("all");
6549 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6551 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6553 const vk::VkDeviceSize size = sizes[sizeNdx];
6554 const string testName (de::toString((deUint64)(size)));
6555 const TestConfig config =
6559 vk::VK_SHARING_MODE_EXCLUSIVE
6562 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
6565 group->addChild(usageGroup.get());
6566 usageGroup.release();
6570 const string usageGroupName ("all_device");
6571 de::MovePtr<tcu::TestCaseGroup> usageGroup (new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
6573 for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
6575 const vk::VkDeviceSize size = sizes[sizeNdx];
6576 const string testName (de::toString((deUint64)(size)));
6577 const TestConfig config =
6579 (Usage)(all & (~(USAGE_HOST_READ|USAGE_HOST_WRITE))),
6581 vk::VK_SHARING_MODE_EXCLUSIVE
6584 usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx,tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
6587 group->addChild(usageGroup.get());
6588 usageGroup.release();
6592 return group.release();