1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2017 The Khronos Group Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
20 * \file vktImageCompressionTranscodingSupport.cpp
21 * \brief Compression transcoding support
22 *//*--------------------------------------------------------------------*/
24 #include "vktImageCompressionTranscodingSupport.hpp"
26 #include "deUniquePtr.hpp"
27 #include "deStringUtil.hpp"
28 #include "deSharedPtr.hpp"
29 #include "deRandom.hpp"
31 #include "vktTestCaseUtil.hpp"
32 #include "vkPrograms.hpp"
33 #include "vkImageUtil.hpp"
34 #include "vktImageTestsUtil.hpp"
35 #include "vkBuilderUtil.hpp"
37 #include "vkRefUtil.hpp"
38 #include "vkTypeUtil.hpp"
39 #include "vkQueryUtil.hpp"
41 #include "tcuTextureUtil.hpp"
42 #include "tcuTexture.hpp"
43 #include "tcuCompressedTexture.hpp"
44 #include "tcuVectorType.hpp"
45 #include "tcuResource.hpp"
46 #include "tcuImageIO.hpp"
47 #include "tcuImageCompare.hpp"
48 #include "tcuTestLog.hpp"
49 #include "tcuRGBA.hpp"
50 #include "tcuSurface.hpp"
62 using tcu::TestContext;
63 using tcu::TestStatus;
66 using tcu::CompressedTexFormat;
67 using tcu::CompressedTexture;
70 using tcu::ConstPixelBufferAccess;
75 typedef SharedPtr<MovePtr<Image> > ImageSp;
76 typedef SharedPtr<Move<VkImageView> > ImageViewSp;
77 typedef SharedPtr<Move<VkDescriptorSet> > SharedVkDescriptorSet;
89 OPERATION_TEXEL_FETCH,
91 OPERATION_IMAGE_STORE,
92 OPERATION_ATTACHMENT_READ,
93 OPERATION_ATTACHMENT_WRITE,
94 OPERATION_TEXTURE_READ,
95 OPERATION_TEXTURE_WRITE,
105 VkFormat formatCompressed;
106 VkFormat formatUncompressed;
107 deUint32 imagesCount;
108 VkImageUsageFlags compressedImageUsage;
109 VkImageUsageFlags compressedImageViewUsage;
110 VkImageUsageFlags uncompressedImageUsage;
112 VkFormat formatForVerify;
116 inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
118 return SharedPtr<Move<T> >(new Move<T>(move));
122 inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
124 return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
127 const deUint32 SINGLE_LEVEL = 1u;
128 const deUint32 SINGLE_LAYER = 1u;
130 class BasicTranscodingTestInstance : public TestInstance
133 BasicTranscodingTestInstance (Context& context,
134 const TestParameters& parameters);
135 virtual TestStatus iterate (void) = 0;
137 void generateData (deUint8* toFill,
139 const VkFormat format,
140 const deUint32 layer = 0u,
141 const deUint32 level = 0u);
142 deUint32 getLevelCount ();
143 deUint32 getLayerCount ();
144 UVec3 getLayerDims ();
145 vector<UVec3> getMipLevelSizes (UVec3 baseSize);
146 vector<UVec3> getCompressedMipLevelSizes (const VkFormat compressedFormat,
147 const vector<UVec3>& uncompressedSizes);
149 const TestParameters m_parameters;
150 const deUint32 m_blockWidth;
151 const deUint32 m_blockHeight;
152 const deUint32 m_levelCount;
153 const UVec3 m_layerSize;
156 deUint32 findMipMapLevelCount ();
159 deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
161 deUint32 levelCount = 1;
163 // We cannot use mipmap levels which have resolution below block size.
164 // Reduce number of mipmap levels
165 if (m_parameters.useMipmaps)
167 deUint32 w = m_parameters.size.x();
168 deUint32 h = m_parameters.size.y();
170 DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);
172 while (w > m_blockWidth && h > m_blockHeight)
177 if (w > m_blockWidth && h > m_blockHeight)
181 DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
182 DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
188 BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
189 : TestInstance (context)
190 , m_parameters (parameters)
191 , m_blockWidth (getBlockWidth(m_parameters.formatCompressed))
192 , m_blockHeight (getBlockHeight(m_parameters.formatCompressed))
193 , m_levelCount (findMipMapLevelCount())
194 , m_layerSize (getLayerSize(m_parameters.imageType, m_parameters.size))
196 DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
199 deUint32 BasicTranscodingTestInstance::getLevelCount()
204 deUint32 BasicTranscodingTestInstance::getLayerCount()
206 return m_parameters.size.z();
209 UVec3 BasicTranscodingTestInstance::getLayerDims()
214 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
216 vector<UVec3> levelSizes;
217 const deUint32 levelCount = getLevelCount();
219 DE_ASSERT(m_parameters.imageType == IMAGE_TYPE_2D || m_parameters.imageType == IMAGE_TYPE_2D_ARRAY);
223 levelSizes.push_back(baseSize);
225 while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
227 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
228 baseSize.y() = deMax32(baseSize.y() >> 1, 1);
229 levelSizes.push_back(baseSize);
232 DE_ASSERT(levelSizes.size() == getLevelCount());
237 vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
239 vector<UVec3> levelSizes;
240 vector<UVec3>::const_iterator it;
242 for (it = uncompressedSizes.begin(); it != uncompressedSizes.end(); it++)
243 levelSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, *it));
248 void BasicTranscodingTestInstance::generateData (deUint8* toFill,
250 const VkFormat format,
251 const deUint32 layer,
252 const deUint32 level)
254 const deUint8 pattern[] =
257 0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
258 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
259 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
260 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
261 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
262 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
263 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
264 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
265 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
266 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
267 0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Positive infinity
268 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Negative infinity
269 0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
270 0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
271 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
272 0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
273 0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
274 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of of a quiet NaN (NANQ)
275 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
276 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
278 0x7F, 0x80, 0x00, 0x00, // Positive infinity
279 0xFF, 0x80, 0x00, 0x00, // Negative infinity
280 0x7F, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
281 0x7F, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
282 0xFF, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
283 0xFF, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
284 0x7F, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
285 0x7F, 0xFF, 0xFF, 0xFF, // End of of a quiet NaN (NANQ)
286 0xFF, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
287 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
288 0xAA, 0xAA, 0xAA, 0xAA,
289 0x55, 0x55, 0x55, 0x55,
292 deUint8* start = toFill;
293 size_t sizeToRnd = size;
296 if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
299 for (size_t i = 0; i < sizeof(pattern); i++)
300 start[sizeof(pattern) - i - 1] = pattern[i];
302 start += sizeof(pattern);
303 sizeToRnd -= sizeof(pattern);
306 deMemcpy(start, pattern, sizeof(pattern));
308 start += sizeof(pattern);
309 sizeToRnd -= sizeof(pattern);
314 DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);
316 deUint32* start32 = reinterpret_cast<deUint32*>(start);
317 size_t sizeToRnd32 = sizeToRnd / sizeof(deUint32);
318 deUint32 seed = (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
321 for (size_t i = 0; i < sizeToRnd32; i++)
322 start32[i] = rnd.getUint32();
326 // Remove certain values that may not be preserved based on the uncompressed view format
327 if (isSnormFormat(m_parameters.formatUncompressed))
329 for (size_t i = 0; i < size; i += 2)
331 // SNORM fix: due to write operation in SNORM format
332 // replaces 0x00 0x80 to 0x01 0x80
333 if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
337 else if (isFloatFormat(m_parameters.formatUncompressed))
339 tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);
341 if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
343 for (size_t i = 0; i < size; i += 2)
345 // HALF_FLOAT fix: remove INF and NaN
346 if ((toFill[i+1] & 0x7C) == 0x7C)
350 else if (textureFormat.type == tcu::TextureFormat::FLOAT)
352 for (size_t i = 0; i < size; i += 4)
354 // HALF_FLOAT fix: remove INF and NaN
355 if ((toFill[i+1] & 0x7C) == 0x7C)
359 for (size_t i = 0; i < size; i += 4)
361 // FLOAT fix: remove INF, NaN, and denorm
363 if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
366 if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
374 class BasicComputeTestInstance : public BasicTranscodingTestInstance
377 BasicComputeTestInstance (Context& context,
378 const TestParameters& parameters);
379 TestStatus iterate (void);
383 deUint32 getImagesCount (void) { return static_cast<deUint32>(images.size()); }
384 deUint32 getImageViewCount (void) { return static_cast<deUint32>(imagesViews.size()); }
385 deUint32 getImageInfoCount (void) { return static_cast<deUint32>(imagesInfos.size()); }
386 VkImage getImage (const deUint32 ndx) { return **images[ndx]->get(); }
387 VkImageView getImageView (const deUint32 ndx) { return **imagesViews[ndx]; }
388 VkImageCreateInfo getImageInfo (const deUint32 ndx) { return imagesInfos[ndx]; }
389 void addImage (MovePtr<Image> image) { images.push_back(makeVkSharedPtr(image)); }
390 void addImageView (Move<VkImageView> imageView) { imagesViews.push_back(makeVkSharedPtr(imageView));}
391 void addImageInfo (const VkImageCreateInfo imageInfo) { imagesInfos.push_back(imageInfo); }
392 void resetViews () { imagesViews.clear(); }
394 vector<ImageSp> images;
395 vector<ImageViewSp> imagesViews;
396 vector<VkImageCreateInfo> imagesInfos;
398 void copyDataToImage (const VkCommandBuffer& cmdBuffer,
399 ImageData& imageData,
400 const vector<UVec3>& mipMapSizes,
401 const bool isCompressed);
402 virtual void executeShader (const VkCommandBuffer& cmdBuffer,
403 const VkDescriptorSetLayout& descriptorSetLayout,
404 const VkDescriptorPool& descriptorPool,
405 vector<ImageData>& imageData);
406 bool copyResultAndCompare (const VkCommandBuffer& cmdBuffer,
407 const VkImage& uncompressed,
408 const VkDeviceSize offset,
410 void descriptorSetUpdate (VkDescriptorSet descriptorSet,
411 const VkDescriptorImageInfo* descriptorImageInfos);
412 void createImageInfos (ImageData& imageData,
413 const vector<UVec3>& mipMapSizes,
414 const bool isCompressed);
415 bool decompressImage (const VkCommandBuffer& cmdBuffer,
416 vector<ImageData>& imageData,
417 const vector<UVec3>& mipMapSizes);
418 vector<deUint8> m_data;
422 BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
423 :BasicTranscodingTestInstance (context, parameters)
427 TestStatus BasicComputeTestInstance::iterate (void)
429 const DeviceInterface& vk = m_context.getDeviceInterface();
430 const VkDevice device = m_context.getDevice();
431 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
432 Allocator& allocator = m_context.getDefaultAllocator();
433 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
434 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
435 const vector<UVec3> mipMapSizes = m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, m_parameters.size);
436 vector<ImageData> imageData (m_parameters.imagesCount);
437 const deUint32 compressedNdx = 0u;
438 const deUint32 resultImageNdx = m_parameters.imagesCount -1u;
440 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
442 const bool isCompressed = compressedNdx == imageNdx ? true : false;
443 createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
444 for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
446 imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
449 const VkImageViewUsageCreateInfoKHR imageViewUsageKHR =
451 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
452 DE_NULL, //const void* pNext;
453 m_parameters.compressedImageUsage, //VkImageUsageFlags usage;
455 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
456 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
458 imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
459 mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
460 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
461 &imageViewUsageKHR));
466 imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
467 mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
468 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
475 for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
477 size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
480 generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
483 switch(m_parameters.operation)
485 case OPERATION_IMAGE_LOAD:
486 case OPERATION_TEXEL_FETCH:
487 case OPERATION_TEXTURE:
488 copyDataToImage(*cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
490 case OPERATION_IMAGE_STORE:
491 copyDataToImage(*cmdBuffer, imageData[1], mipMapSizes, false);
499 Move<VkDescriptorSetLayout> descriptorSetLayout;
500 Move<VkDescriptorPool> descriptorPool;
502 DescriptorSetLayoutBuilder descriptorSetLayoutBuilder;
503 DescriptorPoolBuilder descriptorPoolBuilder;
504 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
506 switch(m_parameters.operation)
508 case OPERATION_IMAGE_LOAD:
509 case OPERATION_IMAGE_STORE:
510 descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
511 descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
513 case OPERATION_TEXEL_FETCH:
514 case OPERATION_TEXTURE:
515 descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
516 descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
523 descriptorSetLayout = descriptorSetLayoutBuilder.build(vk, device);
524 descriptorPool = descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
525 executeShader(*cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);
528 VkDeviceSize offset = 0ull;
529 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
530 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
532 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
533 const UVec3 size = UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
534 imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
535 imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
536 if (!copyResultAndCompare(*cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
537 return TestStatus::fail("Fail");
538 offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
542 if (!decompressImage(*cmdBuffer, imageData, mipMapSizes))
543 return TestStatus::fail("Fail");
544 return TestStatus::pass("Pass");
547 void BasicComputeTestInstance::copyDataToImage (const VkCommandBuffer& cmdBuffer,
548 ImageData& imageData,
549 const vector<UVec3>& mipMapSizes,
550 const bool isCompressed)
552 const DeviceInterface& vk = m_context.getDeviceInterface();
553 const VkDevice device = m_context.getDevice();
554 const VkQueue queue = m_context.getUniversalQueue();
555 Allocator& allocator = m_context.getDefaultAllocator();
557 Buffer imageBuffer (vk, device, allocator,
558 makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
559 MemoryRequirement::HostVisible);
560 VkDeviceSize offset = 0ull;
562 const Allocation& alloc = imageBuffer.getAllocation();
563 deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
564 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_data.size());
567 beginCommandBuffer(vk, cmdBuffer);
568 const VkImageSubresourceRange subresourceRange =
570 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
571 0u, //deUint32 baseMipLevel
572 imageData.getImageInfo(0u).mipLevels, //deUint32 levelCount
573 0u, //deUint32 baseArrayLayer
574 imageData.getImageInfo(0u).arrayLayers //deUint32 layerCount
577 for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
579 const VkImageMemoryBarrier preCopyImageBarrier = makeImageMemoryBarrier(
580 0u, VK_ACCESS_TRANSFER_WRITE_BIT,
581 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
582 imageData.getImage(imageNdx), subresourceRange);
584 const VkBufferMemoryBarrier FlushHostCopyBarrier = makeBufferMemoryBarrier(
585 VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
586 imageBuffer.get(), 0ull, m_data.size());
588 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
589 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &FlushHostCopyBarrier, 1u, &preCopyImageBarrier);
591 for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
593 const VkExtent3D imageExtent = isCompressed ?
594 makeExtent3D(mipMapSizes[mipNdx]) :
595 imageData.getImageInfo(imageNdx).extent;
596 const VkBufferImageCopy copyRegion =
598 offset, //VkDeviceSize bufferOffset;
599 0u, //deUint32 bufferRowLength;
600 0u, //deUint32 bufferImageHeight;
601 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers), //VkImageSubresourceLayers imageSubresource;
602 makeOffset3D(0, 0, 0), //VkOffset3D imageOffset;
603 imageExtent, //VkExtent3D imageExtent;
606 vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
607 offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
608 UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed? imageExtent.height :imageExtent.height * m_blockHeight,imageExtent.depth)) *
609 imageData.getImageInfo(imageNdx).arrayLayers;
612 endCommandBuffer(vk, cmdBuffer);
613 submitCommandsAndWait(vk, device, queue, cmdBuffer);
616 void BasicComputeTestInstance::executeShader (const VkCommandBuffer& cmdBuffer,
617 const VkDescriptorSetLayout& descriptorSetLayout,
618 const VkDescriptorPool& descriptorPool,
619 vector<ImageData>& imageData)
621 const DeviceInterface& vk = m_context.getDeviceInterface();
622 const VkDevice device = m_context.getDevice();
623 const VkQueue queue = m_context.getUniversalQueue();
624 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
625 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
626 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
627 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
628 Move<VkSampler> sampler;
630 const VkSamplerCreateInfo createInfo =
632 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
633 DE_NULL, //const void* pNext;
634 0u, //VkSamplerCreateFlags flags;
635 VK_FILTER_NEAREST, //VkFilter magFilter;
636 VK_FILTER_NEAREST, //VkFilter minFilter;
637 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
638 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
639 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
640 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
641 0.0f, //float mipLodBias;
642 VK_FALSE, //VkBool32 anisotropyEnable;
643 1.0f, //float maxAnisotropy;
644 VK_FALSE, //VkBool32 compareEnable;
645 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
646 0.0f, //float minLod;
647 0.0f, //float maxLod;
648 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
649 VK_FALSE, //VkBool32 unnormalizedCoordinates;
651 sampler = createSampler(vk, device, &createInfo);
654 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
655 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
657 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
658 for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
660 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
661 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
665 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
666 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
668 beginCommandBuffer(vk, cmdBuffer);
670 const VkImageSubresourceRange compressedRange =
672 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
673 0u, //deUint32 baseMipLevel
674 imageData[0].getImageInfo(0u).mipLevels, //deUint32 levelCount
675 0u, //deUint32 baseArrayLayer
676 imageData[0].getImageInfo(0u).arrayLayers //deUint32 layerCount
678 const VkImageSubresourceRange uncompressedRange =
680 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
681 0u, //deUint32 baseMipLevel
682 1u, //deUint32 levelCount
683 0u, //deUint32 baseArrayLayer
684 1u //deUint32 layerCount
687 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
689 vector<VkImageMemoryBarrier> preShaderImageBarriers;
690 preShaderImageBarriers.resize(descriptorSets.size() + 1u);
691 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
693 preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
694 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
695 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
696 imageData[1].getImage(imageNdx), uncompressedRange);
699 preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
700 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
701 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
702 imageData[0].getImage(0), compressedRange);
704 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
705 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
706 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
708 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
710 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
711 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
712 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
713 imageData[1].getImageInfo(ndx).extent.height,
714 imageData[1].getImageInfo(ndx).extent.depth);
717 endCommandBuffer(vk, cmdBuffer);
718 submitCommandsAndWait(vk, device, queue, cmdBuffer);
721 bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandBuffer& cmdBuffer,
722 const VkImage& uncompressed,
723 const VkDeviceSize offset,
726 const DeviceInterface& vk = m_context.getDeviceInterface();
727 const VkQueue queue = m_context.getUniversalQueue();
728 const VkDevice device = m_context.getDevice();
729 Allocator& allocator = m_context.getDefaultAllocator();
731 VkDeviceSize imageResultSize = getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
732 Buffer imageBufferResult (vk, device, allocator,
733 makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
734 MemoryRequirement::HostVisible);
736 beginCommandBuffer(vk, cmdBuffer);
738 const VkImageSubresourceRange subresourceRange =
740 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
741 0u, //deUint32 baseMipLevel
742 1u, //deUint32 levelCount
743 0u, //deUint32 baseArrayLayer
744 1u //deUint32 layerCount
747 const VkBufferImageCopy copyRegion =
749 0ull, // VkDeviceSize bufferOffset;
750 0u, // deUint32 bufferRowLength;
751 0u, // deUint32 bufferImageHeight;
752 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
753 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
754 makeExtent3D(size), // VkExtent3D imageExtent;
757 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
758 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
759 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
760 uncompressed, subresourceRange);
762 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
763 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
764 imageBufferResult.get(), 0ull, imageResultSize);
766 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
767 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, ©Region);
768 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
770 endCommandBuffer(vk, cmdBuffer);
771 submitCommandsAndWait(vk, device, queue, cmdBuffer);
773 const Allocation& allocResult = imageBufferResult.getAllocation();
774 invalidateMappedMemoryRange(vk, device, allocResult.getMemory(), allocResult.getOffset(), imageResultSize);
775 if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
780 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
782 const DeviceInterface& vk = m_context.getDeviceInterface();
783 const VkDevice device = m_context.getDevice();
784 DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
786 switch(m_parameters.operation)
788 case OPERATION_IMAGE_LOAD:
789 case OPERATION_IMAGE_STORE:
791 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
792 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
797 case OPERATION_TEXEL_FETCH:
798 case OPERATION_TEXTURE:
800 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
802 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
803 bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
812 descriptorSetUpdateBuilder.update(vk, device);
815 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
817 const VkImageType imageType = mapImageType(m_parameters.imageType);
821 const VkExtent3D extentCompressed = makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
822 const VkImageCreateInfo compressedInfo =
824 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
825 DE_NULL, // const void* pNext;
826 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
827 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR |
828 VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, // VkImageCreateFlags flags;
829 imageType, // VkImageType imageType;
830 m_parameters.formatCompressed, // VkFormat format;
831 extentCompressed, // VkExtent3D extent;
832 static_cast<deUint32>(mipMapSizes.size()), // deUint32 mipLevels;
833 getLayerCount(), // deUint32 arrayLayers;
834 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
835 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
836 VK_IMAGE_USAGE_SAMPLED_BIT |
837 VK_IMAGE_USAGE_STORAGE_BIT |
838 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
839 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
840 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
841 0u, // deUint32 queueFamilyIndexCount;
842 DE_NULL, // const deUint32* pQueueFamilyIndices;
843 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
845 imageData.addImageInfo(compressedInfo);
849 for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
850 for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
852 const VkExtent3D extentUncompressed = m_parameters.useMipmaps ?
853 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
854 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, m_parameters.size));
855 const VkImageCreateInfo uncompressedInfo =
857 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
858 DE_NULL, // const void* pNext;
859 0u, // VkImageCreateFlags flags;
860 imageType, // VkImageType imageType;
861 m_parameters.formatUncompressed, // VkFormat format;
862 extentUncompressed, // VkExtent3D extent;
863 1u, // deUint32 mipLevels;
864 1u, // deUint32 arrayLayers;
865 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
866 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
867 m_parameters.uncompressedImageUsage |
868 VK_IMAGE_USAGE_SAMPLED_BIT, // VkImageUsageFlags usage;
869 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
870 0u, // deUint32 queueFamilyIndexCount;
871 DE_NULL, // const deUint32* pQueueFamilyIndices;
872 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
874 imageData.addImageInfo(uncompressedInfo);
// Decompresses every mip level / array layer of the transcoded compressed image
// (imageData[0]) with the "decompress" compute shader and compares the outputs.
// For each (mip, layer) pair the shader produces two RGBA8 images:
//  - "result":    sampled through a compressed-format view of the transcoded image, and
//  - "reference": sampled through a view of a freshly re-uploaded copy of the same
//                 compressed block data (round-tripped via transferBuffer).
// Both are copied back to host-visible buffers and compared — fuzzily for images
// wider than 2 texels, byte-exactly otherwise.
879 bool BasicComputeTestInstance::decompressImage (const VkCommandBuffer& cmdBuffer,
880 vector<ImageData>& imageData,
881 const vector<UVec3>& mipMapSizes)
883 const DeviceInterface& vk = m_context.getDeviceInterface();
884 const VkDevice device = m_context.getDevice();
885 const VkQueue queue = m_context.getUniversalQueue();
886 Allocator& allocator = m_context.getDefaultAllocator();
887 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
// imageData[0] holds the compressed image written by the earlier transcode pass.
888 const VkImage& compressed = imageData[0].getImage(0);
// Drop any image views created by previous passes before building per-mip views below.
890 for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
891 imageData[ndx].resetViews();
893 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
894 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
// Uncompressed-view images were created one per (mip, layer); index accordingly.
896 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
897 const VkExtent3D extentCompressed = makeExtent3D(mipMapSizes[mipNdx]);
898 const VkImage& uncompressed = imageData[m_parameters.imagesCount -1].getImage(imageNdx);
899 const VkExtent3D extentUncompressed = imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
// Size of this mip level's raw compressed block data, used for the round-trip buffer.
900 const VkDeviceSize bufferSizeComp = getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
// RGBA8 target for both the shader's "result" and "reference" outputs.
902 const VkImageCreateInfo decompressedImageInfo =
904 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
905 DE_NULL, // const void* pNext;
906 0u, // VkImageCreateFlags flags;
907 VK_IMAGE_TYPE_2D, // VkImageType imageType;
908 VK_FORMAT_R8G8B8A8_UNORM, // VkFormat format;
909 extentCompressed, // VkExtent3D extent;
910 1u, // deUint32 mipLevels;
911 1u, // deUint32 arrayLayers;
912 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
913 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
914 VK_IMAGE_USAGE_SAMPLED_BIT |
915 VK_IMAGE_USAGE_STORAGE_BIT |
916 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
917 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
918 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
919 0u, // deUint32 queueFamilyIndexCount;
920 DE_NULL, // const deUint32* pQueueFamilyIndices;
921 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
// Single-level, single-layer compressed image that receives a re-upload of this
// mip's block data; sampling it yields the "reference" decompression.
924 const VkImageCreateInfo compressedImageInfo =
926 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
927 DE_NULL, // const void* pNext;
928 0u, // VkImageCreateFlags flags;
929 VK_IMAGE_TYPE_2D, // VkImageType imageType;
930 m_parameters.formatCompressed, // VkFormat format;
931 extentCompressed, // VkExtent3D extent;
932 1u, // deUint32 mipLevels;
933 1u, // deUint32 arrayLayers;
934 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
935 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
936 VK_IMAGE_USAGE_SAMPLED_BIT |
937 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
938 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
939 0u, // deUint32 queueFamilyIndexCount;
940 DE_NULL, // const deUint32* pQueueFamilyIndices;
941 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
// VK_KHR_maintenance2 view-usage override: restricts the view's usage so a
// compressed-format view can be created on the (mutable-format) transcoded image.
943 const VkImageUsageFlags compressedViewUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
944 const VkImageViewUsageCreateInfoKHR compressedViewUsageCI =
946 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
947 DE_NULL, //const void* pNext;
948 compressedViewUsageFlags, //VkImageUsageFlags usage;
950 Image resultImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
951 Image referenceImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
952 Image uncompressedImage (vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
// NOTE(review): the subresource ranges below pass extent.depth as levelCount.
// This works for 2D images (depth == 1 == mipLevels here) but looks like
// decompressedImageInfo.mipLevels was intended — confirm before reuse for 3D.
953 Move<VkImageView> resultView = makeImageView(vk, device, resultImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
954 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
955 Move<VkImageView> referenceView = makeImageView(vk, device, referenceImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
956 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
957 Move<VkImageView> uncompressedView = makeImageView(vk, device, uncompressedImage.get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
958 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
// View over exactly this (mip, layer) of the transcoded compressed image.
959 Move<VkImageView> compressedView = makeImageView(vk, device, compressed, mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
960 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
// Bindings 0-1: sampled inputs (reference copy, transcoded original);
// bindings 2-3: storage-image outputs (result, reference).
961 Move<VkDescriptorSetLayout> descriptorSetLayout = DescriptorSetLayoutBuilder()
962 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
963 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
964 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
965 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
967 Move<VkDescriptorPool> descriptorPool = DescriptorPoolBuilder()
968 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
969 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
970 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
971 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
972 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
974 Move<VkDescriptorSet> descriptorSet = makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
975 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
976 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
// Readback buffers are sized for the RGBA8 decompressed extent.
977 const VkDeviceSize bufferSize = getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), VK_FORMAT_R8G8B8A8_UNORM);
978 Buffer resultBuffer (vk, device, allocator,
979 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
980 Buffer referenceBuffer (vk, device, allocator,
981 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
// Staging buffer for the compressed-block round trip (image -> buffer -> image).
982 Buffer transferBuffer (vk, device, allocator,
983 makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
984 Move<VkSampler> sampler;
986 const VkSamplerCreateInfo createInfo =
988 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
989 DE_NULL, //const void* pNext;
990 0u, //VkSamplerCreateFlags flags;
991 VK_FILTER_NEAREST, //VkFilter magFilter;
992 VK_FILTER_NEAREST, //VkFilter minFilter;
993 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
994 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
995 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
996 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
997 0.0f, //float mipLodBias;
998 VK_FALSE, //VkBool32 anisotropyEnable;
999 1.0f, //float maxAnisotropy;
1000 VK_FALSE, //VkBool32 compareEnable;
// compareOp is ignored because compareEnable is VK_FALSE.
1001 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1002 0.0f, //float minLod;
1003 1.0f, //float maxLod;
1004 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1005 VK_FALSE, //VkBool32 unnormalizedCoordinates;
1007 sampler = createSampler(vk, device, &createInfo);
1010 VkDescriptorImageInfo descriptorImageInfos[] =
1012 makeDescriptorImageInfo(*sampler, *uncompressedView, VK_IMAGE_LAYOUT_GENERAL),
1013 makeDescriptorImageInfo(*sampler, *compressedView, VK_IMAGE_LAYOUT_GENERAL),
1014 makeDescriptorImageInfo(DE_NULL, *resultView, VK_IMAGE_LAYOUT_GENERAL),
1015 makeDescriptorImageInfo(DE_NULL, *referenceView, VK_IMAGE_LAYOUT_GENERAL)
1017 DescriptorSetUpdateBuilder()
1018 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
1019 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
1020 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
1021 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
1022 .update(vk, device);
1025 beginCommandBuffer(vk, cmdBuffer);
// Single-level / single-layer range used for the per-iteration scratch images.
1027 const VkImageSubresourceRange subresourceRange =
1029 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1030 0u, //deUint32 baseMipLevel
1031 1u, //deUint32 levelCount
1032 0u, //deUint32 baseArrayLayer
1033 1u //deUint32 layerCount
// Copy region for the RGBA8 decompressed images (in texels).
1036 const VkBufferImageCopy copyRegion =
1038 0ull, // VkDeviceSize bufferOffset;
1039 0u, // deUint32 bufferRowLength;
1040 0u, // deUint32 bufferImageHeight;
1041 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1042 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1043 decompressedImageInfo.extent, // VkExtent3D imageExtent;
// Copy region for the compressed round trip; extent is in compressed blocks.
1046 const VkBufferImageCopy compressedCopyRegion =
1048 0ull, // VkDeviceSize bufferOffset;
1049 0u, // deUint32 bufferRowLength;
1050 0u, // deUint32 bufferImageHeight;
1051 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1052 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1053 extentUncompressed, // VkExtent3D imageExtent;
1058 const VkBufferMemoryBarrier preCopyBufferBarriers = makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1059 transferBuffer.get(), 0ull, bufferSizeComp);
1061 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1062 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
// Pull the raw block data out of the uncompressed-view image; it is expected to
// already be in TRANSFER_SRC_OPTIMAL from the preceding transcode pass — confirm.
1065 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
1068 const VkBufferMemoryBarrier postCopyBufferBarriers = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1069 transferBuffer.get(), 0ull, bufferSizeComp);
1071 const VkImageMemoryBarrier preCopyImageBarriers = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1072 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
1074 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1075 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
// NOTE(review): "©Region" here is HTML-entity-mangled source text; the original
// argument is almost certainly &copyRegion — restore before compiling.
1078 vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
1080 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1081 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
// Move inputs to GENERAL for sampling and outputs to GENERAL for storage writes.
1084 const VkImageMemoryBarrier preShaderImageBarriers[] =
1087 makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1088 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1089 uncompressedImage.get(), subresourceRange),
1091 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1092 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1093 resultImage.get(), subresourceRange),
1095 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1096 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1097 referenceImage.get(), subresourceRange)
1100 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1101 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1102 DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
// One invocation per texel of this mip level.
1105 vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
1108 const VkImageMemoryBarrier postShaderImageBarriers[] =
1110 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1111 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1112 resultImage.get(), subresourceRange),
1114 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1115 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1116 referenceImage.get(), subresourceRange)
// NOTE(review): VK_BUFFER_USAGE_TRANSFER_DST_BIT is a VkBufferUsageFlagBits value
// being passed where a VkAccessFlags dstAccessMask is expected (numerically it
// aliases VK_ACCESS_INDEX_READ_BIT). The intended flag is almost certainly
// VK_ACCESS_TRANSFER_WRITE_BIT — fix in both barriers below.
1119 const VkBufferMemoryBarrier preCopyBufferBarrier[] =
1121 makeBufferMemoryBarrier( 0, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
1122 resultBuffer.get(), 0ull, bufferSize),
1124 makeBufferMemoryBarrier( 0, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
1125 referenceBuffer.get(), 0ull, bufferSize),
1128 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1129 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(preCopyBufferBarrier), preCopyBufferBarrier,
1130 DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
// NOTE(review): both "©Region" arguments below are the same HTML-entity mangling
// of &copyRegion as above.
1132 vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, ©Region);
1133 vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, ©Region);
1135 endCommandBuffer(vk, cmdBuffer);
1136 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Make the device writes visible to the host before reading the buffers back.
1138 const Allocation& resultAlloc = resultBuffer.getAllocation();
1139 const Allocation& referenceAlloc = referenceBuffer.getAllocation();
1140 invalidateMappedMemoryRange(vk, device, resultAlloc.getMemory(), resultAlloc.getOffset(), bufferSize);
1141 invalidateMappedMemoryRange(vk, device, referenceAlloc.getMemory(), referenceAlloc.getOffset(), bufferSize);
1143 ConstPixelBufferAccess resultPixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
1144 ConstPixelBufferAccess referencePixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
// Wide images: tolerate small per-texel differences via fuzzy comparison;
// tiny (<=2 texel wide) mips fall through to the exact memcmp below.
1146 if (decompressedImageInfo.extent.width > 2)
1148 if(!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
1153 if (deMemCmp(resultAlloc.getHostPtr(), referenceAlloc.getHostPtr(), (size_t)bufferSize))
// Variant of the basic compute transcoding test whose shader path writes the
// uncompressed data with image stores; only executeShader() is overridden.
1160 class ImageStoreComputeTestInstance : public BasicComputeTestInstance
1163 ImageStoreComputeTestInstance (Context& context,
1164 const TestParameters& parameters);
// Records and submits the "comp" compute dispatches for every image view.
1166 virtual void executeShader (const VkCommandBuffer& cmdBuffer,
1167 const VkDescriptorSetLayout& descriptorSetLayout,
1168 const VkDescriptorPool& descriptorPool,
1169 vector<ImageData>& imageData);
// Delegates all setup to the base class; this subclass only changes executeShader().
1173 ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
1174 :BasicComputeTestInstance (context, parameters)
// Records and submits one "comp" compute dispatch per image view: for each view
// a descriptor set binds all m_parameters.imagesCount images (via combined
// image samplers with unnormalized coordinates), the required layout
// transitions are issued, and the dispatch covers the full extent of the
// corresponding uncompressed image (imageData[1]).
1178 void ImageStoreComputeTestInstance::executeShader (const VkCommandBuffer& cmdBuffer,
1179 const VkDescriptorSetLayout& descriptorSetLayout,
1180 const VkDescriptorPool& descriptorPool,
1181 vector<ImageData>& imageData)
1183 const DeviceInterface& vk = m_context.getDeviceInterface();
1184 const VkDevice device = m_context.getDevice();
1185 const VkQueue queue = m_context.getUniversalQueue();
1186 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
// One descriptor set per view of the compressed image.
1187 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
1188 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
1189 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1190 Move<VkSampler> sampler;
1192 const VkSamplerCreateInfo createInfo =
1194 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
1195 DE_NULL, //const void* pNext;
1196 0u, //VkSamplerCreateFlags flags;
1197 VK_FILTER_NEAREST, //VkFilter magFilter;
1198 VK_FILTER_NEAREST, //VkFilter minFilter;
1199 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
1200 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
1201 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
1202 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
1203 0.0f, //float mipLodBias;
1204 VK_FALSE, //VkBool32 anisotropyEnable;
1205 1.0f, //float maxAnisotropy;
1206 VK_FALSE, //VkBool32 compareEnable;
// compareOp is ignored because compareEnable is VK_FALSE.
1207 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1208 0.0f, //float minLod;
1209 0.0f, //float maxLod;
1210 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
// Shader addresses texels in integer texel space, hence unnormalized coords.
1211 VK_TRUE, //VkBool32 unnormalizedCoordinates;
1213 sampler = createSampler(vk, device, &createInfo);
// Flat array: descriptorImageInfos[viewNdx * imagesCount + imageNdx].
1216 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
1217 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
1219 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
1220 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
1222 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
1223 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
1227 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
1228 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
1230 beginCommandBuffer(vk, cmdBuffer);
// Full range (all mips / all layers) of the compressed source image.
1232 const VkImageSubresourceRange compressedRange =
1234 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1235 0u, //deUint32 baseMipLevel
1236 imageData[0].getImageInfo(0).mipLevels, //deUint32 levelCount
1237 0u, //deUint32 baseArrayLayer
1238 imageData[0].getImageInfo(0).arrayLayers //deUint32 layerCount
1241 const VkImageSubresourceRange uncompressedRange =
1243 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1244 0u, //deUint32 baseMipLevel
1245 1u, //deUint32 levelCount
1246 0u, //deUint32 baseArrayLayer
1247 1u //deUint32 layerCount
1250 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
// NOTE(review): the vector is sized from descriptorSets.size() but filled from
// imageData[1].getImagesCount(); if those counts ever differ, trailing barriers
// stay zero-initialized (invalid sType) — confirm the counts always match.
1252 vector<VkImageMemoryBarrier> preShaderImageBarriers (descriptorSets.size() * 2u + 1u);
1253 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
// Uncompressed inputs: transfer-filled earlier, now written by the shader.
1255 preShaderImageBarriers[imageNdx] = makeImageMemoryBarrier(
1256 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1257 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1258 imageData[1].getImage(imageNdx), uncompressedRange);
// Result images: first use, so old layout UNDEFINED.
1260 preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()] = makeImageMemoryBarrier(
1261 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1262 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1263 imageData[2].getImage(imageNdx), uncompressedRange);
// Final slot: the compressed image itself, read by the shader.
1266 preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
1267 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1268 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1269 imageData[0].getImage(0u), compressedRange);
1271 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1272 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1273 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
// One dispatch per view, sized by the matching uncompressed image's extent.
1275 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
1277 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
1278 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
1279 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
1280 imageData[1].getImageInfo(ndx).extent.height,
1281 imageData[1].getImageInfo(ndx).extent.depth);
1284 endCommandBuffer(vk, cmdBuffer);
1285 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Graphics-pipeline transcoding test: renders a fullscreen quad so the fragment
// shader reads a compressed image through an uncompressed-format attachment view
// (transcodeRead) or writes one (transcodeWrite), then verifies decompression
// per mip level and array layer.
1288 class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
1291 GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters);
1292 virtual TestStatus iterate (void);
// True when the operation under test writes INTO the compressed image
// (as opposed to reading from it); steers src/dst role selection.
1295 virtual bool isWriteToCompressedOperation ();
1296 VkImageCreateInfo makeCreateImageInfo (const VkFormat format,
1297 const ImageType type,
1299 const VkImageUsageFlags usageFlags,
1300 const VkImageCreateFlags* createFlags,
1301 const deUint32 levels,
1302 const deUint32 layers);
// Fill `data` with generated compressed texel data; returns its byte size.
1303 VkDeviceSize getCompressedImageData (const VkFormat format,
1305 std::vector<deUint8>& data,
1306 const deUint32 layer,
1307 const deUint32 level);
// Fill `data` with generated uncompressed texel data; returns its byte size.
1308 VkDeviceSize getUncompressedImageData (const VkFormat format,
1310 std::vector<deUint8>& data,
1311 const deUint32 layer,
1312 const deUint32 level);
1313 virtual void prepareData ();
1314 virtual void prepareVertexBuffer ();
1315 virtual void transcodeRead ();
1316 virtual void transcodeWrite ();
// Compares refCompressedData against a software decompression of the given
// subresource of resCompressedImage.
1317 bool verifyDecompression (const std::vector<deUint8>& refCompressedData,
1318 const de::MovePtr<Image>& resCompressedImage,
1319 const deUint32 layer,
1320 const deUint32 level,
1321 const UVec3& mipmapDims);
// Raw texel data indexed as [level][layer] -> byte vector.
1323 typedef std::vector<deUint8> RawDataVector;
1324 typedef SharedPtr<RawDataVector> RawDataPtr;
1325 typedef std::vector<RawDataPtr> LevelData;
1326 typedef std::vector<LevelData> FullImageData;
1328 FullImageData m_srcData;
1329 FullImageData m_dstData;
// Per-(level, layer) uncompressed-view images, indexed [level][layer].
1331 typedef SharedPtr<Image> ImagePtr;
1332 typedef std::vector<ImagePtr> LevelImages;
1333 typedef std::vector<LevelImages> ImagesArray;
1335 ImagesArray m_uncompressedImages;
1336 MovePtr<Image> m_compressedImage;
// View-usage override (VK_KHR_maintenance2); src/dst pointers select which
// side of the operation gets it (set up in prepareData()).
1338 VkImageViewUsageCreateInfoKHR m_imageViewUsageKHR;
1339 VkImageViewUsageCreateInfoKHR* m_srcImageViewUsageKHR;
1340 VkImageViewUsageCreateInfoKHR* m_dstImageViewUsageKHR;
1341 std::vector<tcu::UVec3> m_compressedImageResVec;
1342 std::vector<tcu::UVec3> m_uncompressedImageResVec;
// src/dst formats, usages and resolutions are swapped depending on
// isWriteToCompressedOperation() — see prepareData().
1343 VkFormat m_srcFormat;
1344 VkFormat m_dstFormat;
1345 VkImageUsageFlags m_srcImageUsageFlags;
1346 VkImageUsageFlags m_dstImageUsageFlags;
1347 std::vector<tcu::UVec3> m_srcImageResolutions;
1348 std::vector<tcu::UVec3> m_dstImageResolutions;
1350 MovePtr<Buffer> m_vertexBuffer;
1351 deUint32 m_vertexCount;
1352 VkDeviceSize m_vertexBufferOffset;
// Value-initializes all members; the real setup happens in prepareData() /
// prepareVertexBuffer(), called from iterate().
1355 GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
1356 : BasicTranscodingTestInstance(context, parameters)
1359 , m_uncompressedImages()
1360 , m_compressedImage()
1361 , m_imageViewUsageKHR()
1362 , m_srcImageViewUsageKHR()
1363 , m_dstImageViewUsageKHR()
1364 , m_compressedImageResVec()
1365 , m_uncompressedImageResVec()
1368 , m_srcImageUsageFlags()
1369 , m_dstImageUsageFlags()
1370 , m_srcImageResolutions()
1371 , m_dstImageResolutions()
1374 , m_vertexBufferOffset(0ull)
// Test entry point: prepares data and vertex buffer, sanity-checks that src/dst
// payloads match in size, runs the transcode pass (write or read, selected by
// isWriteToCompressedOperation()), then verifies the decompression of every
// (level, layer) against the corresponding reference data.
1378 TestStatus GraphicsAttachmentsTestInstance::iterate (void)
1381 prepareVertexBuffer();
1383 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1384 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1385 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1387 if (isWriteToCompressedOperation())
// Write ops check the source payload against the compressed image;
// read ops check the destination payload.
// NOTE(review): verifyDecompression() is declared with (..., layer, level, ...)
// but is called here with (levelNdx, layerNdx) in that order — confirm the
// intended argument order.
1392 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1393 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1394 if (isWriteToCompressedOperation())
1396 if (!verifyDecompression(*m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1397 return TestStatus::fail("Images difference detected");
1401 if (!verifyDecompression(*m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1402 return TestStatus::fail("Images difference detected");
1405 return TestStatus::pass("Pass");
// Resolves all direction-dependent state (src/dst formats, usages, resolutions,
// view-usage overrides) from isWriteToCompressedOperation(), then generates the
// per-(level, layer) source payloads and pre-sizes the destination payloads so
// that src and dst byte sizes match for every subresource.
1408 void GraphicsAttachmentsTestInstance::prepareData ()
1410 VkImageViewUsageCreateInfoKHR* imageViewUsageKHRNull = (VkImageViewUsageCreateInfoKHR*)DE_NULL;
1412 m_imageViewUsageKHR = makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
// The compressed side of the operation gets the view-usage override; the
// uncompressed side passes a null pNext.
1414 m_srcImageViewUsageKHR = isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
1415 m_dstImageViewUsageKHR = isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
1417 m_srcFormat = isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
1418 m_dstFormat = isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
1420 m_srcImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
1421 m_dstImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
// Uncompressed resolutions are the compressed mip sizes expressed in blocks.
1423 m_compressedImageResVec = getMipLevelSizes(getLayerDims());
1424 m_uncompressedImageResVec = getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
1426 m_srcImageResolutions = isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
1427 m_dstImageResolutions = isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
1429 m_srcData.resize(getLevelCount());
1430 m_dstData.resize(getLevelCount());
1431 m_uncompressedImages.resize(getLevelCount());
1433 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1435 m_srcData[levelNdx].resize(getLayerCount());
1436 m_dstData[levelNdx].resize(getLayerCount());
1437 m_uncompressedImages[levelNdx].resize(getLayerCount());
1439 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1441 m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1442 m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
// Generate the source payload and pre-size the destination so the
// size-equality assertion in iterate() holds.
1444 if (isWriteToCompressedOperation())
1446 getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1448 m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1452 getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1454 m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1457 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
// Creates a host-visible vertex buffer holding a fullscreen quad and uploads
// the vertex data, flushing so the device sees the host writes.
1462 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
1464 const DeviceInterface& vk = m_context.getDeviceInterface();
1465 const VkDevice device = m_context.getDevice();
1466 Allocator& allocator = m_context.getDefaultAllocator();
1468 const std::vector<tcu::Vec4> vertexArray = createFullscreenQuad();
1469 const size_t vertexBufferSizeInBytes = vertexArray.size() * sizeof(vertexArray[0]);
1471 m_vertexCount = static_cast<deUint32>(vertexArray.size());
1472 m_vertexBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
1474 // Upload vertex data
1475 const Allocation& vertexBufferAlloc = m_vertexBuffer->getAllocation();
1476 deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
1477 flushMappedMemoryRange(vk, device, vertexBufferAlloc.getMemory(), vertexBufferAlloc.getOffset(), vertexBufferSizeInBytes);
1480 void GraphicsAttachmentsTestInstance::transcodeRead ()
1482 const DeviceInterface& vk = m_context.getDeviceInterface();
1483 const VkDevice device = m_context.getDevice();
1484 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1485 const VkQueue queue = m_context.getUniversalQueue();
1486 Allocator& allocator = m_context.getDefaultAllocator();
1488 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
1490 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
1491 MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
1493 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1494 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
1496 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
1498 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1499 .addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
1500 .build(vk, device));
1501 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1502 .addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1503 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1504 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1506 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
1507 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1508 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));
1510 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
1511 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1513 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1515 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
1516 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
1517 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
1518 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
1519 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
1520 const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
1522 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
1524 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
1525 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
1527 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
1528 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
1530 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
1531 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
1532 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
1534 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1536 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
1537 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1539 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
1541 de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
1542 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
1544 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
1545 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
1546 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
1547 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
1548 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
1549 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);
1551 const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
1552 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
1553 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));
1555 // Upload source image data
1556 const Allocation& alloc = srcImageBuffer->getAllocation();
1557 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
1558 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
1560 beginCommandBuffer(vk, *cmdBuffer);
1561 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1563 // Copy buffer to image
1564 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
1565 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
1566 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
1568 // Define destination image layout
1569 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
1571 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1573 const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
1574 DescriptorSetUpdateBuilder()
1575 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
1576 .update(vk, device);
1578 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1579 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1581 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
1582 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
1584 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
1586 vk.cmdEndRenderPass(*cmdBuffer);
1588 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
1589 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1590 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1591 dstImage->get(), dstSubresourceRange);
1593 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
1594 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1595 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
1597 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
1598 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
1599 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1601 endCommandBuffer(vk, *cmdBuffer);
1603 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1605 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
1606 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
1607 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
1611 m_compressedImage = srcImage;
1614 void GraphicsAttachmentsTestInstance::transcodeWrite ()
1616 const DeviceInterface& vk = m_context.getDeviceInterface();
1617 const VkDevice device = m_context.getDevice();
1618 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1619 const VkQueue queue = m_context.getUniversalQueue();
1620 Allocator& allocator = m_context.getDefaultAllocator();
1622 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
1624 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
1625 MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
1627 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1628 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
1630 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
1632 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1633 .addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
1634 .build(vk, device));
1635 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1636 .addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1637 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1638 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1640 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
1641 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1642 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));
1644 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
1645 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1647 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1649 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
1650 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
1651 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
1652 const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
1653 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
1654 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
1656 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
1658 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
1659 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
1660 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
1662 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1664 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
1665 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
1667 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
1668 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
1670 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1671 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
1673 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
1675 de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
1676 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
1678 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
1679 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
1680 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
1681 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
1682 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
1683 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
1685 const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
1686 const VkExtent2D framebufferSize (renderSize);
1687 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));
1689 // Upload source image data
1690 const Allocation& alloc = srcImageBuffer->getAllocation();
1691 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
1692 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
1694 beginCommandBuffer(vk, *cmdBuffer);
1695 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1697 // Copy buffer to image
1698 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
1699 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
1700 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
1702 // Define destination image layout
1703 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
1705 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1707 const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
1708 DescriptorSetUpdateBuilder()
1709 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
1710 .update(vk, device);
1712 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1713 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1715 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
1716 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
1718 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
1720 vk.cmdEndRenderPass(*cmdBuffer);
1722 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
1723 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1724 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1725 dstImage->get(), dstSubresourceRange);
1727 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
1728 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1729 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
1731 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
1732 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
1733 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1735 endCommandBuffer(vk, *cmdBuffer);
1737 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1739 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
1740 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
1741 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
1745 m_compressedImage = dstImage;
1748 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
1750 return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
// Builds a VkImageCreateInfo for an optimally-tiled, single-sampled, exclusive
// image of the given format/type/usage with 'levels' mip levels and 'layers'
// array layers. If 'createFlags' is non-null it fully REPLACES the default
// flags; otherwise MUTABLE_FORMAT is used, plus BLOCK_TEXEL_VIEW_COMPATIBLE
// and EXTENDED_USAGE for compressed formats (needed to create uncompressed
// views of compressed images).
// NOTE(review): the extent comes from getLayerSize(type, size); the 'size'
// parameter's declaration line is not visible in this excerpt -- presumably
// a UVec3 between 'type' and 'usageFlags'; confirm against the full file.
1753 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat format,
1754 const ImageType type,
1756 const VkImageUsageFlags usageFlags,
1757 const VkImageCreateFlags* createFlags,
1758 const deUint32 levels,
1759 const deUint32 layers)
1761 const VkImageType imageType = mapImageType(type);
1762 const VkImageCreateFlags imageCreateFlagsBase = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
// Compressed formats additionally need block-texel-view compatibility and
// extended usage so uncompressed-format views are legal.
1763 const VkImageCreateFlags imageCreateFlagsAddOn = isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR : 0;
// An explicit override wins outright; it is not OR'ed with the defaults.
1764 const VkImageCreateFlags imageCreateFlags = (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
1766 const VkImageCreateInfo createImageInfo =
1768 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1769 DE_NULL, // const void* pNext;
1770 imageCreateFlags, // VkImageCreateFlags flags;
1771 imageType, // VkImageType imageType;
1772 format, // VkFormat format;
1773 makeExtent3D(getLayerSize(type, size)), // VkExtent3D extent;
1774 levels, // deUint32 mipLevels;
1775 layers, // deUint32 arrayLayers;
1776 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1777 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1778 usageFlags, // VkImageUsageFlags usage;
1779 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1780 0u, // deUint32 queueFamilyIndexCount;
1781 DE_NULL, // const deUint32* pQueueFamilyIndices;
1782 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1785 return createImageInfo;
// Resizes 'data' to the compressed byte size of the image and fills it with
// generated test data for the given layer/level. Returns the computed size
// (return statement falls outside this excerpt).
// NOTE(review): the 'size' parameter's declaration line is not visible here --
// presumably a UVec3 extent; confirm against the full file.
1788 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat format,
1790 std::vector<deUint8>& data,
1791 const deUint32 layer,
1792 const deUint32 level)
1794 VkDeviceSize sizeBytes = getCompressedImageSizeInBytes(format, size);
1796 data.resize((size_t)sizeBytes);
// layer/level seed the generator so every subresource gets distinct content.
1797 generateData(&data[0], data.size(), format, layer, level);
// Uncompressed counterpart of getCompressedImageData(): resizes 'data' to the
// linear byte size of the image and fills it with generated test data for the
// given layer/level. Returns the computed size (return statement falls
// outside this excerpt).
// NOTE(review): the 'size' parameter's declaration line is not visible here --
// presumably a UVec3 extent; confirm against the full file.
1802 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat format,
1804 std::vector<deUint8>& data,
1805 const deUint32 layer,
1806 const deUint32 level)
// getImageSizeBytes() expects a signed IVec3, so convert the extent first.
1808 tcu::IVec3 sizeAsIVec3 = tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
1809 VkDeviceSize sizeBytes = getImageSizeBytes(sizeAsIVec3, format);
1811 data.resize((size_t)sizeBytes);
1812 generateData(&data[0], data.size(), format, layer, level);
// Verifies one (level, layer) of the transcoded compressed image: uploads the
// reference compressed data into a fresh compressed image, then a fragment
// shader samples both the reference and the result compressed images and
// writes the decompressed texels into two storage images, which are copied to
// host-visible buffers and byte-compared. On mismatch, a fuzzy compare is run
// purely to log an error mask (the return lines fall outside this excerpt).
// NOTE(review): identifiers rendered as "©Region"/"©BufferToImageRegion"
// below are character-encoding corruptions of "&copyRegion"/
// "&copyBufferToImageRegion" ("&copy" -> (c) symbol) -- fix the encoding.
1817 bool GraphicsAttachmentsTestInstance::verifyDecompression (const std::vector<deUint8>& refCompressedData,
1818 const de::MovePtr<Image>& resCompressedImage,
1819 const deUint32 level,
1820 const deUint32 layer,
1821 const UVec3& mipmapDims)
1823 const DeviceInterface& vk = m_context.getDeviceInterface();
1824 const VkDevice device = m_context.getDevice();
1825 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1826 const VkQueue queue = m_context.getUniversalQueue();
1827 Allocator& allocator = m_context.getDefaultAllocator();
// Mip extent rounded up to whole compressed blocks for the reference upload.
1829 const UVec3 mipmapDimsBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);
// The freshly-created helper images use subresource (0,0); only the result
// compressed image is addressed at the (level, layer) under test.
1831 const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1832 const VkImageSubresourceRange resSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);
1834 const VkDeviceSize dstBufferSize = getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
1835 const VkImageUsageFlags refSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
// Reference side: staging buffer + compressed image built from refCompressedData.
1837 const VkBufferCreateInfo refSrcImageBufferInfo (makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
1838 const MovePtr<Buffer> refSrcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));
1840 const VkImageCreateFlags refSrcImageCreateFlags = 0;
1841 const VkImageCreateInfo refSrcImageCreateInfo = makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1842 const MovePtr<Image> refSrcImage (new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
1843 Move<VkImageView> refSrcImageView (makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));
// Result side: sampled view into the (level, layer) of the transcoded image.
1845 const VkImageUsageFlags resSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1846 const VkImageViewUsageCreateInfoKHR resSrcImageViewUsageKHR = makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
1847 Move<VkImageView> resSrcImageView (makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));
// Two storage images receive the shader-decompressed texels (reference/result),
// each paired with a host-visible readback buffer.
1849 const VkImageCreateFlags refDstImageCreateFlags = 0;
1850 const VkImageUsageFlags refDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1851 const VkImageCreateInfo refDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1852 const MovePtr<Image> refDstImage (new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
1853 const Move<VkImageView> refDstImageView (makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
1854 const VkImageMemoryBarrier refDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
1855 const VkBufferCreateInfo refDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
1856 const MovePtr<Buffer> refDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));
1858 const VkImageCreateFlags resDstImageCreateFlags = 0;
1859 const VkImageUsageFlags resDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1860 const VkImageCreateInfo resDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1861 const MovePtr<Image> resDstImage (new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
1862 const Move<VkImageView> resDstImageView (makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
1863 const VkImageMemoryBarrier resDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
1864 const VkBufferCreateInfo resDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
1865 const MovePtr<Buffer> resDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));
// Verification pipeline: samples both compressed sources (bindings 0-1) and
// writes decompressed texels to the storage images (bindings 2-3).
1867 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1868 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));
1870 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
1872 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1873 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
1874 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
1875 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
1876 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
1877 .build(vk, device));
1878 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1879 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
1880 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
1881 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1882 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1883 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1884 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout))
1885 const VkSamplerCreateInfo refSrcSamplerInfo (makeSamplerCreateInfo());
1886 const Move<VkSampler> refSrcSampler = vk::createSampler(vk, device, &refSrcSamplerInfo);
1887 const VkSamplerCreateInfo resSrcSamplerInfo (makeSamplerCreateInfo());
1888 const Move<VkSampler> resSrcSampler = vk::createSampler(vk, device, &resSrcSamplerInfo);
1889 const VkDescriptorImageInfo descriptorRefSrcImage (makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, VK_IMAGE_LAYOUT_GENERAL));
1890 const VkDescriptorImageInfo descriptorResSrcImage (makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, VK_IMAGE_LAYOUT_GENERAL));
1891 const VkDescriptorImageInfo descriptorRefDstImage (makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
1892 const VkDescriptorImageInfo descriptorResDstImage (makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));
1894 const VkExtent2D renderSize (makeExtent2D(mipmapDims.x(), mipmapDims.y()));
1895 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1896 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
1897 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT, queueFamilyIndex));
1898 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1900 const VkBufferImageCopy copyBufferToImageRegion = makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
1901 const VkBufferImageCopy copyRegion = makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
1902 const VkBufferMemoryBarrier refSrcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
1903 const VkImageMemoryBarrier refSrcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
1904 const VkImageMemoryBarrier refSrcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
// No color attachments: the render pass exists only to run the fragment shader.
1906 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize, getLayerCount()));
1908 // Upload source image data
1910 const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
1911 deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
1912 flushMappedMemoryRange(vk, device, refSrcImageBufferAlloc.getMemory(), refSrcImageBufferAlloc.getOffset(), refCompressedData.size());
1915 beginCommandBuffer(vk, *cmdBuffer);
1916 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1918 // Copy buffer to image
// NOTE(review): dstStageMask TOP_OF_PIPE with a non-zero dstAccessMask in the
// post-copy barrier looks invalid per the Vulkan spec -- confirm.
1919 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
1920 vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, ©BufferToImageRegion);
1921 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);
1923 // Make reference and result images readable
1924 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
1925 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
1927 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1929 DescriptorSetUpdateBuilder()
1930 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
1931 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
1932 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
1933 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
1934 .update(vk, device);
1936 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1937 vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1938 vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
1940 vk.cmdEndRenderPass(*cmdBuffer);
1942 // Decompress reference image
1944 const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
1945 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1946 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1947 refDstImage->get(), subresourceRange);
1949 const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
1950 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1951 refDstBuffer->get(), 0ull, dstBufferSize);
1953 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
1954 vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, ©Region);
1955 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1958 // Decompress result image
1960 const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
1961 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1962 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1963 resDstImage->get(), subresourceRange);
1965 const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
1966 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1967 resDstBuffer->get(), 0ull, dstBufferSize);
1969 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
1970 vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, ©Region);
1971 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1974 endCommandBuffer(vk, *cmdBuffer);
1976 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1978 // Compare decompressed pixel data in reference and result images
1980 const Allocation& refDstBufferAlloc = refDstBuffer->getAllocation();
1981 invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);
1983 const Allocation& resDstBufferAlloc = resDstBuffer->getAllocation();
1984 invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
1986 if (deMemCmp(refDstBufferAlloc.getHostPtr(), resDstBufferAlloc.getHostPtr(), (size_t)dstBufferSize) != 0)
1988 // Do fuzzy to log error mask
// The second invalidate pair is redundant (already invalidated above) but harmless.
1989 invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
1990 invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);
1992 tcu::ConstPixelBufferAccess resPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
1993 tcu::ConstPixelBufferAccess refPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());
1995 string comment = string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");
// fuzzyCompare return values are deliberately ignored here -- the calls only
// log the images and error mask; the argument order swaps which image is
// treated as reference depending on the operation direction.
1997 if (isWriteToCompressedOperation())
1998 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2000 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
// Graphics-pipeline variant that transcodes via sampled-texture access
// (OPERATION_TEXTURE_READ / OPERATION_TEXTURE_WRITE) rather than render-pass
// attachments; inherits resource setup and verification from
// GraphicsAttachmentsTestInstance and overrides only the transcode paths.
2010 class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
2013 GraphicsTextureTestInstance (Context& context, const TestParameters& parameters);
// True when the compressed image is written to (texture-write operation).
2016 virtual bool isWriteToCompressedOperation ();
// Sample the compressed image through an uncompressed view, store to a plain image.
2017 virtual void transcodeRead ();
// Sample a plain image, store into the compressed image through an uncompressed view.
2018 virtual void transcodeWrite ();
// Constructor only forwards to the attachments base class; all Vulkan resources
// are created lazily inside transcodeRead()/transcodeWrite().
2021 GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
2022 : GraphicsAttachmentsTestInstance(context, parameters)
2026 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
2028 return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
2031 void GraphicsTextureTestInstance::transcodeRead ()
2033 const DeviceInterface& vk = m_context.getDeviceInterface();
2034 const VkDevice device = m_context.getDevice();
2035 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2036 const VkQueue queue = m_context.getUniversalQueue();
2037 Allocator& allocator = m_context.getDefaultAllocator();
2039 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2041 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2042 MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2044 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2045 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2047 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
2049 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2050 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2051 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2052 .build(vk, device));
2053 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2054 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2055 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2056 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2057 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2059 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
2060 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2061 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2063 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2064 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2066 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2068 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2069 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2070 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2071 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2072 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2073 const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
2075 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2077 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2078 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2080 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2081 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2083 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2084 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
2085 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
2087 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2089 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2090 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2092 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2094 de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2095 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2097 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2098 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2099 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2100 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2102 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
2103 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2104 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2105 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2106 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
2107 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2109 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2110 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
2112 // Upload source image data
2113 const Allocation& alloc = srcImageBuffer->getAllocation();
2114 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2115 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
2117 beginCommandBuffer(vk, *cmdBuffer);
2118 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2120 // Copy buffer to image
2121 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2122 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2123 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2125 // Define destination image layout
2126 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2128 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2130 DescriptorSetUpdateBuilder()
2131 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2132 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2133 .update(vk, device);
2135 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2136 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2138 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2139 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2141 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2143 vk.cmdEndRenderPass(*cmdBuffer);
2145 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2146 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2147 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2148 dstImage->get(), dstSubresourceRange);
2150 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2151 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2152 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2154 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2155 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2156 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2158 endCommandBuffer(vk, *cmdBuffer);
2160 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2162 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2163 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
2164 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2168 m_compressedImage = srcImage;
2171 void GraphicsTextureTestInstance::transcodeWrite ()
2173 const DeviceInterface& vk = m_context.getDeviceInterface();
2174 const VkDevice device = m_context.getDevice();
2175 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2176 const VkQueue queue = m_context.getUniversalQueue();
2177 Allocator& allocator = m_context.getDefaultAllocator();
2179 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2181 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2182 MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2184 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2185 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2187 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
2189 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2190 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2191 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2192 .build(vk, device));
2193 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2194 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2195 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2196 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2197 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2199 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
2200 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2201 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2203 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2204 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2206 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2208 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2209 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2210 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2211 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2212 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2213 const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
2215 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2217 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2218 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
2219 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
2221 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2223 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2224 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2226 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2227 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2229 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2230 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2232 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2234 de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2235 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2237 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2238 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2239 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2240 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2242 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
2243 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2244 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2245 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2246 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
2247 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2249 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2250 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
2252 // Upload source image data
2253 const Allocation& alloc = srcImageBuffer->getAllocation();
2254 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2255 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
2257 beginCommandBuffer(vk, *cmdBuffer);
2258 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2260 // Copy buffer to image
2261 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2262 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2263 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2265 // Define destination image layout
2266 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2268 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2270 DescriptorSetUpdateBuilder()
2271 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2272 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2273 .update(vk, device);
2275 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2276 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2278 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2279 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2281 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2283 vk.cmdEndRenderPass(*cmdBuffer);
2285 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2286 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2287 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2288 dstImage->get(), dstSubresourceRange);
2290 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2291 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2292 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2294 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2295 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2296 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2298 endCommandBuffer(vk, *cmdBuffer);
2300 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2302 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2303 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
2304 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2308 m_compressedImage = dstImage;
// Test case for VK_KHR_maintenance2 "block texel view compatible" images:
// generates the shader programs for the configured shader/operation type and
// creates the matching test instance after checking platform support.
2311 class TexelViewCompatibleCase : public TestCase
2314 TexelViewCompatibleCase (TestContext& testCtx,
2315 const std::string& name,
2316 const std::string& desc,
2317 const TestParameters& parameters);
// Builds GLSL sources: "comp"/"decompress" for compute, "vert"/"frag"/"frag_verify" for fragment.
2318 void initPrograms (SourceCollections& programCollection) const;
// Verifies extensions/format/feature support, then instantiates the per-shader-type instance.
2319 TestInstance* createInstance (Context& context) const;
2321 const TestParameters m_parameters;
// Constructor stores the parameter set; all validation happens in createInstance().
2324 TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const std::string& desc, const TestParameters& parameters)
2325 : TestCase (testCtx, name, desc)
2326 , m_parameters (parameters)
// Emits the GLSL programs for this case. Compute variant: one transcode shader
// ("comp") per operation plus a shared "decompress" shader that samples both the
// result and reference compressed images and stores them decompressed. Fragment
// variant: a pass-through vertex shader, a per-operation fragment shader, and a
// "frag_verify" shader used by the verification pass.
2330 void TexelViewCompatibleCase::initPrograms (vk::SourceCollections& programCollection) const
2332 DE_ASSERT(m_parameters.size.x() > 0);
2333 DE_ASSERT(m_parameters.size.y() > 0);
2335 switch (m_parameters.shader)
// Compute shaders: one invocation per texel (local size 1x1x1).
2337 case SHADER_TYPE_COMPUTE:
2339 const std::string imageTypeStr = getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
2340 const std::string formatQualifierStr = getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
2341 std::ostringstream src;
2342 std::ostringstream src_decompress;
2344 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
2345 << "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
2346 src_decompress << src.str();
2348 switch(m_parameters.operation)
// imageLoad from binding 0, imageStore to binding 1.
2350 case OPERATION_IMAGE_LOAD:
2352 src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
2353 << "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
2354 << "void main (void)\n"
2356 << " ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
2357 << " imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"
// texelFetch from a sampler; z component selects the mip level (LOD argument).
2363 case OPERATION_TEXEL_FETCH:
2365 src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
2366 << "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
2367 << "void main (void)\n"
2369 << " ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
2370 << " imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n"
// Filtered texture() lookup with normalized coordinates derived from workgroup count.
2376 case OPERATION_TEXTURE:
2378 src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
2379 << "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
2380 << "void main (void)\n"
2382 << " const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x - 1, gl_NumWorkGroups.y - 1);\n"
2383 << " const ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
2384 << " const vec2 coord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
2385 << " imageStore(u_image1, pos, texture(u_image0, coord));\n"
// Store into the compressed-view image (binding 0), then re-read it into binding 2.
2391 case OPERATION_IMAGE_STORE:
2393 src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<" u_image0;\n"
2394 << "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image1;\n"
2395 << "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
2396 << "void main (void)\n"
2398 << " ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
2399 << " imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
2400 << " imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"
// Shared decompression shader: samples result and reference compressed images
// and stores both decompressed for the later host-side comparison.
2410 src_decompress << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
2411 << "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_reference;\n"
2412 << "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_result;\n"
2413 << "layout (binding = 3, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_reference;\n\n"
2414 << "void main (void)\n"
2416 << " const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
2417 << " const vec2 cord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
2418 << " const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); \n"
2419 << " imageStore(decompressed_result, pos, texture(compressed_result, cord));\n"
2420 << " imageStore(decompressed_reference, pos, texture(compressed_reference, cord));\n"
2422 programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
2423 programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());
// Fragment shaders: full-screen quad; 2D-array images are viewed per-layer as 2D.
2428 case SHADER_TYPE_FRAGMENT:
2430 ImageType imageTypeForFS = (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;
// Pass-through vertex shader shared by all fragment operations.
2434 std::ostringstream src;
2435 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
2436 << "layout(location = 0) in vec4 v_in_position;\n"
2438 << "void main (void)\n"
2440 << " gl_Position = v_in_position;\n"
2443 programCollection.glslSources.add("vert") << glu::VertexSource(src.str());
2448 switch(m_parameters.operation)
// Read the source as an input attachment, write it out as the color attachment.
2450 case OPERATION_ATTACHMENT_READ:
2451 case OPERATION_ATTACHMENT_WRITE:
2453 std::ostringstream src;
2455 const std::string dstTypeStr = getGlslFormatType(m_parameters.formatUncompressed);
2456 const std::string srcTypeStr = getGlslInputFormatType(m_parameters.formatUncompressed);
2458 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
2459 << "precision highp int;\n"
2460 << "precision highp float;\n"
2462 << "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
2463 << "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
2465 << "void main (void)\n"
2467 << " o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
2470 programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());
// Sample the source, imageStore to the destination (used by transcodeRead/Write).
2475 case OPERATION_TEXTURE_READ:
2476 case OPERATION_TEXTURE_WRITE:
2478 std::ostringstream src;
2480 const std::string srcSamplerTypeStr = getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
2481 const std::string dstImageTypeStr = getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
2482 const std::string dstFormatQualifierStr = getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
2484 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
2485 << "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
2486 << "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
2488 << "void main (void)\n"
2490 << " const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
2491 << " const ivec2 pixels_resolution = ivec2(textureSize(u_imageIn, 0)) - ivec2(1,1);\n"
2492 << " const vec2 in_pos = vec2(out_pos) / vec2(pixels_resolution);\n"
2493 << " imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
2496 programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());
2506 // Verification fragment shader
// Samples result and reference images and stores both decompressed, mirroring
// the compute "decompress" shader for the fragment-based verification pass.
2508 std::ostringstream src;
2510 const std::string samplerType = getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
2511 const std::string imageTypeStr = getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
2512 const std::string formatQualifierStr = getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));
2514 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
2515 << "layout (binding = 0) uniform " << samplerType << " u_imageIn0;\n"
2516 << "layout (binding = 1) uniform " << samplerType << " u_imageIn1;\n"
2517 << "layout (binding = 2, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut0;\n"
2518 << "layout (binding = 3, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut1;\n"
2520 << "void main (void)\n"
2522 << " const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
2524 << " const ivec2 pixels_resolution0 = ivec2(textureSize(u_imageIn0, 0)) - ivec2(1,1);\n"
2525 << " const vec2 in_pos0 = vec2(out_pos) / vec2(pixels_resolution0);\n"
2526 << " imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
2528 << " const ivec2 pixels_resolution1 = ivec2(textureSize(u_imageIn1, 0)) - ivec2(1,1);\n"
2529 << " const vec2 in_pos1 = vec2(out_pos) / vec2(pixels_resolution1);\n"
2530 << " imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
2533 programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
// Creates the TestInstance matching m_parameters.shader/operation after
// validating device prerequisites. Throws NotSupportedError when the
// VK_KHR_maintenance2 extension, a required image-format capability, or the
// matching textureCompression* device feature is unavailable.
2544 TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
2546 const VkPhysicalDevice physicalDevice = context.getPhysicalDevice();
2547 const InstanceInterface& vk = context.getInstanceInterface();
// Non-mipmapped tests are expected to use a single-layer image with depth 1.
2549 if (!m_parameters.useMipmaps)
2551 DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size) == 1u);
2552 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).z() == 1u);
// The base layer must have a non-zero width and height.
2555 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() > 0u);
2556 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() > 0u);
// VK_KHR_maintenance2 provides VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
// which this whole test group depends on.
2558 if (std::find(context.getDeviceExtensions().begin(), context.getDeviceExtensions().end(), "VK_KHR_maintenance2") == context.getDeviceExtensions().end())
2559 TCU_THROW(NotSupportedError, "Extension VK_KHR_maintenance2 not supported");
2562 VkImageFormatProperties imageFormatProperties;
// The uncompressed view format must support the requested usage with optimal tiling.
2564 if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
2565 mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2566 m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties))
2567 TCU_THROW(NotSupportedError, "Operation not supported with this image format");
// The compressed format must support sampling and transfers together with the
// mutable-format, extended-usage and block-texel-view-compatible create flags.
2569 if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
2570 mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2571 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
2572 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
2573 &imageFormatProperties))
2574 TCU_THROW(NotSupportedError, "Operation not supported with this image format");
// Require the device feature corresponding to the compressed format family
// (BC, ETC2/EAC or ASTC LDR), selected by the format's enum range.
2578 const VkPhysicalDeviceFeatures physicalDeviceFeatures = getPhysicalDeviceFeatures (vk, physicalDevice);
2580 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
2581 !physicalDeviceFeatures.textureCompressionBC)
2582 TCU_THROW(NotSupportedError, "textureCompressionBC not supported");
2584 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
2585 !physicalDeviceFeatures.textureCompressionETC2)
2586 TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");
2588 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK) &&
2589 !physicalDeviceFeatures.textureCompressionASTC_LDR)
2590 TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");
// Dispatch to the instance class implementing the requested stage/operation.
2593 switch (m_parameters.shader)
2595 case SHADER_TYPE_COMPUTE:
2597 switch (m_parameters.operation)
2599 case OPERATION_IMAGE_LOAD:
2600 case OPERATION_TEXEL_FETCH:
2601 case OPERATION_TEXTURE:
2602 return new BasicComputeTestInstance(context, m_parameters);
2603 case OPERATION_IMAGE_STORE:
2604 return new ImageStoreComputeTestInstance(context, m_parameters);
// Unreachable: every compute operation is handled above.
2606 TCU_THROW(InternalError, "Impossible");
2610 case SHADER_TYPE_FRAGMENT:
2612 switch (m_parameters.operation)
2614 case OPERATION_ATTACHMENT_READ:
2615 case OPERATION_ATTACHMENT_WRITE:
2616 return new GraphicsAttachmentsTestInstance(context, m_parameters);
2618 case OPERATION_TEXTURE_READ:
2619 case OPERATION_TEXTURE_WRITE:
2620 return new GraphicsTextureTestInstance(context, m_parameters);
// Unreachable: every fragment operation is handled above.
2623 TCU_THROW(InternalError, "Impossible");
// Unreachable: every shader type is handled above.
2628 TCU_THROW(InternalError, "Impossible");
2634 static tcu::UVec3 getUnniceResolution(const VkFormat format, const deUint32 layers)
2636 const deUint32 unniceMipmapTextureSize[] = { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
2637 const deUint32 baseTextureWidth = unniceMipmapTextureSize[getBlockWidth(format)];
2638 const deUint32 baseTextureHeight = unniceMipmapTextureSize[getBlockHeight(format)];
2639 const deUint32 baseTextureWidthLevels = deLog2Floor32(baseTextureWidth);
2640 const deUint32 baseTextureHeightLevels = deLog2Floor32(baseTextureHeight);
2641 const deUint32 widthMultiplier = (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
2642 const deUint32 heightMultiplier = (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
2643 const deUint32 width = baseTextureWidth * widthMultiplier;
2644 const deUint32 height = baseTextureHeight * heightMultiplier;
2646 // Number of levels should be same on both axises
2647 DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
2649 return tcu::UVec3(width, height, layers);
// Builds the "texel_view_compatible" test group hierarchy:
// shader type -> mipmapness -> operation -> compressed format -> uncompressed format.
// Compressed and uncompressed formats are paired by block/texel bitness
// (64-bit sets together, 128-bit sets together).
2652 tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
// Member of the local FormatsArray helper struct (format pointer + count)
// used by the format-set tables below.
// NOTE(review): the struct's remaining declaration lines are not visible here — confirm.
2656 const VkFormat* formats;
// Mipmapped vs non-mipmapped variants.
// NOTE(review): initializer lines for the next four tables are not visible here — confirm contents.
2660 const bool mipmapness[] =
2666 const std::string pipelineName[SHADER_TYPE_LAST] =
// NOTE(review): "mipmanpness" identifier typo; renaming would touch all uses below.
2672 const std::string mipmanpnessName[DE_LENGTH_OF_ARRAY(mipmapness)] =
2678 const std::string operationName[OPERATION_LAST] =
// Usage common to every image in these tests: transfer source and destination.
2690 const VkImageUsageFlags baseImageUsageFlagSet = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
// Per-operation usage flags for the compressed image (indexed by Operation).
2691 const VkImageUsageFlags compressedImageUsageFlags[OPERATION_LAST] =
2693 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), // "image_load"
2694 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texel_fetch"
2695 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture"
2696 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "image_store"
2697 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), // "attachment_read"
2698 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT), // "attachment_write"
2699 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), // "texture_read"
2700 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture_write"
// Per-operation usage flags for the uncompressed views created over the
// compressed image; only "attachment_write" needs an extra usage bit.
2703 const VkImageUsageFlags compressedImageViewUsageFlags[OPERATION_LAST] =
2705 compressedImageUsageFlags[0], //"image_load"
2706 compressedImageUsageFlags[1], //"texel_fetch"
2707 compressedImageUsageFlags[2], //"texture"
2708 compressedImageUsageFlags[3], //"image_store"
2709 compressedImageUsageFlags[4], //"attachment_read"
2710 compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, //"attachment_write"
2711 compressedImageUsageFlags[6], //"texture_read"
2712 compressedImageUsageFlags[7], //"texture_write"
// Per-operation usage flags for the uncompressed (result) image.
2715 const VkImageUsageFlags uncompressedImageUsageFlags[OPERATION_LAST] =
2717 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), //"image_load"
2718 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texel_fetch"
2719 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texture"
2720 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"image_store"
2721 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"attachment_read"
2722 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT), //"attachment_write"
2723 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_read"
2724 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_write"
// Compressed formats whose blocks are 64 bits wide (BC1/BC4, ETC2 RGB/RGBA1, EAC R11).
2727 const VkFormat compressedFormats64bit[] =
2729 VK_FORMAT_BC1_RGB_UNORM_BLOCK,
2730 VK_FORMAT_BC1_RGB_SRGB_BLOCK,
2731 VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
2732 VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
2733 VK_FORMAT_BC4_UNORM_BLOCK,
2734 VK_FORMAT_BC4_SNORM_BLOCK,
2735 VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
2736 VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
2737 VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
2738 VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
2739 VK_FORMAT_EAC_R11_UNORM_BLOCK,
2740 VK_FORMAT_EAC_R11_SNORM_BLOCK,
// Compressed formats whose blocks are 128 bits wide (BC2/3/5/6/7, ETC2 RGBA8, EAC RG11, all ASTC).
2743 const VkFormat compressedFormats128bit[] =
2745 VK_FORMAT_BC2_UNORM_BLOCK,
2746 VK_FORMAT_BC2_SRGB_BLOCK,
2747 VK_FORMAT_BC3_UNORM_BLOCK,
2748 VK_FORMAT_BC3_SRGB_BLOCK,
2749 VK_FORMAT_BC5_UNORM_BLOCK,
2750 VK_FORMAT_BC5_SNORM_BLOCK,
2751 VK_FORMAT_BC6H_UFLOAT_BLOCK,
2752 VK_FORMAT_BC6H_SFLOAT_BLOCK,
2753 VK_FORMAT_BC7_UNORM_BLOCK,
2754 VK_FORMAT_BC7_SRGB_BLOCK,
2755 VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
2756 VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
2757 VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
2758 VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
2759 VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
2760 VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
2761 VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
2762 VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
2763 VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
2764 VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
2765 VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
2766 VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
2767 VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
2768 VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
2769 VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
2770 VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
2771 VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
2772 VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
2773 VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
2774 VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
2775 VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
2776 VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
2777 VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
2778 VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
2779 VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
2780 VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
2781 VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
2782 VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
2783 VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
2784 VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
2785 VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
2786 VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
// Uncompressed view formats whose texels are 64 bits wide.
2789 const VkFormat uncompressedFormats64bit[] =
2791 VK_FORMAT_R16G16B16A16_UNORM,
2792 VK_FORMAT_R16G16B16A16_SNORM,
2793 VK_FORMAT_R16G16B16A16_USCALED,
2794 VK_FORMAT_R16G16B16A16_SSCALED,
2795 VK_FORMAT_R16G16B16A16_UINT,
2796 VK_FORMAT_R16G16B16A16_SINT,
2797 VK_FORMAT_R16G16B16A16_SFLOAT,
2798 VK_FORMAT_R32G32_UINT,
2799 VK_FORMAT_R32G32_SINT,
2800 VK_FORMAT_R32G32_SFLOAT,
2801 //VK_FORMAT_R64_UINT, removed from the test: it could not be used
2802 //VK_FORMAT_R64_SINT, removed from the test: it could not be used
2803 //VK_FORMAT_R64_SFLOAT, removed from the test: it could not be used
// Uncompressed view formats whose texels are 128 bits wide.
2806 const VkFormat uncompressedFormats128bit[] =
2808 VK_FORMAT_R32G32B32A32_UINT,
2809 VK_FORMAT_R32G32B32A32_SINT,
2810 VK_FORMAT_R32G32B32A32_SFLOAT,
2811 //VK_FORMAT_R64G64_UINT, removed from the test: it could not be used
2812 //VK_FORMAT_R64G64_SINT, removed from the test: it could not be used
2813 //VK_FORMAT_R64G64_SFLOAT, removed from the test: it could not be used
// Group the format tables by bitness; entries of the two set arrays below
// must stay index-aligned (64-bit with 64-bit, 128-bit with 128-bit).
2816 const FormatsArray formatsCompressedSets[] =
2819 compressedFormats64bit,
2820 DE_LENGTH_OF_ARRAY(compressedFormats64bit)
2823 compressedFormats128bit,
2824 DE_LENGTH_OF_ARRAY(compressedFormats128bit)
2828 const FormatsArray formatsUncompressedSets[] =
2831 uncompressedFormats64bit,
2832 DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
2835 uncompressedFormats128bit,
2836 DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)
// The compressed and uncompressed bitness groups must pair up one-to-one.
2840 DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));
2842 MovePtr<tcu::TestCaseGroup> texelViewCompatibleTests (new tcu::TestCaseGroup(testCtx, "texel_view_compatible", "Texel view compatible cases"));
// Outermost level: one subgroup per shader type (compute, fragment).
2844 for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
2846 MovePtr<tcu::TestCaseGroup> pipelineTypeGroup (new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str(), ""));
2848 for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
2850 const bool mipmapTest = mipmapness[mipmapTestNdx];
2852 MovePtr<tcu::TestCaseGroup> mipmapTypeGroup (new tcu::TestCaseGroup(testCtx, mipmanpnessName[mipmapTestNdx].c_str(), ""));
2854 for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
// Attachment/texture operations belong to fragment shaders only;
// image load/store operations belong to compute shaders only.
2856 if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
2859 if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))
2862 MovePtr<tcu::TestCaseGroup> imageOperationGroup (new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str(), ""));
2864 // Iterate through bitness groups (64 bit, 128 bit, etc)
2865 for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
2867 for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
2869 const VkFormat formatCompressed = formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
2870 const std::string compressedFormatGroupName = getFormatShortString(formatCompressed);
2871 MovePtr<tcu::TestCaseGroup> compressedFormatGroup (new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str(), ""));
// Pair the compressed format with every uncompressed format of the same bitness.
2873 for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
2875 const VkFormat formatUncompressed = formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
2876 const std::string uncompressedFormatGroupName = getFormatShortString(formatUncompressed);
// NOTE(review): some TestParameters initializer lines are not visible here — verify field order against the struct.
2877 const TestParameters parameters =
2879 static_cast<Operation>(operationNdx),
2880 static_cast<ShaderType>(shaderType),
// Mipmapped cases use an awkward 3-layer resolution; others use a flat 64x64.
2881 mipmapTest ? getUnniceResolution(formatCompressed, 3u) : UVec3(64u, 64u, 1u),
// image_store uses one extra image than the other operations.
2885 (operationNdx == OPERATION_IMAGE_STORE) ? 3u : 2u,
2886 compressedImageUsageFlags[operationNdx],
2887 compressedImageViewUsageFlags[operationNdx],
2888 uncompressedImageUsageFlags[operationNdx],
2890 VK_FORMAT_R8G8B8A8_UNORM
2893 compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, "", parameters));
2896 imageOperationGroup->addChild(compressedFormatGroup.release());
2900 mipmapTypeGroup->addChild(imageOperationGroup.release());
2903 pipelineTypeGroup->addChild(mipmapTypeGroup.release());
2906 texelViewCompatibleTests->addChild(pipelineTypeGroup.release());
2909 return texelViewCompatibleTests.release();