1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2017 The Khronos Group Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
20 * \file vktImageCompressionTranscodingSupport.cpp
21 * \brief Compression transcoding support
22 *//*--------------------------------------------------------------------*/
24 #include "vktImageCompressionTranscodingSupport.hpp"
26 #include "deUniquePtr.hpp"
27 #include "deStringUtil.hpp"
28 #include "deSharedPtr.hpp"
29 #include "deRandom.hpp"
31 #include "vktTestCaseUtil.hpp"
32 #include "vkPrograms.hpp"
33 #include "vkImageUtil.hpp"
34 #include "vktImageTestsUtil.hpp"
35 #include "vkBuilderUtil.hpp"
37 #include "vkRefUtil.hpp"
38 #include "vkTypeUtil.hpp"
39 #include "vkQueryUtil.hpp"
41 #include "tcuTextureUtil.hpp"
42 #include "tcuTexture.hpp"
43 #include "tcuCompressedTexture.hpp"
44 #include "tcuVectorType.hpp"
45 #include "tcuResource.hpp"
46 #include "tcuImageIO.hpp"
47 #include "tcuImageCompare.hpp"
48 #include "tcuTestLog.hpp"
49 #include "tcuRGBA.hpp"
50 #include "tcuSurface.hpp"
// Convenience aliases for frequently used tcu types.
62 using tcu::TestContext;
63 using tcu::TestStatus;
66 using tcu::CompressedTexFormat;
67 using tcu::CompressedTexture;
70 using tcu::ConstPixelBufferAccess;
// Reference-counted wrappers so move-only Vulkan handle types (Move/MovePtr)
// can be stored in std::vector containers (see makeVkSharedPtr below).
75 typedef SharedPtr<MovePtr<Image> > ImageSp;
76 typedef SharedPtr<Move<VkImageView> > ImageViewSp;
77 typedef SharedPtr<Move<VkDescriptorSet> > SharedVkDescriptorSet;
// Shader/pipeline operations exercised by the transcoding tests.
// NOTE(review): the enum header (and at least OPERATION_IMAGE_LOAD /
// OPERATION_TEXTURE, referenced by iterate() below) was lost in extraction.
89 OPERATION_TEXEL_FETCH,
91 OPERATION_IMAGE_STORE,
92 OPERATION_ATTACHMENT_READ,
93 OPERATION_ATTACHMENT_WRITE,
94 OPERATION_TEXTURE_READ,
95 OPERATION_TEXTURE_WRITE,
// TestParameters fields (struct header lost in extraction). Configures which
// compressed format is tested, which uncompressed format its data is
// reinterpreted as, and the usage flags for each image kind.
105 VkFormat formatCompressed; // block-compressed format under test
106 VkFormat formatUncompressed; // format used to view/reinterpret the compressed data
107 deUint32 imagesCount; // number of images in the test chain (last one holds results)
108 VkImageUsageFlags compressedImageUsage;
109 VkImageUsageFlags compressedImageViewUsage; // usage for views created via VkImageViewUsageCreateInfoKHR
110 VkImageUsageFlags uncompressedImageUsage;
112 VkFormat formatForVerify; // format used when decompressing for verification
// Wrap a move-only handle (Move<T>) or pointer (MovePtr<T>) in a SharedPtr so
// it can be held in standard containers. The heap-allocated Move/MovePtr takes
// ownership from the argument. (Template headers/braces lost in extraction.)
116 inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
118 return SharedPtr<Move<T> >(new Move<T>(move));
122 inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
124 return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
// Constants for the non-mipmapped / non-arrayed image configuration.
127 const deUint32 SINGLE_LEVEL = 1u;
128 const deUint32 SINGLE_LAYER = 1u;
// Common base for all transcoding test instances: owns the test parameters,
// the compressed format's block dimensions, and the usable mip level count.
// (Access specifiers and braces lost in extraction.)
130 class BasicTranscodingTestInstance : public TestInstance
133 BasicTranscodingTestInstance (Context& context,
134 const TestParameters& parameters);
135 virtual TestStatus iterate (void) = 0;
// Fill a buffer with reproducible test data for the given format/layer/level.
137 void generateData (deUint8* toFill,
139 const VkFormat format,
140 const deUint32 layer = 0u,
141 const deUint32 level = 0u);
142 deUint32 getLevelCount ();
143 deUint32 getLayerCount ();
144 UVec3 getLayerDims ();
// Per-mip-level sizes in texels, and the corresponding sizes in compressed blocks.
145 vector<UVec3> getMipLevelSizes (UVec3 baseSize);
146 vector<UVec3> getCompressedMipLevelSizes (const VkFormat compressedFormat,
147 const vector<UVec3>& uncompressedSizes);
149 const TestParameters m_parameters;
// Block dimensions of m_parameters.formatCompressed; declared before
// m_levelCount because findMipMapLevelCount() reads them during construction.
150 const deUint32 m_blockWidth;
151 const deUint32 m_blockHeight;
152 const deUint32 m_levelCount;
153 const UVec3 m_layerSize;
156 deUint32 findMipMapLevelCount ();
// Compute how many mip levels can be used: levels whose resolution would drop
// below the compressed block size are excluded. Returns 1 when mipmaps are
// disabled. (Loop-body lines halving w/h and incrementing levelCount were lost
// in extraction.)
159 deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
161 deUint32 levelCount = 1;
163 // We cannot use mipmap levels which have resolution below block size.
164 // Reduce number of mipmap levels
165 if (m_parameters.useMipmaps)
167 deUint32 w = m_parameters.size.x();
168 deUint32 h = m_parameters.size.y();
170 DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);
172 while (w > m_blockWidth && h > m_blockHeight)
177 if (w > m_blockWidth && h > m_blockHeight)
// Sanity check: the smallest selected level still covers at least one block.
181 DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
182 DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
// Constructor: caches block dimensions before computing m_levelCount, since
// findMipMapLevelCount() depends on m_blockWidth/m_blockHeight (member
// declaration order guarantees this initialization order).
188 BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
189 : TestInstance (context)
190 , m_parameters (parameters)
191 , m_blockWidth (getBlockWidth(m_parameters.formatCompressed))
192 , m_blockHeight (getBlockHeight(m_parameters.formatCompressed))
193 , m_levelCount (findMipMapLevelCount())
194 , m_layerSize (getLayerSize(m_parameters.imageType, m_parameters.size))
// Tests assume a square-ish base size (equal log2 extents in x and y).
196 DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
// Simple accessors. (Bodies of getLevelCount()/getLayerDims() — presumably
// returning m_levelCount and m_layerSize — were lost in extraction.)
199 deUint32 BasicTranscodingTestInstance::getLevelCount()
// Layer count is carried in the z component of the test size parameter.
204 deUint32 BasicTranscodingTestInstance::getLayerCount()
206 return m_parameters.size.z();
209 UVec3 BasicTranscodingTestInstance::getLayerDims()
// Build the full mip chain sizes starting from baseSize, halving x and y per
// level (clamped to 1) until either getLevelCount() levels are produced or the
// level degenerates to 1x1. Only valid for 2D / 2D-array image types.
// (Return statement lost in extraction.)
214 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
216 vector<UVec3> levelSizes;
217 const deUint32 levelCount = getLevelCount();
219 DE_ASSERT(m_parameters.imageType == IMAGE_TYPE_2D || m_parameters.imageType == IMAGE_TYPE_2D_ARRAY);
223 levelSizes.push_back(baseSize);
225 while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
227 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
228 baseSize.y() = deMax32(baseSize.y() >> 1, 1);
229 levelSizes.push_back(baseSize);
232 DE_ASSERT(levelSizes.size() == getLevelCount());
// Convert each uncompressed mip level size (in texels) to its size in
// compressed blocks for the given format. (Return statement lost in
// extraction.)
237 vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
239 vector<UVec3> levelSizes;
240 vector<UVec3>::const_iterator it;
242 for (it = uncompressedSizes.begin(); it != uncompressedSizes.end(); it++)
243 levelSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, *it));
// Fill 'toFill' with reproducible test data:
//  1. For layer 0 / level 0 (if the buffer is large enough) write a fixed
//     pattern of edge-case bit sequences — zeros, walking ones, and IEEE-754
//     infinities/NaNs in both double (8-byte) and float (4-byte) encodings —
//     once reversed and once forward.
//  2. Fill the remainder with pseudo-random 32-bit words seeded from
//     (layer, level, format) so every subresource gets distinct, repeatable data.
//  3. Scrub byte patterns that the uncompressed view format cannot round-trip
//     (SNORM -32768 alias, HALF_FLOAT/FLOAT INF, NaN and denormals).
// NOTE(review): the 'size' parameter declaration and the replacement-value
// assignment lines inside the fix-up branches were lost in extraction.
248 void BasicTranscodingTestInstance::generateData (deUint8* toFill,
250 const VkFormat format,
251 const deUint32 layer,
252 const deUint32 level)
254 const deUint8 pattern[] =
257 0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
258 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
259 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
260 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
261 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
262 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
263 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
264 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
265 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
266 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
// 64-bit (double) special values:
267 0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Positive infinity
268 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Negative infinity
269 0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
270 0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
271 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
272 0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
273 0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
274 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
275 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
276 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
// 32-bit (float) special values:
278 0x7F, 0x80, 0x00, 0x00, // Positive infinity
279 0xFF, 0x80, 0x00, 0x00, // Negative infinity
280 0x7F, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
281 0x7F, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
282 0xFF, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
283 0xFF, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
284 0x7F, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
285 0x7F, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
286 0xFF, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
287 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
288 0xAA, 0xAA, 0xAA, 0xAA, // alternating-bit patterns
289 0x55, 0x55, 0x55, 0x55,
292 deUint8* start = toFill;
293 size_t sizeToRnd = size;
// Pattern is only embedded once, in the first subresource, and only when the
// buffer can hold it twice (reversed copy + forward copy).
296 if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
// First copy: byte-reversed pattern.
299 for (size_t i = 0; i < sizeof(pattern); i++)
300 start[sizeof(pattern) - i - 1] = pattern[i];
302 start += sizeof(pattern);
303 sizeToRnd -= sizeof(pattern);
// Second copy: pattern as-is.
306 deMemcpy(start, pattern, sizeof(pattern));
308 start += sizeof(pattern);
309 sizeToRnd -= sizeof(pattern);
// Random fill works on whole 32-bit words.
314 DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);
316 deUint32* start32 = reinterpret_cast<deUint32*>(start);
317 size_t sizeToRnd32 = sizeToRnd / sizeof(deUint32);
// Deterministic per-subresource seed: distinct data per layer/level/format.
318 deUint32 seed = (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
321 for (size_t i = 0; i < sizeToRnd32; i++)
322 start32[i] = rnd.getUint32();
326 // Remove certain values that may not be preserved based on the uncompressed view format
327 if (isSnormFormat(m_parameters.formatUncompressed))
329 for (size_t i = 0; i < size; i += 2)
331 // SNORM fix: -32768 (0x8000 little-endian) is not round-trippable:
332 // a write in SNORM format replaces 0x00 0x80 with 0x01 0x80
333 if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
337 else if (isFloatFormat(m_parameters.formatUncompressed))
339 tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);
341 if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
343 for (size_t i = 0; i < size; i += 2)
345 // HALF_FLOAT fix: remove INF and NaN (exponent bits of the high byte all set)
346 if ((toFill[i+1] & 0x7C) == 0x7C)
350 else if (textureFormat.type == tcu::TextureFormat::FLOAT)
352 for (size_t i = 0; i < size; i += 4)
354 // HALF_FLOAT fix: remove INF and NaN
// NOTE(review): this loop strides 4 bytes but applies the half-float
// exponent mask to byte i+1 — confirm it intentionally scrubs half-float
// patterns within float-typed data rather than being a copy-paste leftover.
355 if ((toFill[i+1] & 0x7C) == 0x7C)
359 for (size_t i = 0; i < size; i += 4)
361 // FLOAT fix: remove INF, NaN, and denorm
// Checks both big- and little-endian byte positions of the float exponent.
363 if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
366 if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
// Compute-pipeline variant of the transcoding test. Owns per-image bundles of
// VkImage / VkImageView / VkImageCreateInfo (the accessors below belong to an
// inner helper class — presumably 'ImageData', used throughout this file —
// whose header was lost in extraction).
374 class BasicComputeTestInstance : public BasicTranscodingTestInstance
377 BasicComputeTestInstance (Context& context,
378 const TestParameters& parameters);
379 TestStatus iterate (void);
// --- inner image-data container: counts, element access, and append helpers ---
383 deUint32 getImagesCount (void) { return static_cast<deUint32>(images.size()); }
384 deUint32 getImageViewCount (void) { return static_cast<deUint32>(imagesViews.size()); }
385 deUint32 getImageInfoCount (void) { return static_cast<deUint32>(imagesInfos.size()); }
386 VkImage getImage (const deUint32 ndx) { return **images[ndx]->get(); }
387 VkImageView getImageView (const deUint32 ndx) { return **imagesViews[ndx]; }
388 VkImageCreateInfo getImageInfo (const deUint32 ndx) { return imagesInfos[ndx]; }
389 void addImage (MovePtr<Image> image) { images.push_back(makeVkSharedPtr(image)); }
390 void addImageView (Move<VkImageView> imageView) { imagesViews.push_back(makeVkSharedPtr(imageView));}
391 void addImageInfo (const VkImageCreateInfo imageInfo) { imagesInfos.push_back(imageInfo); }
// Drop all views (used before rebuilding views for the decompression pass).
392 void resetViews () { imagesViews.clear(); }
394 vector<ImageSp> images;
395 vector<ImageViewSp> imagesViews;
396 vector<VkImageCreateInfo> imagesInfos;
// --- BasicComputeTestInstance helpers ---
// Upload m_data into an image's subresources via a staging buffer.
398 void copyDataToImage (const VkCommandBuffer& cmdBuffer,
399 ImageData& imageData,
400 const vector<UVec3>& mipMapSizes,
401 const bool isCompressed);
// Bind descriptors and dispatch the transcoding compute shader per view.
402 virtual void executeShader (const VkCommandBuffer& cmdBuffer,
403 const VkDescriptorSetLayout& descriptorSetLayout,
404 const VkDescriptorPool& descriptorPool,
405 vector<ImageData>& imageData);
// Read back one uncompressed image and memcmp against the source data.
406 bool copyResultAndCompare (const VkCommandBuffer& cmdBuffer,
407 const VkImage& uncompressed,
408 const VkDeviceSize offset,
410 void descriptorSetUpdate (VkDescriptorSet descriptorSet,
411 const VkDescriptorImageInfo* descriptorImageInfos);
412 void createImageInfos (ImageData& imageData,
413 const vector<UVec3>& mipMapSizes,
414 const bool isCompressed);
// Decompress both reference and result and compare them image-by-image.
415 bool decompressImage (const VkCommandBuffer& cmdBuffer,
416 vector<ImageData>& imageData,
417 const vector<UVec3>& mipMapSizes);
// Generated source data for the whole compressed mip/layer chain.
418 vector<deUint8> m_data;
// Constructor: all state is derived lazily in iterate(); just forward to base.
422 BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
423 :BasicTranscodingTestInstance (context, parameters)
// Main test body:
//  1. Create image infos/images for every slot (slot 0 = compressed image,
//     last slot = result images), plus per-mip/per-layer uncompressed-format
//     views of the compressed image using VkImageViewUsageCreateInfoKHR.
//  2. Generate source data and upload it according to the tested operation.
//  3. Build descriptor layout/pool, run the compute shader.
//  4. Read back each result image and byte-compare against the source data.
//  5. Finally run the decompression-based comparison.
// NOTE(review): several lines (braces, the 'infoNdx' loop around view
// creation, data-size computation start) were lost in extraction.
427 TestStatus BasicComputeTestInstance::iterate (void)
429 const DeviceInterface& vk = m_context.getDeviceInterface();
430 const VkDevice device = m_context.getDevice();
431 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
432 Allocator& allocator = m_context.getDefaultAllocator();
433 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
434 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
435 const vector<UVec3> mipMapSizes = m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, m_parameters.size);
436 vector<ImageData> imageData (m_parameters.imagesCount);
437 const deUint32 compressedNdx = 0u; // slot 0 holds the compressed image
438 const deUint32 resultImageNdx = m_parameters.imagesCount -1u; // last slot holds results
440 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
442 const bool isCompressed = compressedNdx == imageNdx ? true : false;
443 createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
444 for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
446 imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
// Views of the compressed image are created with the uncompressed format;
// the pNext usage struct restricts view usage to the compressed-image usage.
449 const VkImageViewUsageCreateInfoKHR imageViewUsageKHR =
451 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
452 DE_NULL, //const void* pNext;
453 m_parameters.compressedImageUsage, //VkImageUsageFlags usage;
// One single-mip, single-layer view per (mip, layer) of the compressed image.
455 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
456 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
458 imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
459 mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
460 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
461 &imageViewUsageKHR));
// Uncompressed images get a plain single-subresource view.
466 imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
467 mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
468 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
// Total compressed data size across all mips and layers.
475 for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
477 size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
480 generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
// Upload source data: read-style operations fill the compressed image,
// IMAGE_STORE fills the intermediate (uncompressed) image instead.
483 switch(m_parameters.operation)
485 case OPERATION_IMAGE_LOAD:
486 case OPERATION_TEXEL_FETCH:
487 case OPERATION_TEXTURE:
488 copyDataToImage(*cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
490 case OPERATION_IMAGE_STORE:
491 copyDataToImage(*cmdBuffer, imageData[1], mipMapSizes, false);
499 Move<VkDescriptorSetLayout> descriptorSetLayout;
500 Move<VkDescriptorPool> descriptorPool;
502 DescriptorSetLayoutBuilder descriptorSetLayoutBuilder;
503 DescriptorPoolBuilder descriptorPoolBuilder;
// One binding per image slot; the compressed slot is sampled for
// TEXEL_FETCH/TEXTURE, everything else is a storage image.
504 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
506 switch(m_parameters.operation)
508 case OPERATION_IMAGE_LOAD:
509 case OPERATION_IMAGE_STORE:
510 descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
511 descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
513 case OPERATION_TEXEL_FETCH:
514 case OPERATION_TEXTURE:
515 descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
516 descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
523 descriptorSetLayout = descriptorSetLayoutBuilder.build(vk, device);
524 descriptorPool = descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
525 executeShader(*cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);
// Verify each (mip, layer) result image against the corresponding slice of
// m_data; 'offset' walks the packed compressed data layout.
528 VkDeviceSize offset = 0ull;
529 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
530 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
532 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
533 const UVec3 size = UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
534 imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
535 imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
536 if (!copyResultAndCompare(*cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
537 return TestStatus::fail("Fail");
538 offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
// Second verification path: decompress and compare rendered output.
542 if (!decompressImage(*cmdBuffer, imageData, mipMapSizes))
543 return TestStatus::fail("Fail");
544 return TestStatus::pass("Pass");
// Upload m_data into every image of 'imageData' via a host-visible staging
// buffer: flush host writes, transition images UNDEFINED -> TRANSFER_DST,
// then copy one region per mip level (all array layers at once), advancing
// 'offset' by the packed compressed size of each level.
547 void BasicComputeTestInstance::copyDataToImage (const VkCommandBuffer& cmdBuffer,
548 ImageData& imageData,
549 const vector<UVec3>& mipMapSizes,
550 const bool isCompressed)
552 const DeviceInterface& vk = m_context.getDeviceInterface();
553 const VkDevice device = m_context.getDevice();
554 const VkQueue queue = m_context.getUniversalQueue();
555 Allocator& allocator = m_context.getDefaultAllocator();
557 Buffer imageBuffer (vk, device, allocator,
558 makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
559 MemoryRequirement::HostVisible);
560 VkDeviceSize offset = 0ull;
// Stage the whole data blob and make host writes visible to the device.
562 const Allocation& alloc = imageBuffer.getAllocation();
563 deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
564 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_data.size());
567 beginCommandBuffer(vk, cmdBuffer);
// Full subresource range of the (first) image: all mips, all layers.
568 const VkImageSubresourceRange subresourceRange =
570 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
571 0u, //deUint32 baseMipLevel
572 imageData.getImageInfo(0u).mipLevels, //deUint32 levelCount
573 0u, //deUint32 baseArrayLayer
574 imageData.getImageInfo(0u).arrayLayers //deUint32 layerCount
577 for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
579 const VkImageMemoryBarrier preCopyImageBarrier = makeImageMemoryBarrier(
580 0u, VK_ACCESS_TRANSFER_WRITE_BIT,
581 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
582 imageData.getImage(imageNdx), subresourceRange);
584 const VkBufferMemoryBarrier FlushHostCopyBarrier = makeBufferMemoryBarrier(
585 VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
586 imageBuffer.get(), 0ull, m_data.size());
588 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
589 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &FlushHostCopyBarrier, 1u, &preCopyImageBarrier);
591 for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
// Compressed images use the per-mip sizes (in blocks for extent math below);
// uncompressed ones use the extent stored in their create info.
593 const VkExtent3D imageExtent = isCompressed ?
594 makeExtent3D(mipMapSizes[mipNdx]) :
595 imageData.getImageInfo(imageNdx).extent;
596 const VkBufferImageCopy copyRegion =
598 offset, //VkDeviceSize bufferOffset;
599 0u, //deUint32 bufferRowLength;
600 0u, //deUint32 bufferImageHeight;
601 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers), //VkImageSubresourceLayers imageSubresource;
602 makeOffset3D(0, 0, 0), //VkOffset3D imageOffset;
603 imageExtent, //VkExtent3D imageExtent;
606 vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
// Advance by the compressed byte size of this level across all layers
// (uncompressed extents are scaled up by the block dimensions first).
607 offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
608 UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed? imageExtent.height :imageExtent.height * m_blockHeight,imageExtent.depth)) *
609 imageData.getImageInfo(imageNdx).arrayLayers;
612 endCommandBuffer(vk, cmdBuffer);
613 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Run the "comp" compute shader once per image view: create a nearest-filter
// sampler, build one descriptor set per view (each referencing the matching
// view of every image slot), transition images to GENERAL, then bind + dispatch
// with the workgroup count equal to the uncompressed image extent.
616 void BasicComputeTestInstance::executeShader (const VkCommandBuffer& cmdBuffer,
617 const VkDescriptorSetLayout& descriptorSetLayout,
618 const VkDescriptorPool& descriptorPool,
619 vector<ImageData>& imageData)
621 const DeviceInterface& vk = m_context.getDeviceInterface();
622 const VkDevice device = m_context.getDevice();
623 const VkQueue queue = m_context.getUniversalQueue();
624 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
625 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
626 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
627 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
628 Move<VkSampler> sampler;
// Nearest/clamp sampler — exact texel values must survive sampling.
630 const VkSamplerCreateInfo createInfo =
632 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
633 DE_NULL, //const void* pNext;
634 0u, //VkSamplerCreateFlags flags;
635 VK_FILTER_NEAREST, //VkFilter magFilter;
636 VK_FILTER_NEAREST, //VkFilter minFilter;
637 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
638 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
639 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
640 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
641 0.0f, //float mipLodBias;
642 VK_FALSE, //VkBool32 anisotropyEnable;
643 1.0f, //float maxAnisotropy;
644 VK_FALSE, //VkBool32 compareEnable;
645 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
646 0.0f, //float minLod;
647 0.0f, //float maxLod;
648 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
649 VK_FALSE, //VkBool32 unnormalizedCoordinates;
651 sampler = createSampler(vk, device, &createInfo);
// One VkDescriptorImageInfo per (view, image slot) pair, laid out slot-major
// within each view so descriptorSetUpdate can index by binding.
654 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
655 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
657 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
658 for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
660 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
661 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
665 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
666 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
668 beginCommandBuffer(vk, cmdBuffer);
670 const VkImageSubresourceRange compressedRange =
672 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
673 0u, //deUint32 baseMipLevel
674 imageData[0].getImageInfo(0u).mipLevels, //deUint32 levelCount
675 0u, //deUint32 baseArrayLayer
676 imageData[0].getImageInfo(0u).arrayLayers //deUint32 layerCount
678 const VkImageSubresourceRange uncompressedRange =
680 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
681 0u, //deUint32 baseMipLevel
682 1u, //deUint32 levelCount
683 0u, //deUint32 baseArrayLayer
684 1u //deUint32 layerCount
687 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
// Transition every uncompressed image to GENERAL for shader writes, plus the
// compressed image (slot 0) from TRANSFER_DST to GENERAL for shader reads.
689 vector<VkImageMemoryBarrier> preShaderImageBarriers;
690 preShaderImageBarriers.resize(descriptorSets.size() + 1u);
691 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
693 preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
694 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
695 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
696 imageData[1].getImage(imageNdx), uncompressedRange);
699 preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
700 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
701 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
702 imageData[0].getImage(0), compressedRange);
704 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
705 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
706 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
// One dispatch per descriptor set / view, sized to the uncompressed extent.
708 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
710 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
711 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
712 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
713 imageData[1].getImageInfo(ndx).extent.height,
714 imageData[1].getImageInfo(ndx).extent.depth);
717 endCommandBuffer(vk, cmdBuffer);
718 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Copy one shader-written uncompressed image back to a host-visible buffer and
// byte-compare it against the source data at 'offset' into m_data. Returns
// true on an exact match. (The size parameter declaration and the final
// return statements were lost in extraction.)
721 bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandBuffer& cmdBuffer,
722 const VkImage& uncompressed,
723 const VkDeviceSize offset,
726 const DeviceInterface& vk = m_context.getDeviceInterface();
727 const VkQueue queue = m_context.getUniversalQueue();
728 const VkDevice device = m_context.getDevice();
729 Allocator& allocator = m_context.getDefaultAllocator();
731 VkDeviceSize imageResultSize = getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
732 Buffer imageBufferResult (vk, device, allocator,
733 makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
734 MemoryRequirement::HostVisible);
736 beginCommandBuffer(vk, cmdBuffer);
738 const VkImageSubresourceRange subresourceRange =
740 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
741 0u, //deUint32 baseMipLevel
742 1u, //deUint32 levelCount
743 0u, //deUint32 baseArrayLayer
744 1u //deUint32 layerCount
747 const VkBufferImageCopy copyRegion =
749 0ull, // VkDeviceSize bufferOffset;
750 0u, // deUint32 bufferRowLength;
751 0u, // deUint32 bufferImageHeight;
752 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
753 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
754 makeExtent3D(size), // VkExtent3D imageExtent;
// GENERAL -> TRANSFER_SRC after the shader wrote the image...
757 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
758 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
759 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
760 uncompressed, subresourceRange);
// ...then make the buffer copy visible to the host before mapping.
762 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
763 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
764 imageBufferResult.get(), 0ull, imageResultSize);
766 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
767 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, &copyRegion);
768 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
770 endCommandBuffer(vk, cmdBuffer);
771 submitCommandsAndWait(vk, device, queue, cmdBuffer);
773 const Allocation& allocResult = imageBufferResult.getAllocation();
774 invalidateMappedMemoryRange(vk, device, allocResult.getMemory(), allocResult.getOffset(), imageResultSize);
// Exact byte comparison against the generated source data slice.
775 if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
// Write one VkDescriptorImageInfo per binding into 'descriptorSet'.
// IMAGE_LOAD/IMAGE_STORE use storage images throughout; TEXEL_FETCH/TEXTURE
// bind the compressed image (binding 0) as a combined image sampler.
780 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
782 const DeviceInterface& vk = m_context.getDeviceInterface();
783 const VkDevice device = m_context.getDevice();
784 DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
786 switch(m_parameters.operation)
788 case OPERATION_IMAGE_LOAD:
789 case OPERATION_IMAGE_STORE:
791 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
792 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
797 case OPERATION_TEXEL_FETCH:
798 case OPERATION_TEXTURE:
800 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
802 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
803 bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
// Flush all queued descriptor writes in one vkUpdateDescriptorSets call.
812 descriptorSetUpdateBuilder.update(vk, device);
// Populate 'imageData' with VkImageCreateInfo structures:
//  - Compressed slot: one mipmapped, arrayed image in the compressed format,
//    created MUTABLE + BLOCK_TEXEL_VIEW_COMPATIBLE + EXTENDED_USAGE so it can
//    be viewed with the uncompressed per-block format.
//  - Uncompressed slots: one single-mip, single-layer image per (mip, layer),
//    sized in compressed blocks of the corresponding level.
// (The if/else on 'isCompressed' and enclosing braces were lost in extraction.)
815 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
817 const VkImageType imageType = mapImageType(m_parameters.imageType);
821 const VkExtent3D extentCompressed = makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
822 const VkImageCreateInfo compressedInfo =
824 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
825 DE_NULL, // const void* pNext;
826 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
827 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR |
828 VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, // VkImageCreateFlags flags;
829 imageType, // VkImageType imageType;
830 m_parameters.formatCompressed, // VkFormat format;
831 extentCompressed, // VkExtent3D extent;
832 static_cast<deUint32>(mipMapSizes.size()), // deUint32 mipLevels;
833 getLayerCount(), // deUint32 arrayLayers;
834 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
835 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
836 VK_IMAGE_USAGE_SAMPLED_BIT |
837 VK_IMAGE_USAGE_STORAGE_BIT |
838 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
839 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
840 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
841 0u, // deUint32 queueFamilyIndexCount;
842 DE_NULL, // const deUint32* pQueueFamilyIndices;
843 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
845 imageData.addImageInfo(compressedInfo);
// One uncompressed-format image per (mip level, array layer) pair.
849 for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
850 for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
// Extent is measured in compressed blocks of this mip level.
852 const VkExtent3D extentUncompressed = m_parameters.useMipmaps ?
853 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
854 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, m_parameters.size));
855 const VkImageCreateInfo uncompressedInfo =
857 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
858 DE_NULL, // const void* pNext;
859 0u, // VkImageCreateFlags flags;
860 imageType, // VkImageType imageType;
861 m_parameters.formatUncompressed, // VkFormat format;
862 extentUncompressed, // VkExtent3D extent;
863 1u, // deUint32 mipLevels;
864 1u, // deUint32 arrayLayers;
865 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
866 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
867 m_parameters.uncompressedImageUsage |
868 VK_IMAGE_USAGE_SAMPLED_BIT, // VkImageUsageFlags usage;
869 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
870 0u, // deUint32 queueFamilyIndexCount;
871 DE_NULL, // const deUint32* pQueueFamilyIndices;
872 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
874 imageData.addImageInfo(uncompressedInfo);
// Verifies the transcoded (uncompressed-view) image contents by decompressing them with a
// compute shader ("decompress" program) and comparing against a reference decompression of
// the same data, per mip level and per array layer. Returns the comparison result
// (exact memcmp first, then fuzzyCompare as a fallback — see bottom of the loop).
879 bool BasicComputeTestInstance::decompressImage (const VkCommandBuffer& cmdBuffer,
880 vector<ImageData>& imageData,
881 const vector<UVec3>& mipMapSizes)
883 const DeviceInterface& vk = m_context.getDeviceInterface();
884 const VkDevice device = m_context.getDevice();
885 const VkQueue queue = m_context.getUniversalQueue();
886 Allocator& allocator = m_context.getDefaultAllocator();
887 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
// imageData[0] holds the original compressed image; the last entry holds the transcoded images.
888 const VkImage& compressed = imageData[0].getImage(0);
// Drop views created by earlier passes; fresh per-mip/per-layer views are created below.
890 for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
891 imageData[ndx].resetViews();
893 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
894 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
// Flat index of this (mip, layer) combination into the transcoded-image array.
896 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
897 const VkExtent3D extentCompressed = makeExtent3D(mipMapSizes[mipNdx]);
898 const VkImage& uncompressed = imageData[m_parameters.imagesCount -1].getImage(imageNdx);
899 const VkExtent3D extentUncompressed = imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
900 const VkDeviceSize bufferSizeComp = getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
// RGBA8 image sized to the full (un-blocked) compressed extent: receives shader output
// (both the result from the transcoded data and the reference decompression).
902 const VkImageCreateInfo decompressedImageInfo =
904 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
905 DE_NULL, // const void* pNext;
906 0u, // VkImageCreateFlags flags;
907 VK_IMAGE_TYPE_2D, // VkImageType imageType;
908 VK_FORMAT_R8G8B8A8_UNORM, // VkFormat format;
909 extentCompressed, // VkExtent3D extent;
910 1u, // deUint32 mipLevels;
911 1u, // deUint32 arrayLayers;
912 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
913 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
914 VK_IMAGE_USAGE_SAMPLED_BIT |
915 VK_IMAGE_USAGE_STORAGE_BIT |
916 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
917 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
918 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
919 0u, // deUint32 queueFamilyIndexCount;
920 DE_NULL, // const deUint32* pQueueFamilyIndices;
921 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
// Single-level image in the real compressed format: the transcoded bytes are copied into it
// so the reference path samples genuine compressed-format texels.
924 const VkImageCreateInfo compressedImageInfo =
926 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
927 DE_NULL, // const void* pNext;
928 0u, // VkImageCreateFlags flags;
929 VK_IMAGE_TYPE_2D, // VkImageType imageType;
930 m_parameters.formatCompressed, // VkFormat format;
931 extentCompressed, // VkExtent3D extent;
932 1u, // deUint32 mipLevels;
933 1u, // deUint32 arrayLayers;
934 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
935 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
936 VK_IMAGE_USAGE_SAMPLED_BIT |
937 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
938 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
939 0u, // deUint32 queueFamilyIndexCount;
940 DE_NULL, // const deUint32* pQueueFamilyIndices;
941 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
// View-usage override (VK_KHR_maintenance2) so a view of the compressed image can be
// restricted to usages compatible with the uncompressed view format.
943 const VkImageUsageFlags compressedViewUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
944 const VkImageViewUsageCreateInfoKHR compressedViewUsageCI =
946 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
947 DE_NULL, //const void* pNext;
948 compressedViewUsageFlags, //VkImageUsageFlags usage;
// NOTE(review): 'uncompressedImage' is created from compressedImageInfo (it holds the
// compressed-format copy of the transcoded data) — the name is misleading; consider renaming.
950 Image resultImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
951 Image referenceImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
952 Image uncompressedImage (vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
953 Move<VkImageView> resultView = makeImageView(vk, device, resultImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
954 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
955 Move<VkImageView> referenceView = makeImageView(vk, device, referenceImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
956 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
957 Move<VkImageView> uncompressedView = makeImageView(vk, device, uncompressedImage.get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
958 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
// View into the single (mipNdx, layerNdx) subresource of the original compressed image.
959 Move<VkImageView> compressedView = makeImageView(vk, device, compressed, mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
960 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
// Bindings: 0 = transcoded sampled image, 1 = original compressed sampled image,
// 2 = result storage image, 3 = reference storage image (must match "decompress" shader).
961 Move<VkDescriptorSetLayout> descriptorSetLayout = DescriptorSetLayoutBuilder()
962 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
963 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
964 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
965 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
967 Move<VkDescriptorPool> descriptorPool = DescriptorPoolBuilder()
968 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
969 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
970 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
971 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
972 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
974 Move<VkDescriptorSet> descriptorSet = makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
975 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
976 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
// Host-readable buffers for the two RGBA8 outputs, plus a staging buffer for the
// compressed bytes round-trip (image -> buffer -> compressed-format image).
977 const VkDeviceSize bufferSize = getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), VK_FORMAT_R8G8B8A8_UNORM);
978 Buffer resultBuffer (vk, device, allocator,
979 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
980 Buffer referenceBuffer (vk, device, allocator,
981 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
982 Buffer transferBuffer (vk, device, allocator,
983 makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
984 Move<VkSampler> sampler;
// Nearest/clamp sampler with normalized coordinates; LOD range [0, 1].
986 const VkSamplerCreateInfo createInfo =
988 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
989 DE_NULL, //const void* pNext;
990 0u, //VkSamplerCreateFlags flags;
991 VK_FILTER_NEAREST, //VkFilter magFilter;
992 VK_FILTER_NEAREST, //VkFilter minFilter;
993 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
994 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
995 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
996 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
997 0.0f, //float mipLodBias;
998 VK_FALSE, //VkBool32 anisotropyEnable;
999 1.0f, //float maxAnisotropy;
1000 VK_FALSE, //VkBool32 compareEnable;
1001 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1002 0.0f, //float minLod;
1003 1.0f, //float maxLod;
1004 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1005 VK_FALSE, //VkBool32 unnormalizedCoordinates;
1007 sampler = createSampler(vk, device, &createInfo);
1010 VkDescriptorImageInfo descriptorImageInfos[] =
1012 makeDescriptorImageInfo(*sampler, *uncompressedView, VK_IMAGE_LAYOUT_GENERAL),
1013 makeDescriptorImageInfo(*sampler, *compressedView, VK_IMAGE_LAYOUT_GENERAL),
1014 makeDescriptorImageInfo(DE_NULL, *resultView, VK_IMAGE_LAYOUT_GENERAL),
1015 makeDescriptorImageInfo(DE_NULL, *referenceView, VK_IMAGE_LAYOUT_GENERAL)
1017 DescriptorSetUpdateBuilder()
1018 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
1019 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
1020 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
1021 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
1022 .update(vk, device);
1025 beginCommandBuffer(vk, cmdBuffer);
// Single-level, single-layer range used for all the per-iteration images below.
1027 const VkImageSubresourceRange subresourceRange =
1029 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1030 0u, //deUint32 baseMipLevel
1031 1u, //deUint32 levelCount
1032 0u, //deUint32 baseArrayLayer
1033 1u //deUint32 layerCount
// Copy region for the RGBA8 result/reference images (full decompressed extent).
1036 const VkBufferImageCopy copyRegion =
1038 0ull, // VkDeviceSize bufferOffset;
1039 0u, // deUint32 bufferRowLength;
1040 0u, // deUint32 bufferImageHeight;
1041 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1042 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1043 decompressedImageInfo.extent, // VkExtent3D imageExtent;
// Copy region for the compressed bytes (extent in compressed blocks, i.e. the transcoded
// image's texel extent).
1046 const VkBufferImageCopy compressedCopyRegion =
1048 0ull, // VkDeviceSize bufferOffset;
1049 0u, // deUint32 bufferRowLength;
1050 0u, // deUint32 bufferImageHeight;
1051 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1052 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1053 extentUncompressed, // VkExtent3D imageExtent;
1058 const VkBufferMemoryBarrier preCopyBufferBarriers = makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1059 transferBuffer.get(), 0ull, bufferSizeComp);
1061 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1062 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
// Read the transcoded subresource back as raw compressed bytes...
1065 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
1068 const VkBufferMemoryBarrier postCopyBufferBarriers = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1069 transferBuffer.get(), 0ull, bufferSizeComp);
1071 const VkImageMemoryBarrier preCopyImageBarriers = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1072 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
1074 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1075 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
// ...and upload them into the compressed-format image for the reference path.
// NOTE(review): '©Region' below appears to be an encoding-mangled '&copyRegion' — restore
// the original token; as rendered this line cannot compile.
1078 vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
1080 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1081 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
// Transition inputs to shader-readable and outputs to GENERAL before dispatch.
1084 const VkImageMemoryBarrier preShaderImageBarriers[] =
1087 makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1088 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1089 uncompressedImage.get(), subresourceRange),
1091 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1092 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1093 resultImage.get(), subresourceRange),
1095 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1096 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1097 referenceImage.get(), subresourceRange)
1100 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1101 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1102 DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
// One invocation per texel of the decompressed extent.
1105 vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
1108 const VkImageMemoryBarrier postShaderImageBarriers[] =
1110 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1111 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1112 resultImage.get(), subresourceRange),
1114 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1115 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1116 referenceImage.get(), subresourceRange)
// NOTE(review): VK_BUFFER_USAGE_TRANSFER_DST_BIT is a VkBufferUsageFlagBits value but the
// second makeBufferMemoryBarrier argument is a VkAccessFlags dstAccessMask — this should
// almost certainly be VK_ACCESS_TRANSFER_WRITE_BIT (the enums happen to share bit values,
// so this compiles but expresses the wrong mask). Confirm against the Vulkan spec and fix.
1119 const VkBufferMemoryBarrier preCopyBufferBarrier[] =
1121 makeBufferMemoryBarrier( 0, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
1122 resultBuffer.get(), 0ull, bufferSize),
1124 makeBufferMemoryBarrier( 0, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
1125 referenceBuffer.get(), 0ull, bufferSize),
1128 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1129 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(preCopyBufferBarrier), preCopyBufferBarrier,
1130 DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
// Read both shader outputs back to host-visible buffers for comparison.
// NOTE(review): '©Region' on the next two lines is the same mangled '&copyRegion'.
1132 vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, ©Region);
1133 vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, ©Region);
1135 endCommandBuffer(vk, cmdBuffer);
1136 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Invalidate before the host reads the device writes.
1138 const Allocation& resultAlloc = resultBuffer.getAllocation();
1139 const Allocation& referenceAlloc = referenceBuffer.getAllocation();
1140 invalidateMappedMemoryRange(vk, device, resultAlloc.getMemory(), resultAlloc.getOffset(), bufferSize);
1141 invalidateMappedMemoryRange(vk, device, referenceAlloc.getMemory(), referenceAlloc.getOffset(), bufferSize);
// Exact comparison first; on mismatch fall back to a fuzzy image compare (logged) to
// tolerate precision differences between the two decompression paths.
1143 if (deMemCmp(resultAlloc.getHostPtr(), referenceAlloc.getHostPtr(), (size_t)bufferSize) != 0)
1145 ConstPixelBufferAccess resultPixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
1146 ConstPixelBufferAccess referencePixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
1148 if(!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
// Compute-test variant that writes the transcoded data via imageStore in a compute shader
// (the "comp" program) instead of the base class's copy path; only executeShader differs.
1156 class ImageStoreComputeTestInstance : public BasicComputeTestInstance
1159 ImageStoreComputeTestInstance (Context& context,
1160 const TestParameters& parameters);
// Records and submits the compute pass: binds per-view descriptor sets and dispatches
// once per image view (see definition below).
1162 virtual void executeShader (const VkCommandBuffer& cmdBuffer,
1163 const VkDescriptorSetLayout& descriptorSetLayout,
1164 const VkDescriptorPool& descriptorPool,
1165 vector<ImageData>& imageData);
// Forwards to the base-class constructor; this subclass adds no state of its own.
1169 ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
1170 :BasicComputeTestInstance (context, parameters)
// Records and submits the "comp" compute shader: one descriptor set and one dispatch per
// image view, with the original compressed image (imageData[0]) read and the transcoded
// images (imageData[1]) / output images (imageData[2]) written via imageStore.
1174 void ImageStoreComputeTestInstance::executeShader (const VkCommandBuffer& cmdBuffer,
1175 const VkDescriptorSetLayout& descriptorSetLayout,
1176 const VkDescriptorPool& descriptorPool,
1177 vector<ImageData>& imageData)
1179 const DeviceInterface& vk = m_context.getDeviceInterface();
1180 const VkDevice device = m_context.getDevice();
1181 const VkQueue queue = m_context.getUniversalQueue();
1182 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
// One descriptor set per view of the compressed image.
1183 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
1184 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
1185 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1186 Move<VkSampler> sampler;
// Nearest/clamp sampler; unnormalized coordinates require minLod == maxLod == 0, which
// this create-info satisfies (both 0.0f).
1188 const VkSamplerCreateInfo createInfo =
1190 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
1191 DE_NULL, //const void* pNext;
1192 0u, //VkSamplerCreateFlags flags;
1193 VK_FILTER_NEAREST, //VkFilter magFilter;
1194 VK_FILTER_NEAREST, //VkFilter minFilter;
1195 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
1196 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
1197 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
1198 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
1199 0.0f, //float mipLodBias;
1200 VK_FALSE, //VkBool32 anisotropyEnable;
1201 1.0f, //float maxAnisotropy;
1202 VK_FALSE, //VkBool32 compareEnable;
1203 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1204 0.0f, //float minLod;
1205 0.0f, //float maxLod;
1206 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1207 VK_TRUE, //VkBool32 unnormalizedCoordinates;
1209 sampler = createSampler(vk, device, &createInfo);
// Flat table of descriptor-image infos: imagesCount entries per view, laid out
// [view0: image0..imageN-1][view1: ...] to match descriptorSetUpdate's expectations.
1212 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
1213 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
1215 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
1216 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
1218 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
1219 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
1223 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
1224 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
1226 beginCommandBuffer(vk, cmdBuffer);
// Full mip/layer range of the compressed source image.
1228 const VkImageSubresourceRange compressedRange =
1230 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1231 0u, //deUint32 baseMipLevel
1232 imageData[0].getImageInfo(0).mipLevels, //deUint32 levelCount
1233 0u, //deUint32 baseArrayLayer
1234 imageData[0].getImageInfo(0).arrayLayers //deUint32 layerCount
// Transcoded/output images are single-level, single-layer.
1237 const VkImageSubresourceRange uncompressedRange =
1239 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1240 0u, //deUint32 baseMipLevel
1241 1u, //deUint32 levelCount
1242 0u, //deUint32 baseArrayLayer
1243 1u //deUint32 layerCount
1246 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
// Barriers: transition every imageData[1] and imageData[2] image to GENERAL for shader
// writes, plus one final barrier for the compressed source (shader read).
1248 vector<VkImageMemoryBarrier> preShaderImageBarriers (descriptorSets.size() * 2u + 1u);
1249 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
1251 preShaderImageBarriers[imageNdx] = makeImageMemoryBarrier(
1252 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1253 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1254 imageData[1].getImage(imageNdx), uncompressedRange);
1256 preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()] = makeImageMemoryBarrier(
1257 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1258 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1259 imageData[2].getImage(imageNdx), uncompressedRange);
1262 preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
1263 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1264 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1265 imageData[0].getImage(0u), compressedRange);
1267 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1268 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1269 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
// Per view: update its descriptor set, bind it, and dispatch one invocation per texel
// of the corresponding transcoded image.
1271 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
1273 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
1274 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
1275 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
1276 imageData[1].getImageInfo(ndx).extent.height,
1277 imageData[1].getImageInfo(ndx).extent.depth);
1280 endCommandBuffer(vk, cmdBuffer);
1281 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Graphics-pipeline transcoding test: renders between compressed and uncompressed views
// used as framebuffer attachments, then verifies by decompressing and comparing.
1284 class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
1287 GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters);
1288 virtual TestStatus iterate (void);
// True when the tested operation writes INTO the compressed image (transcodeWrite path);
// drives the src/dst format, usage and data-direction selection in prepareData().
1291 virtual bool isWriteToCompressedOperation ();
1292 VkImageCreateInfo makeCreateImageInfo (const VkFormat format,
1293 const ImageType type,
1295 const VkImageUsageFlags usageFlags,
1296 const VkImageCreateFlags* createFlags,
1297 const deUint32 levels,
1298 const deUint32 layers);
// Fill 'data' with generated compressed / uncompressed reference content for one
// (layer, level); return the byte size produced.
1299 VkDeviceSize getCompressedImageData (const VkFormat format,
1301 std::vector<deUint8>& data,
1302 const deUint32 layer,
1303 const deUint32 level);
1304 VkDeviceSize getUncompressedImageData (const VkFormat format,
1306 std::vector<deUint8>& data,
1307 const deUint32 layer,
1308 const deUint32 level);
1309 virtual void prepareData ();
1310 virtual void prepareVertexBuffer ();
1311 virtual void transcodeRead ();
1312 virtual void transcodeWrite ();
// Compare a decompression of 'resCompressedImage' against 'refCompressedData' for one
// (layer, level) subresource; false on mismatch.
1313 bool verifyDecompression (const std::vector<deUint8>& refCompressedData,
1314 const de::MovePtr<Image>& resCompressedImage,
1315 const deUint32 layer,
1316 const deUint32 level,
1317 const UVec3& mipmapDims);
// Raw image bytes indexed as [level][layer] -> shared byte vector.
1319 typedef std::vector<deUint8> RawDataVector;
1320 typedef SharedPtr<RawDataVector> RawDataPtr;
1321 typedef std::vector<RawDataPtr> LevelData;
1322 typedef std::vector<LevelData> FullImageData;
1324 FullImageData m_srcData;
1325 FullImageData m_dstData;
// Per-[level][layer] uncompressed images created during transcode.
1327 typedef SharedPtr<Image> ImagePtr;
1328 typedef std::vector<ImagePtr> LevelImages;
1329 typedef std::vector<LevelImages> ImagesArray;
1331 ImagesArray m_uncompressedImages;
1332 MovePtr<Image> m_compressedImage;
// View-usage override (maintenance2) applied to whichever side uses the compressed view;
// the other side's pointer stays null (see prepareData()).
1334 VkImageViewUsageCreateInfoKHR m_imageViewUsageKHR;
1335 VkImageViewUsageCreateInfoKHR* m_srcImageViewUsageKHR;
1336 VkImageViewUsageCreateInfoKHR* m_dstImageViewUsageKHR;
1337 std::vector<tcu::UVec3> m_compressedImageResVec;
1338 std::vector<tcu::UVec3> m_uncompressedImageResVec;
// src/dst formats, usages and per-level resolutions, chosen by the read/write direction.
1339 VkFormat m_srcFormat;
1340 VkFormat m_dstFormat;
1341 VkImageUsageFlags m_srcImageUsageFlags;
1342 VkImageUsageFlags m_dstImageUsageFlags;
1343 std::vector<tcu::UVec3> m_srcImageResolutions;
1344 std::vector<tcu::UVec3> m_dstImageResolutions;
// Fullscreen-quad vertex data shared by all render passes (see prepareVertexBuffer()).
1346 MovePtr<Buffer> m_vertexBuffer;
1347 deUint32 m_vertexCount;
1348 VkDeviceSize m_vertexBufferOffset;
// Value-initializes all members; real setup is deferred to prepareData() /
// prepareVertexBuffer() during iterate(). (Some initializer-list entries for data/format
// members are on lines not visible here.)
1351 GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
1352 : BasicTranscodingTestInstance(context, parameters)
1355 , m_uncompressedImages()
1356 , m_compressedImage()
1357 , m_imageViewUsageKHR()
1358 , m_srcImageViewUsageKHR()
1359 , m_dstImageViewUsageKHR()
1360 , m_compressedImageResVec()
1361 , m_uncompressedImageResVec()
1364 , m_srcImageUsageFlags()
1365 , m_dstImageUsageFlags()
1366 , m_srcImageResolutions()
1367 , m_dstImageResolutions()
1370 , m_vertexBufferOffset(0ull)
// Test entry point: prepares data and vertices, runs the transcode pass in the direction
// selected by isWriteToCompressedOperation() (the transcodeWrite()/transcodeRead() calls
// are on lines not visible here), then verifies every (level, layer) by decompression.
1374 TestStatus GraphicsAttachmentsTestInstance::iterate (void)
1377 prepareVertexBuffer();
// Sanity: source and destination payloads must be size-matched per subresource.
1379 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1380 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1381 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1383 if (isWriteToCompressedOperation())
// Verify: for writes, the reference is the source data; for reads, the destination data.
1388 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1389 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1390 if (isWriteToCompressedOperation())
1392 if (!verifyDecompression(*m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1393 return TestStatus::fail("Images difference detected");
1397 if (!verifyDecompression(*m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1398 return TestStatus::fail("Images difference detected");
1401 return TestStatus::pass("Pass");
// Selects src/dst formats, usages, resolutions and view-usage overrides according to the
// transcode direction, then generates source payloads and sizes destination buffers for
// every (level, layer) subresource.
1404 void GraphicsAttachmentsTestInstance::prepareData ()
1406 VkImageViewUsageCreateInfoKHR* imageViewUsageKHRNull = (VkImageViewUsageCreateInfoKHR*)DE_NULL;
1408 m_imageViewUsageKHR = makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
// The usage override is attached only to the side that views the compressed image:
// dst when writing to compressed, src when reading from it.
1410 m_srcImageViewUsageKHR = isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
1411 m_dstImageViewUsageKHR = isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
1413 m_srcFormat = isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
1414 m_dstFormat = isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
1416 m_srcImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
1417 m_dstImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
// Full mip chain of the compressed image, and the matching per-level block resolutions.
1419 m_compressedImageResVec = getMipLevelSizes(getLayerDims());
1420 m_uncompressedImageResVec = getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
1422 m_srcImageResolutions = isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
1423 m_dstImageResolutions = isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
1425 m_srcData.resize(getLevelCount());
1426 m_dstData.resize(getLevelCount());
1427 m_uncompressedImages.resize(getLevelCount());
1429 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1431 m_srcData[levelNdx].resize(getLayerCount());
1432 m_dstData[levelNdx].resize(getLayerCount());
1433 m_uncompressedImages[levelNdx].resize(getLayerCount());
1435 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1437 m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1438 m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1440 if (isWriteToCompressedOperation())
// Writing to compressed: generate uncompressed source, reserve compressed-size dst.
1442 getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1444 m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
// Reading from compressed: generate compressed source, reserve uncompressed-size dst.
1448 getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1450 m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
// Both directions must produce size-matched src/dst payloads per subresource.
1453 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
// Creates a host-visible vertex buffer holding a fullscreen quad and uploads the vertex
// data; sets m_vertexCount / m_vertexBuffer used by the transcode render passes.
1458 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
1460 const DeviceInterface& vk = m_context.getDeviceInterface();
1461 const VkDevice device = m_context.getDevice();
1462 Allocator& allocator = m_context.getDefaultAllocator();
1464 const std::vector<tcu::Vec4> vertexArray = createFullscreenQuad();
1465 const size_t vertexBufferSizeInBytes = vertexArray.size() * sizeof(vertexArray[0]);
1467 m_vertexCount = static_cast<deUint32>(vertexArray.size());
1468 m_vertexBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
1470 // Upload vertex data
1471 const Allocation& vertexBufferAlloc = m_vertexBuffer->getAllocation();
1472 deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
// Flush so the device sees the host writes (memory may be non-coherent).
1473 flushMappedMemoryRange(vk, device, vertexBufferAlloc.getMemory(), vertexBufferAlloc.getOffset(), vertexBufferSizeInBytes);
1476 void GraphicsAttachmentsTestInstance::transcodeRead ()
1478 const DeviceInterface& vk = m_context.getDeviceInterface();
1479 const VkDevice device = m_context.getDevice();
1480 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1481 const VkQueue queue = m_context.getUniversalQueue();
1482 Allocator& allocator = m_context.getDefaultAllocator();
1484 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
1486 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
1487 MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
1489 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1490 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
1492 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
1494 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1495 .addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
1496 .build(vk, device));
1497 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1498 .addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1499 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1500 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1502 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
1503 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1504 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));
1506 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
1507 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1509 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1511 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
1512 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
1513 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
1514 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
1515 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
1516 const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
1518 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
1520 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
1521 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
1523 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
1524 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
1526 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
1527 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
1528 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
1530 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1532 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
1533 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1535 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
1537 de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
1538 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
1540 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
1541 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
1542 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
1543 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
1544 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
1545 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);
1547 const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
1548 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
1549 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));
1551 // Upload source image data
1552 const Allocation& alloc = srcImageBuffer->getAllocation();
1553 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
1554 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
1556 beginCommandBuffer(vk, *cmdBuffer);
1557 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1559 // Copy buffer to image
1560 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
1561 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
1562 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
1564 // Define destination image layout
1565 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
1567 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1569 const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
1570 DescriptorSetUpdateBuilder()
1571 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
1572 .update(vk, device);
1574 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1575 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1577 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
1578 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
1580 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
1582 vk.cmdEndRenderPass(*cmdBuffer);
1584 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
1585 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1586 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1587 dstImage->get(), dstSubresourceRange);
1589 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
1590 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1591 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
1593 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
1594 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
1595 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1597 endCommandBuffer(vk, *cmdBuffer);
1599 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1601 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
1602 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
1603 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
1607 m_compressedImage = srcImage;
1610 void GraphicsAttachmentsTestInstance::transcodeWrite ()
1612 const DeviceInterface& vk = m_context.getDeviceInterface();
1613 const VkDevice device = m_context.getDevice();
1614 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1615 const VkQueue queue = m_context.getUniversalQueue();
1616 Allocator& allocator = m_context.getDefaultAllocator();
1618 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
1620 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
1621 MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
1623 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1624 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
1626 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
1628 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1629 .addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
1630 .build(vk, device));
1631 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1632 .addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1633 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1634 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1636 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
1637 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1638 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));
1640 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
1641 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1643 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1645 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
1646 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
1647 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
1648 const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
1649 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
1650 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
1652 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
1654 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
1655 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
1656 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
1658 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1660 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
1661 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
1663 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
1664 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
1666 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1667 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
1669 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
1671 de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
1672 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
1674 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
1675 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
1676 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
1677 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
1678 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
1679 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
1681 const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
1682 const VkExtent2D framebufferSize (renderSize);
1683 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));
1685 // Upload source image data
1686 const Allocation& alloc = srcImageBuffer->getAllocation();
1687 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
1688 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
1690 beginCommandBuffer(vk, *cmdBuffer);
1691 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1693 // Copy buffer to image
1694 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
1695 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
1696 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
1698 // Define destination image layout
1699 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
1701 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1703 const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
1704 DescriptorSetUpdateBuilder()
1705 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
1706 .update(vk, device);
1708 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1709 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1711 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
1712 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
1714 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
1716 vk.cmdEndRenderPass(*cmdBuffer);
1718 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
1719 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1720 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1721 dstImage->get(), dstSubresourceRange);
1723 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
1724 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1725 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
1727 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
1728 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
1729 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1731 endCommandBuffer(vk, *cmdBuffer);
1733 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1735 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
1736 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
1737 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
1741 m_compressedImage = dstImage;
// Returns true when the tested operation renders INTO the compressed image
// (the attachment-write path); used to pick transcode direction and to order
// reference vs. result images when logging comparison failures.
1744 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
1746 return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
// Builds a VkImageCreateInfo for a single-sample, optimally-tiled, exclusive
// 2D/1D/3D image (type mapped from the dEQP ImageType). If 'createFlags' is
// null, the flags default to MUTABLE_FORMAT plus — for compressed formats —
// BLOCK_TEXEL_VIEW_COMPATIBLE and EXTENDED_USAGE (VK_KHR_maintenance2), which
// legalize uncompressed-format views of the compressed image. A non-null
// 'createFlags' overrides the defaults entirely.
// NOTE(review): the 'size' parameter used for the extent is declared on a line
// elided from this extract (between lines 1750 and 1752) — confirm upstream.
1749 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat format,
1750 const ImageType type,
1752 const VkImageUsageFlags usageFlags,
1753 const VkImageCreateFlags* createFlags,
1754 const deUint32 levels,
1755 const deUint32 layers)
1757 const VkImageType imageType = mapImageType(type);
1758 const VkImageCreateFlags imageCreateFlagsBase = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1759 const VkImageCreateFlags imageCreateFlagsAddOn = isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR : 0;
1760 const VkImageCreateFlags imageCreateFlags = (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
1762 const VkImageCreateInfo createImageInfo =
1764 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1765 DE_NULL, // const void* pNext;
1766 imageCreateFlags, // VkImageCreateFlags flags;
1767 imageType, // VkImageType imageType;
1768 format, // VkFormat format;
1769 makeExtent3D(getLayerSize(type, size)), // VkExtent3D extent;
1770 levels, // deUint32 mipLevels;
1771 layers, // deUint32 arrayLayers;
1772 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1773 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1774 usageFlags, // VkImageUsageFlags usage;
1775 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1776 0u, // deUint32 queueFamilyIndexCount;
1777 DE_NULL, // const deUint32* pQueueFamilyIndices;
1778 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1781 return createImageInfo;
// Resizes 'data' to the byte size of the compressed image (format x size) and
// fills it with generated texel data seeded by (layer, level). Returns the
// size in bytes (return statement is on a line elided from this extract; the
// 'size' parameter declaration between lines 1784 and 1786 is likewise elided).
1784 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat format,
1786 std::vector<deUint8>& data,
1787 const deUint32 layer,
1788 const deUint32 level)
1790 VkDeviceSize sizeBytes = getCompressedImageSizeInBytes(format, size);
1792 data.resize((size_t)sizeBytes);
1793 generateData(&data[0], data.size(), format, layer, level);
// Uncompressed counterpart of getCompressedImageData: sizes 'data' for a plain
// (non-block-compressed) image of 'size' texels and fills it with generated
// data seeded by (layer, level). Returns the size in bytes (return statement
// on an elided line; the 'size' parameter declaration is likewise elided).
1798 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat format,
1800 std::vector<deUint8>& data,
1801 const deUint32 layer,
1802 const deUint32 level)
// getImageSizeBytes takes a signed IVec3, hence the explicit per-component cast.
1804 tcu::IVec3 sizeAsIVec3 = tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
1805 VkDeviceSize sizeBytes = getImageSizeBytes(sizeAsIVec3, format);
1807 data.resize((size_t)sizeBytes);
1808 generateData(&data[0], data.size(), format, layer, level);
// Verifies one (level, layer) of the transcoded compressed image by GPU
// decompression: the reference data is uploaded into a fresh compressed image,
// then a fragment shader samples BOTH compressed images (reference and result)
// and writes the decoded texels into two storage images, which are copied to
// host-visible buffers and compared byte-wise with deMemCmp. On mismatch a
// fuzzy compare is run purely to log an error mask to the test log.
// NOTE(review): '&copyBufferToImageRegion' / '&copyRegion' below were restored
// from mojibake '©…' identifiers ("&copy" decoded as an HTML entity).
1813 bool GraphicsAttachmentsTestInstance::verifyDecompression (const std::vector<deUint8>& refCompressedData,
1814 const de::MovePtr<Image>& resCompressedImage,
1815 const deUint32 level,
1816 const deUint32 layer,
1817 const UVec3& mipmapDims)
1819 const DeviceInterface& vk = m_context.getDeviceInterface();
1820 const VkDevice device = m_context.getDevice();
1821 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1822 const VkQueue queue = m_context.getUniversalQueue();
1823 Allocator& allocator = m_context.getDefaultAllocator();
// Reference image extent is expressed in whole compressed blocks (rounded up).
1825 const UVec3 mipmapDimsBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);
// The fresh single-subresource images use (0,0); the result image is addressed
// at the (level, layer) under test.
1827 const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1828 const VkImageSubresourceRange resSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);
1830 const VkDeviceSize dstBufferSize = getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
1831 const VkImageUsageFlags refSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
// Reference side: staging buffer + compressed image filled from refCompressedData.
1833 const VkBufferCreateInfo refSrcImageBufferInfo (makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
1834 const MovePtr<Buffer> refSrcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));
1836 const VkImageCreateFlags refSrcImageCreateFlags = 0;
1837 const VkImageCreateInfo refSrcImageCreateInfo = makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1838 const MovePtr<Image> refSrcImage (new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
1839 Move<VkImageView> refSrcImageView (makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));
// Result side: a compressed-format view of the image produced by the transcode.
1841 const VkImageUsageFlags resSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1842 const VkImageViewUsageCreateInfoKHR resSrcImageViewUsageKHR = makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
1843 Move<VkImageView> resSrcImageView (makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));
// Two storage images receive the decoded texels (reference and result).
1845 const VkImageCreateFlags refDstImageCreateFlags = 0;
1846 const VkImageUsageFlags refDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1847 const VkImageCreateInfo refDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1848 const MovePtr<Image> refDstImage (new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
1849 const Move<VkImageView> refDstImageView (makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
1850 const VkImageMemoryBarrier refDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
1851 const VkBufferCreateInfo refDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
1852 const MovePtr<Buffer> refDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));
1854 const VkImageCreateFlags resDstImageCreateFlags = 0;
1855 const VkImageUsageFlags resDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1856 const VkImageCreateInfo resDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1857 const MovePtr<Image> resDstImage (new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
1858 const Move<VkImageView> resDstImageView (makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
1859 const VkImageMemoryBarrier resDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
1860 const VkBufferCreateInfo resDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
1861 const MovePtr<Buffer> resDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));
1863 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1864 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));
1866 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
// Bindings: 0/1 = sampled compressed images (ref/res), 2/3 = storage outputs.
1868 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1869 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
1870 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
1871 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
1872 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
1873 .build(vk, device));
1874 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1875 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
1876 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
1877 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1878 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1879 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1880 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1881 const VkSamplerCreateInfo refSrcSamplerInfo (makeSamplerCreateInfo());
1882 const Move<VkSampler> refSrcSampler = vk::createSampler(vk, device, &refSrcSamplerInfo);
1883 const VkSamplerCreateInfo resSrcSamplerInfo (makeSamplerCreateInfo());
1884 const Move<VkSampler> resSrcSampler = vk::createSampler(vk, device, &resSrcSamplerInfo);
1885 const VkDescriptorImageInfo descriptorRefSrcImage (makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, VK_IMAGE_LAYOUT_GENERAL));
1886 const VkDescriptorImageInfo descriptorResSrcImage (makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, VK_IMAGE_LAYOUT_GENERAL));
1887 const VkDescriptorImageInfo descriptorRefDstImage (makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
1888 const VkDescriptorImageInfo descriptorResDstImage (makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));
1890 const VkExtent2D renderSize (makeExtent2D(mipmapDims.x(), mipmapDims.y()));
1891 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1892 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
1893 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT, queueFamilyIndex));
1894 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// Upload region is in blocks; readback region is in decoded texels.
1896 const VkBufferImageCopy copyBufferToImageRegion = makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
1897 const VkBufferImageCopy copyRegion = makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
1898 const VkBufferMemoryBarrier refSrcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
1899 const VkImageMemoryBarrier refSrcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
1900 const VkImageMemoryBarrier refSrcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
// No color attachments: all shader output goes through the storage images.
1902 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize, getLayerCount()));
1904 // Upload source image data
1906 const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
1907 deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
1908 flushMappedMemoryRange(vk, device, refSrcImageBufferAlloc.getMemory(), refSrcImageBufferAlloc.getOffset(), refCompressedData.size());
1911 beginCommandBuffer(vk, *cmdBuffer);
1912 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1914 // Copy buffer to image
1915 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
1916 vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, &copyBufferToImageRegion);
1917 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);
1919 // Make reference and result images readable
1920 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
1921 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
1923 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1925 DescriptorSetUpdateBuilder()
1926 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
1927 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
1928 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
1929 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
1930 .update(vk, device);
1932 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1933 vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1934 vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
1936 vk.cmdEndRenderPass(*cmdBuffer);
1938 // Decompress reference image
1940 const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
1941 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1942 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1943 refDstImage->get(), subresourceRange);
1945 const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
1946 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1947 refDstBuffer->get(), 0ull, dstBufferSize);
1949 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
1950 vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, &copyRegion);
1951 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1954 // Decompress result image
1956 const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
1957 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1958 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1959 resDstImage->get(), subresourceRange);
1961 const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
1962 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1963 resDstBuffer->get(), 0ull, dstBufferSize);
1965 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
1966 vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, &copyRegion);
1967 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1970 endCommandBuffer(vk, *cmdBuffer);
1972 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1974 // Compare decompressed pixel data in reference and result images
1976 const Allocation& refDstBufferAlloc = refDstBuffer->getAllocation();
1977 invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);
1979 const Allocation& resDstBufferAlloc = resDstBuffer->getAllocation();
1980 invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
1982 if (deMemCmp(refDstBufferAlloc.getHostPtr(), resDstBufferAlloc.getHostPtr(), (size_t)dstBufferSize) != 0)
1984 // Do fuzzy to log error mask
1985 invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
1986 invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);
1988 tcu::ConstPixelBufferAccess resPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
1989 tcu::ConstPixelBufferAccess refPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());
1991 string comment = string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");
// Reference image goes first in the log for the write path, result first otherwise.
1993 if (isWriteToCompressedOperation())
1994 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
1996 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
// Graphics-pipeline variant that transcodes via texture sampling in a fragment
// shader (texture()/imageStore()) rather than via render-pass attachments.
// The shared per-level/per-layer driving logic lives in the base class.
class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
{
public:
					GraphicsTextureTestInstance		(Context& context, const TestParameters& parameters);

protected:
	virtual bool	isWriteToCompressedOperation	();	// true only for OPERATION_TEXTURE_WRITE
	virtual void	transcodeRead					();	// compressed source -> uncompressed readback
	virtual void	transcodeWrite					();	// uncompressed source -> compressed destination
};
// All state setup is handled by the attachments base class; this subclass only
// overrides the transcode direction hooks.
GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
	: GraphicsAttachmentsTestInstance(context, parameters)
{
}
2022 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
2024 return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
// Transcode for the texture-read operation: compressed test data is uploaded
// into srcImage; for each mip level / array layer a fragment shader samples it
// through an uncompressed-format view and imageStore()s the texels into a
// transient uncompressed dstImage, which is then copied back into
// m_dstData[level][layer] for host-side verification.
void GraphicsTextureTestInstance::transcodeRead ()
{
	const DeviceInterface&		vk					= m_context.getDeviceInterface();
	const VkDevice				device				= m_context.getDevice();
	const deUint32				queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
	const VkQueue				queue				= m_context.getUniversalQueue();
	Allocator&					allocator			= m_context.getDefaultAllocator();

	const VkImageCreateFlags*	imgCreateFlagsOverride	= DE_NULL;

	// One compressed-format source image carrying every mip level and layer.
	const VkImageCreateInfo		srcImageCreateInfo	= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image>				srcImage			(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule>		vertShaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule>		fragShaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	const Unique<VkRenderPass>			renderPass			(makeRenderPass(vk, device));

	// Binding 0: sampled (combined image sampler) source view; binding 1: storage destination view.
	const Move<VkDescriptorSetLayout>	descriptorSetLayout	(DescriptorSetLayoutBuilder()
															.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
															.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
															.build(vk, device));
	const Move<VkDescriptorPool>		descriptorPool		(DescriptorPoolBuilder()
															.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
															.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
															.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet>			descriptorSet		(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// Pipeline is built once with a dummy 1x1 extent; per-level sizes are applied
	// via dynamic viewport/scissor state (trailing 'true' argument).
	const VkExtent2D					renderSizeDummy		(makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout>		pipelineLayout		(makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline>			pipeline			(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));

	const Unique<VkCommandPool>			cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>		cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
		// Source extent rounded up to whole compressed blocks (staging rows are block-aligned).
		const UVec3					srcImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);

		const VkImageCreateInfo		dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkBufferCreateInfo	srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
		const MovePtr<Buffer>		srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

		const VkBufferCreateInfo	dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
		MovePtr<Buffer>				dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			// Source view targets the current level/layer of the shared image;
			// the destination image is a fresh single-level, single-layer image.
			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);

			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			de::MovePtr<Image>				dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			const VkSamplerCreateInfo		srcSamplerInfo			(makeSamplerCreateInfo());
			const Move<VkSampler>			srcSampler				= vk::createSampler(vk, device, &srcSamplerInfo);
			const VkDescriptorImageInfo		descriptorSrcImage		(makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			const VkDescriptorImageInfo		descriptorDstImage		(makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));

			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);

			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			// NOTE(review): dstStageMask TOP_OF_PIPE paired with a SHADER_READ dstAccessMask looks
			// suspect -- per the Vulkan spec TOP_OF_PIPE performs no memory accesses; the expected
			// destination stage here would be FRAGMENT_SHADER. Confirm against spec/CTS history.
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Descriptor set is rewritten each iteration for the fresh per-layer views.
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			vk.cmdEndRenderPass(*cmdBuffer);

			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			// Shader writes -> transfer read -> host read chain for the readback.
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			// Copy the transcoded texels back for host-side verification.
			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// Keep the compressed image alive for the later decompression/comparison pass.
	m_compressedImage = srcImage;
}
// Transcode for the texture-write operation: mirror image of transcodeRead().
// Here the *destination* is the persistent compressed image (written through an
// uncompressed-format view); the uncompressed source is a transient
// per-iteration image. Each level/layer result is copied back into
// m_dstData[level][layer] for host-side verification.
void GraphicsTextureTestInstance::transcodeWrite ()
{
	const DeviceInterface&		vk					= m_context.getDeviceInterface();
	const VkDevice				device				= m_context.getDevice();
	const deUint32				queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
	const VkQueue				queue				= m_context.getUniversalQueue();
	Allocator&					allocator			= m_context.getDefaultAllocator();

	const VkImageCreateFlags*	imgCreateFlagsOverride	= DE_NULL;

	// One compressed-format destination image carrying every mip level and layer.
	const VkImageCreateInfo		dstImageCreateInfo	= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image>				dstImage			(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule>		vertShaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule>		fragShaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	const Unique<VkRenderPass>			renderPass			(makeRenderPass(vk, device));

	// Binding 0: sampled (combined image sampler) source view; binding 1: storage destination view.
	const Move<VkDescriptorSetLayout>	descriptorSetLayout	(DescriptorSetLayoutBuilder()
															.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
															.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
															.build(vk, device));
	const Move<VkDescriptorPool>		descriptorPool		(DescriptorPoolBuilder()
															.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
															.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
															.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet>			descriptorSet		(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// Pipeline is built once with a dummy 1x1 extent; per-level sizes are applied
	// via dynamic viewport/scissor state (trailing 'true' argument).
	const VkExtent2D					renderSizeDummy		(makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout>		pipelineLayout		(makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline>			pipeline			(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));

	const Unique<VkCommandPool>			cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>		cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
		// Destination extent rounded up to whole compressed blocks (readback rows are block-aligned).
		const UVec3					dstImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);

		const VkImageCreateInfo		srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			// Staging buffers are per layer here (unlike transcodeRead, which hoists them per level).
			const VkBufferCreateInfo		srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
			const MovePtr<Buffer>			srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

			const VkBufferCreateInfo		dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
			MovePtr<Buffer>					dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

			// Source image is single level/layer; destination view targets the current
			// level/layer of the shared compressed image.
			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);

			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			de::MovePtr<Image>				srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			const VkSamplerCreateInfo		srcSamplerInfo			(makeSamplerCreateInfo());
			const Move<VkSampler>			srcSampler				= vk::createSampler(vk, device, &srcSamplerInfo);
			const VkDescriptorImageInfo		descriptorSrcImage		(makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			const VkDescriptorImageInfo		descriptorDstImage		(makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));

			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);

			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			// NOTE(review): dstStageMask TOP_OF_PIPE paired with a SHADER_READ dstAccessMask looks
			// suspect -- per the Vulkan spec TOP_OF_PIPE performs no memory accesses; the expected
			// destination stage here would be FRAGMENT_SHADER. Confirm against spec/CTS history.
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Descriptor set is rewritten each iteration for the fresh per-layer views.
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			vk.cmdEndRenderPass(*cmdBuffer);

			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			// Shader writes -> transfer read -> host read chain for the readback.
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			// Copy the transcoded (compressed) bytes back for host-side verification.
			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// Keep the compressed image alive for the later decompression/comparison pass.
	m_compressedImage = dstImage;
}
// Test case for VK_KHR_maintenance2 block-texel-view-compatible transcoding:
// compiles the shaders for the configured operation and instantiates the
// matching compute/graphics test instance.
class TexelViewCompatibleCase : public TestCase
{
public:
							TexelViewCompatibleCase		(TestContext&			testCtx,
														 const std::string&		name,
														 const std::string&		desc,
														 const TestParameters&	parameters);
	void					initPrograms				(SourceCollections&		programCollection) const;
	TestInstance*			createInstance				(Context&				context) const;

protected:
	const TestParameters	m_parameters;	// immutable test configuration (formats, operation, shader stage, size)
};
// Stores the test parameters; all real work happens in initPrograms()/createInstance().
TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const std::string& desc, const TestParameters& parameters)
	: TestCase		(testCtx, name, desc)
	, m_parameters	(parameters)
{
}
// Builds the GLSL programs for the configured shader stage and operation.
// Compute variants produce a transcode shader ("comp") plus a decompression
// comparison shader ("decompress"); fragment variants produce "vert"/"frag"
// plus a verification shader ("frag_verify").
void TexelViewCompatibleCase::initPrograms (vk::SourceCollections& programCollection) const
{
	DE_ASSERT(m_parameters.size.x() > 0);
	DE_ASSERT(m_parameters.size.y() > 0);

	switch (m_parameters.shader)
	{
		case SHADER_TYPE_COMPUTE:
		{
			// Image/sampler declarations are generated from the uncompressed view format.
			const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
			const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
			std::ostringstream	src;
			std::ostringstream	src_decompress;

			// One invocation per texel (1x1x1 local size); both shaders share the preamble.
			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
				<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
			src_decompress << src.str();

			switch(m_parameters.operation)
			{
				case OPERATION_IMAGE_LOAD:
				{
					src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< "    imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				case OPERATION_TEXEL_FETCH:
				{
					// pos.z selects the array layer for texelFetch.
					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
						<< "    imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n"
						<< "}\n";

					break;
				}

				case OPERATION_TEXTURE:
				{
					// Normalized coordinates map invocation ID onto [0,1] across the dispatch.
					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x - 1, gl_NumWorkGroups.y - 1);\n"
						<< "    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< "    const vec2 coord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
						<< "    imageStore(u_image1, pos, texture(u_image0, coord));\n"
						<< "}\n";

					break;
				}

				case OPERATION_IMAGE_STORE:
				{
					// Round-trip: image1 -> image0 (read/write) -> image2.
					src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image1;\n"
						<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< "    imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
						<< "    imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				default:
					DE_ASSERT(false);
			}

			// Decompression shader: samples the compressed result and reference images
			// and stores the decompressed texels for the final comparison pass.
			src_decompress	<< "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
							<< "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_reference;\n"
							<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_result;\n"
							<< "layout (binding = 3, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_reference;\n\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
							<< "    const vec2 cord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
							<< "    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); \n"
							<< "    imageStore(decompressed_result, pos, texture(compressed_result, cord));\n"
							<< "    imageStore(decompressed_reference, pos, texture(compressed_reference, cord));\n"
							<< "}\n";
			programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
			programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());

			break;
		}

		case SHADER_TYPE_FRAGMENT:
		{
			// 2D-array images are accessed layer-by-layer through 2D views in the fragment paths.
			ImageType	imageTypeForFS = (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;

			// Vertex shader: pass-through position.
			{
				std::ostringstream src;
				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout(location = 0) in vec4 v_in_position;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< "    gl_Position = v_in_position;\n"
					<< "}\n";

				programCollection.glslSources.add("vert") << glu::VertexSource(src.str());
			}

			// Fragment shader for the selected transcode operation.
			{
				switch(m_parameters.operation)
				{
					case OPERATION_ATTACHMENT_READ:
					case OPERATION_ATTACHMENT_WRITE:
					{
						std::ostringstream	src;

						// Output/input types derive from the uncompressed view format
						// (e.g. uvec4 for uint formats).
						const std::string	dstTypeStr	= getGlslFormatType(m_parameters.formatUncompressed);
						const std::string	srcTypeStr	= getGlslInputFormatType(m_parameters.formatUncompressed);

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "precision highp int;\n"
							<< "precision highp float;\n"
							<< "\n"
							<< "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
							<< "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "    o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					case OPERATION_TEXTURE_READ:
					case OPERATION_TEXTURE_WRITE:
					{
						std::ostringstream	src;

						const std::string	srcSamplerTypeStr		= getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
						const std::string	dstImageTypeStr			= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
						const std::string	dstFormatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));

						// Samples the source at normalized coordinates and stores into the
						// destination storage image at the fragment position.
						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
							<< "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
							<< "    const ivec2 pixels_resolution = ivec2(textureSize(u_imageIn, 0)) - ivec2(1,1);\n"
							<< "    const vec2 in_pos = vec2(out_pos) / vec2(pixels_resolution);\n"
							<< "    imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					default:
						DE_ASSERT(false);
				}
			}

			// Verification fragment shader
			{
				std::ostringstream	src;

				// Verification shader uses the dedicated verify format, not the transcode format.
				const std::string	samplerType			= getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
				const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
				const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));

				// Decompresses both the result (imageIn0) and the reference (imageIn1)
				// into separate storage images for host-side comparison.
				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout (binding = 0) uniform " << samplerType << " u_imageIn0;\n"
					<< "layout (binding = 1) uniform " << samplerType << " u_imageIn1;\n"
					<< "layout (binding = 2, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut0;\n"
					<< "layout (binding = 3, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut1;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< "    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
					<< "\n"
					<< "    const ivec2 pixels_resolution0 = ivec2(textureSize(u_imageIn0, 0)) - ivec2(1,1);\n"
					<< "    const vec2 in_pos0 = vec2(out_pos) / vec2(pixels_resolution0);\n"
					<< "    imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
					<< "\n"
					<< "    const ivec2 pixels_resolution1 = ivec2(textureSize(u_imageIn1, 0)) - ivec2(1,1);\n"
					<< "    const vec2 in_pos1 = vec2(out_pos) / vec2(pixels_resolution1);\n"
					<< "    imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
					<< "}\n";

				programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
			}

			break;
		}

		default:
			DE_ASSERT(false);
	}
}
// Create the concrete TestInstance for the requested shader stage / operation pair.
// All device-capability requirements are checked up front; any missing capability
// results in a NotSupportedError so the case is reported as unsupported rather
// than failed.
TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
	const VkPhysicalDevice physicalDevice = context.getPhysicalDevice();
	const InstanceInterface& vk = context.getInstanceInterface();

	// Non-mipmapped variants are expected to use a single-layer image with depth 1.
	if (!m_parameters.useMipmaps)
		DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size) == 1u);
		DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).z() == 1u);

	// Sanity: the base layer must have a non-degenerate extent.
	DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() > 0u);
	DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() > 0u);

	// Texel-view-compatible (block texel view) images require VK_KHR_maintenance2.
	if (std::find(context.getDeviceExtensions().begin(), context.getDeviceExtensions().end(), "VK_KHR_maintenance2") == context.getDeviceExtensions().end())
		TCU_THROW(NotSupportedError, "Extension VK_KHR_maintenance2 not supported");

	VkImageFormatProperties imageFormatProperties;

	// The uncompressed "view" format must support the usage the test will request.
	if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
										mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
										m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties))
		TCU_THROW(NotSupportedError, "Operation not supported with this image format");

	// The compressed format must additionally support creation with the
	// BLOCK_TEXEL_VIEW_COMPATIBLE / MUTABLE_FORMAT / EXTENDED_USAGE create flags,
	// which is what allows re-interpreting its blocks through an uncompressed view.
	if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
										mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
										VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
										VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
										&imageFormatProperties))
		TCU_THROW(NotSupportedError, "Operation not supported with this image format");

	const VkPhysicalDeviceFeatures physicalDeviceFeatures = getPhysicalDeviceFeatures (vk, physicalDevice);

	// Each compressed-format family is gated by its own physical-device feature bit;
	// deInRange32 checks whether the chosen format falls inside the family's
	// contiguous VkFormat enum range.
	if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
		!physicalDeviceFeatures.textureCompressionBC)
		TCU_THROW(NotSupportedError, "textureCompressionBC not supported");

	if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
		!physicalDeviceFeatures.textureCompressionETC2)
		TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");

	if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK) &&
		!physicalDeviceFeatures.textureCompressionASTC_LDR)
		TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");

	// Dispatch to the instance class implementing the requested stage + operation.
	switch (m_parameters.shader)
		case SHADER_TYPE_COMPUTE:
			switch (m_parameters.operation)
				// Read-style compute operations share one implementation ...
				case OPERATION_IMAGE_LOAD:
				case OPERATION_TEXEL_FETCH:
				case OPERATION_TEXTURE:
					return new BasicComputeTestInstance(context, m_parameters);
				// ... while image_store needs its own write-path implementation.
				case OPERATION_IMAGE_STORE:
					return new ImageStoreComputeTestInstance(context, m_parameters);
					// Unknown compute operation — the case generator never emits this.
					TCU_THROW(InternalError, "Impossible");

		case SHADER_TYPE_FRAGMENT:
			switch (m_parameters.operation)
				// Attachment read/write variants exercise render-pass attachments.
				case OPERATION_ATTACHMENT_READ:
				case OPERATION_ATTACHMENT_WRITE:
					return new GraphicsAttachmentsTestInstance(context, m_parameters);
				// Texture read/write variants exercise sampled-image access instead.
				case OPERATION_TEXTURE_READ:
				case OPERATION_TEXTURE_WRITE:
					return new GraphicsTextureTestInstance(context, m_parameters);
					// Unknown fragment operation — unreachable by construction.
					TCU_THROW(InternalError, "Impossible");

			// Unknown shader type — unreachable by construction.
			TCU_THROW(InternalError, "Impossible");
2630 static tcu::UVec3 getUnniceResolution(const VkFormat format, const deUint32 layers)
2632 const deUint32 unniceMipmapTextureSize[] = { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
2633 const deUint32 baseTextureWidth = unniceMipmapTextureSize[getBlockWidth(format)];
2634 const deUint32 baseTextureHeight = unniceMipmapTextureSize[getBlockHeight(format)];
2635 const deUint32 baseTextureWidthLevels = deLog2Floor32(baseTextureWidth);
2636 const deUint32 baseTextureHeightLevels = deLog2Floor32(baseTextureHeight);
2637 const deUint32 widthMultiplier = (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
2638 const deUint32 heightMultiplier = (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
2639 const deUint32 width = baseTextureWidth * widthMultiplier;
2640 const deUint32 height = baseTextureHeight * heightMultiplier;
2642 // Number of levels should be same on both axises
2643 DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
2645 return tcu::UVec3(width, height, layers);
// Build the full "texel_view_compatible" test hierarchy:
//   shader stage (compute/fragment) -> mipmappedness -> operation ->
//   compressed format -> one case per compatible uncompressed view format.
// Compressed and uncompressed formats are paired by block/texel bit size
// (64-bit families with 64-bit view formats, 128-bit with 128-bit), as
// required for block-texel-view compatibility.
tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
	// Member of the local FormatsArray aggregate: pointer to a format table,
	// paired with its element count (see formatsCompressedSets usage below).
	const VkFormat* formats;
	// Test dimensions and their display names; each entry below is indexed by
	// the corresponding loop variable.
	const bool mipmapness[] =
	const std::string pipelineName[SHADER_TYPE_LAST] =
	const std::string mipmanpnessName[DE_LENGTH_OF_ARRAY(mipmapness)] =
	const std::string operationName[OPERATION_LAST] =

	// Every image in the test can at least be copied to/from.
	const VkImageUsageFlags baseImageUsageFlagSet = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

	// Usage flags for the compressed image itself, per operation.
	const VkImageUsageFlags compressedImageUsageFlags[OPERATION_LAST] =
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), // "image_load"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texel_fetch"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "image_store"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), // "attachment_read"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT), // "attachment_write"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), // "texture_read"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture_write"

	// Usage flags for the uncompressed view of the compressed image. These are
	// the same as the image's own usage, except attachment_write additionally
	// needs color-attachment usage on the view.
	const VkImageUsageFlags compressedImageViewUsageFlags[OPERATION_LAST] =
	compressedImageUsageFlags[0], //"image_load"
	compressedImageUsageFlags[1], //"texel_fetch"
	compressedImageUsageFlags[2], //"texture"
	compressedImageUsageFlags[3], //"image_store"
	compressedImageUsageFlags[4], //"attachment_read"
	compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, //"attachment_write"
	compressedImageUsageFlags[6], //"texture_read"
	compressedImageUsageFlags[7], //"texture_write"

	// Usage flags for the separate uncompressed (reference/result) image, per
	// operation. Note read/write roles are mirrored relative to the compressed
	// image (e.g. attachment_read renders INTO this image).
	const VkImageUsageFlags uncompressedImageUsageFlags[OPERATION_LAST] =
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), //"image_load"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texel_fetch"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texture"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"image_store"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"attachment_read"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT), //"attachment_write"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_read"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_write"

	// Compressed formats whose blocks are 64 bits wide (BC1/BC4, ETC2 RGB/RGBA1, EAC R11).
	const VkFormat compressedFormats64bit[] =
	VK_FORMAT_BC1_RGB_UNORM_BLOCK,
	VK_FORMAT_BC1_RGB_SRGB_BLOCK,
	VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
	VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
	VK_FORMAT_BC4_UNORM_BLOCK,
	VK_FORMAT_BC4_SNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
	VK_FORMAT_EAC_R11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11_SNORM_BLOCK,

	// Compressed formats whose blocks are 128 bits wide (BC2/3/5/6H/7, ETC2 RGBA8, EAC RG11, all ASTC).
	const VkFormat compressedFormats128bit[] =
	VK_FORMAT_BC2_UNORM_BLOCK,
	VK_FORMAT_BC2_SRGB_BLOCK,
	VK_FORMAT_BC3_UNORM_BLOCK,
	VK_FORMAT_BC3_SRGB_BLOCK,
	VK_FORMAT_BC5_UNORM_BLOCK,
	VK_FORMAT_BC5_SNORM_BLOCK,
	VK_FORMAT_BC6H_UFLOAT_BLOCK,
	VK_FORMAT_BC6H_SFLOAT_BLOCK,
	VK_FORMAT_BC7_UNORM_BLOCK,
	VK_FORMAT_BC7_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
	VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x12_SRGB_BLOCK,

	// 64-bit-per-texel uncompressed formats usable as views over 64-bit blocks.
	const VkFormat uncompressedFormats64bit[] =
	VK_FORMAT_R16G16B16A16_UNORM,
	VK_FORMAT_R16G16B16A16_SNORM,
	VK_FORMAT_R16G16B16A16_USCALED,
	VK_FORMAT_R16G16B16A16_SSCALED,
	VK_FORMAT_R16G16B16A16_UINT,
	VK_FORMAT_R16G16B16A16_SINT,
	VK_FORMAT_R16G16B16A16_SFLOAT,
	VK_FORMAT_R32G32_UINT,
	VK_FORMAT_R32G32_SINT,
	VK_FORMAT_R32G32_SFLOAT,
	//VK_FORMAT_R64_UINT, removed from the test; it could not be used
	//VK_FORMAT_R64_SINT, removed from the test; it could not be used
	//VK_FORMAT_R64_SFLOAT, removed from the test; it could not be used

	// 128-bit-per-texel uncompressed formats usable as views over 128-bit blocks.
	const VkFormat uncompressedFormats128bit[] =
	VK_FORMAT_R32G32B32A32_UINT,
	VK_FORMAT_R32G32B32A32_SINT,
	VK_FORMAT_R32G32B32A32_SFLOAT,
	//VK_FORMAT_R64G64_UINT, removed from the test; it could not be used
	//VK_FORMAT_R64G64_SINT, removed from the test; it could not be used
	//VK_FORMAT_R64G64_SFLOAT, removed from the test; it could not be used

	// Bitness groups: entry N of formatsCompressedSets pairs with entry N of
	// formatsUncompressedSets below.
	const FormatsArray formatsCompressedSets[] =
	compressedFormats64bit,
	DE_LENGTH_OF_ARRAY(compressedFormats64bit)
	compressedFormats128bit,
	DE_LENGTH_OF_ARRAY(compressedFormats128bit)

	const FormatsArray formatsUncompressedSets[] =
	uncompressedFormats64bit,
	DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
	uncompressedFormats128bit,
	DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)

	// Both tables must define the same bitness groups.
	DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));

	MovePtr<tcu::TestCaseGroup> texelViewCompatibleTests (new tcu::TestCaseGroup(testCtx, "texel_view_compatible", "Texel view compatible cases"));

	for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
		MovePtr<tcu::TestCaseGroup> pipelineTypeGroup (new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str(), ""))		;

		for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
			const bool mipmapTest = mipmapness[mipmapTestNdx];

			MovePtr<tcu::TestCaseGroup> mipmapTypeGroup (new tcu::TestCaseGroup(testCtx, mipmanpnessName[mipmapTestNdx].c_str(), ""));

			for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
				// Skip operation/stage combinations that do not apply:
				// attachment/texture operations are fragment-only ...
				if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
				// ... and image load/store operations are compute-only.
				if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))

				MovePtr<tcu::TestCaseGroup> imageOperationGroup (new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str(), ""));

				// Iterate through bitness groups (64 bit, 128 bit, etc)
				for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
					for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
						const VkFormat formatCompressed = formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
						const std::string compressedFormatGroupName = getFormatShortString(formatCompressed);
						MovePtr<tcu::TestCaseGroup> compressedFormatGroup (new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str(), ""));

						// One leaf case per same-bitness uncompressed view format.
						for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
							const VkFormat formatUncompressed = formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
							const std::string uncompressedFormatGroupName = getFormatShortString(formatUncompressed);
							const TestParameters parameters =
							static_cast<Operation>(operationNdx),
							static_cast<ShaderType>(shaderType),
							// Mipmapped variants use an awkward 3-layer resolution;
							// the basic variants use a plain 64x64 single layer.
							mipmapTest ? getUnniceResolution(formatCompressed, 3u) : UVec3(64u, 64u, 1u),
							// NOTE(review): presumably the number of images the
							// operation needs (image_store uses one extra) —
							// confirm against the TestParameters declaration.
							(operationNdx == OPERATION_IMAGE_STORE) ? 3u : 2u,
							compressedImageUsageFlags[operationNdx],
							compressedImageViewUsageFlags[operationNdx],
							uncompressedImageUsageFlags[operationNdx],
							// Format used by the verification pass (formatForVerify).
							VK_FORMAT_R8G8B8A8_UNORM

							compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, "", parameters));

						imageOperationGroup->addChild(compressedFormatGroup.release());

				mipmapTypeGroup->addChild(imageOperationGroup.release());

			pipelineTypeGroup->addChild(mipmapTypeGroup.release());

		texelViewCompatibleTests->addChild(pipelineTypeGroup.release());

	return texelViewCompatibleTests.release();