1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2017 The Khronos Group Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
20 * \file vktImageCompressionTranscodingSupport.cpp
21 * \brief Compression transcoding support
22 *//*--------------------------------------------------------------------*/
24 #include "vktImageCompressionTranscodingSupport.hpp"
26 #include "deUniquePtr.hpp"
27 #include "deStringUtil.hpp"
28 #include "deSharedPtr.hpp"
29 #include "deRandom.hpp"
31 #include "vktTestCaseUtil.hpp"
32 #include "vkPrograms.hpp"
33 #include "vkImageUtil.hpp"
34 #include "vktImageTestsUtil.hpp"
35 #include "vkBuilderUtil.hpp"
37 #include "vkRefUtil.hpp"
38 #include "vkTypeUtil.hpp"
39 #include "vkQueryUtil.hpp"
41 #include "tcuTextureUtil.hpp"
42 #include "tcuTexture.hpp"
43 #include "tcuCompressedTexture.hpp"
44 #include "tcuVectorType.hpp"
45 #include "tcuResource.hpp"
46 #include "tcuImageIO.hpp"
47 #include "tcuImageCompare.hpp"
48 #include "tcuTestLog.hpp"
49 #include "tcuRGBA.hpp"
50 #include "tcuSurface.hpp"
62 using tcu::TestContext;
63 using tcu::TestStatus;
66 using tcu::CompressedTexFormat;
67 using tcu::CompressedTexture;
70 using tcu::ConstPixelBufferAccess;
// Shared-pointer aliases so Vulkan handle wrappers can live in std::vector.
75 typedef SharedPtr<MovePtr<Image> > ImageSp;
76 typedef SharedPtr<Move<VkImageView> > ImageViewSp;
77 typedef SharedPtr<Move<VkDescriptorSet> > SharedVkDescriptorSet;
// Shader operations exercised by the transcoding tests (fragment of the
// Operation enumeration; the enum header is not visible in this chunk).
89 OPERATION_TEXEL_FETCH,
91 OPERATION_IMAGE_STORE,
92 OPERATION_ATTACHMENT_READ,
93 OPERATION_ATTACHMENT_WRITE,
94 OPERATION_TEXTURE_READ,
95 OPERATION_TEXTURE_WRITE,
// TestParameters fields (fragment): the compressed/uncompressed format pair
// under test, how many images participate, and the usage flags applied to
// each image/view. formatForVerify is used for the final verification pass.
105 VkFormat formatCompressed;
106 VkFormat formatUncompressed;
107 deUint32 imagesCount;
108 VkImageUsageFlags compressedImageUsage;
109 VkImageUsageFlags compressedImageViewUsage;
110 VkImageUsageFlags uncompressedImageUsage;
112 VkFormat formatForVerify;
// Wrap a Move<T> handle in a shared pointer (ownership is transferred into
// the heap-allocated Move<T>).
116 inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
118 return SharedPtr<Move<T> >(new Move<T>(move));
// Overload for MovePtr<T> payloads (e.g. Image objects).
122 inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
124 return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
// Convenience constants for single-mip / single-layer subresources.
127 const deUint32 SINGLE_LEVEL = 1u;
128 const deUint32 SINGLE_LAYER = 1u;
// Common base for all transcoding test instances: owns the test parameters,
// derives the compressed block dimensions and usable mip level count, and
// provides reference-data generation shared by the concrete tests.
130 class BasicTranscodingTestInstance : public TestInstance
133 BasicTranscodingTestInstance (Context& context,
134 const TestParameters& parameters);
// Concrete subclasses implement the actual test body.
135 virtual TestStatus iterate (void) = 0;
// Fills 'toFill' with a deterministic pattern + seeded pseudo-random data;
// 'layer'/'level' feed the RNG seed so each subresource differs.
137 void generateData (deUint8* toFill,
139 const VkFormat format,
140 const deUint32 layer = 0u,
141 const deUint32 level = 0u);
142 deUint32 getLevelCount ();
143 deUint32 getLayerCount ();
144 UVec3 getLayerDims ();
// Per-mip sizes in texels, and the corresponding sizes in compressed blocks.
145 vector<UVec3> getMipLevelSizes (UVec3 baseSize);
146 vector<UVec3> getCompressedMipLevelSizes (const VkFormat compressedFormat,
147 const vector<UVec3>& uncompressedSizes);
149 const TestParameters m_parameters;
// Compressed-format block footprint in texels (e.g. 4x4 for BC/ETC2).
150 const deUint32 m_blockWidth;
151 const deUint32 m_blockHeight;
// Number of usable mip levels (clamped so no level is smaller than a block).
152 const deUint32 m_levelCount;
153 const UVec3 m_layerSize;
156 deUint32 findMipMapLevelCount ();
// Computes how many mip levels the test may use: with mipmapping enabled,
// levels are counted only while both dimensions stay above the compressed
// block size, so every used level holds at least one full block.
159 deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
161 deUint32 levelCount = 1;
163 // We cannot use mipmap levels which have resolution below block size.
164 // Reduce number of mipmap levels
165 if (m_parameters.useMipmaps)
167 deUint32 w = m_parameters.size.x();
168 deUint32 h = m_parameters.size.y();
// Block dimensions come from the compressed format and must be non-zero.
170 DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);
172 while (w > m_blockWidth && h > m_blockHeight)
177 if (w > m_blockWidth && h > m_blockHeight)
// Sanity: the smallest counted level must still cover a full block.
181 DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
182 DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
// Note: initializer order matters — m_levelCount is computed by
// findMipMapLevelCount(), which reads m_blockWidth/m_blockHeight, so those
// members must be (and are) initialized first.
188 BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
189 : TestInstance (context)
190 , m_parameters (parameters)
191 , m_blockWidth (getBlockWidth(m_parameters.formatCompressed))
192 , m_blockHeight (getBlockHeight(m_parameters.formatCompressed))
193 , m_levelCount (findMipMapLevelCount())
194 , m_layerSize (getLayerSize(m_parameters.imageType, m_parameters.size))
// The tests assume square-ish power-of-two behavior: x and y must share the
// same log2 floor.
196 DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
// Trivial accessors. getLayerCount() maps the z component of the test size
// to the array-layer count (2D-array images store layers in z).
199 deUint32 BasicTranscodingTestInstance::getLevelCount()
204 deUint32 BasicTranscodingTestInstance::getLayerCount()
206 return m_parameters.size.z();
209 UVec3 BasicTranscodingTestInstance::getLayerDims()
// Returns per-mip extents starting at 'baseSize', halving x and y each
// level (clamped to 1) until either getLevelCount() levels are produced or
// a 1x1 level is reached. Only 2D / 2D-array image types are supported.
214 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
216 vector<UVec3> levelSizes;
217 const deUint32 levelCount = getLevelCount();
219 DE_ASSERT(m_parameters.imageType == IMAGE_TYPE_2D || m_parameters.imageType == IMAGE_TYPE_2D_ARRAY);
// Level 0 is the base size itself; z (layers) is never reduced.
223 levelSizes.push_back(baseSize);
225 while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
227 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
228 baseSize.y() = deMax32(baseSize.y() >> 1, 1);
229 levelSizes.push_back(baseSize);
232 DE_ASSERT(levelSizes.size() == getLevelCount());
// Converts each texel-space mip size into its size measured in compressed
// blocks for the given compressed format.
237 vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
239 vector<UVec3> levelSizes;
240 vector<UVec3>::const_iterator it;
242 for (it = uncompressedSizes.begin(); it != uncompressedSizes.end(); it++)
243 levelSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, *it));
// Fills 'toFill' (size bytes) with reference data: a fixed byte pattern of
// edge-case values (infinities, signalling/quiet NaNs in double- and
// single-precision layouts) followed by seeded pseudo-random bytes, then
// post-processes the buffer to strip bit patterns that would not survive a
// round-trip through the uncompressed view format (SNORM -0, float INF/NaN/
// denormals). The seed mixes layer, level and format so subresources differ.
248 void BasicTranscodingTestInstance::generateData (deUint8* toFill,
250 const VkFormat format,
251 const deUint32 layer,
252 const deUint32 level)
254 const deUint8 pattern[] =
257 0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
258 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
259 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
260 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
261 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
262 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
263 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
264 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
265 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
266 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
267 0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Positive infinity
268 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Negative infinity
269 0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
270 0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
271 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
272 0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
273 0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
274 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
275 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
276 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
278 0x7F, 0x80, 0x00, 0x00, // Positive infinity
279 0xFF, 0x80, 0x00, 0x00, // Negative infinity
280 0x7F, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
281 0x7F, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
282 0xFF, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
283 0xFF, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
284 0x7F, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
285 0x7F, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
286 0xFF, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
287 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
288 0xAA, 0xAA, 0xAA, 0xAA,
289 0x55, 0x55, 0x55, 0x55,
292 deUint8* start = toFill;
293 size_t sizeToRnd = size;
// For the first subresource only: write the pattern twice — once byte-
// reversed, once forward — provided the buffer has room for both copies.
296 if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
299 for (size_t i = 0; i < sizeof(pattern); i++)
300 start[sizeof(pattern) - i - 1] = pattern[i];
302 start += sizeof(pattern);
303 sizeToRnd -= sizeof(pattern);
306 deMemcpy(start, pattern, sizeof(pattern));
308 start += sizeof(pattern);
309 sizeToRnd -= sizeof(pattern);
// Remaining bytes are filled 32 bits at a time; the tail must be aligned.
314 DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);
316 deUint32* start32 = reinterpret_cast<deUint32*>(start);
317 size_t sizeToRnd32 = sizeToRnd / sizeof(deUint32);
// Deterministic per-subresource seed: layer, level and format all mixed in.
318 deUint32 seed = (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
321 for (size_t i = 0; i < sizeToRnd32; i++)
322 start32[i] = rnd.getUint32();
326 // Remove certain values that may not be preserved based on the uncompressed view format
327 if (isSnormFormat(m_parameters.formatUncompressed))
329 for (size_t i = 0; i < size; i += 2)
331 // SNORM fix: due to write operation in SNORM format
332 // replaces 0x00 0x80 to 0x01 0x80
333 if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
337 else if (isFloatFormat(m_parameters.formatUncompressed))
339 tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);
341 if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
343 for (size_t i = 0; i < size; i += 2)
345 // HALF_FLOAT fix: remove INF and NaN
346 if ((toFill[i+1] & 0x7C) == 0x7C)
350 else if (textureFormat.type == tcu::TextureFormat::FLOAT)
352 for (size_t i = 0; i < size; i += 4)
354 // Clear half-float INF/NaN exponent patterns in the low 16 bits too — NOTE(review): stride is 4 but the mask is the 16-bit exponent; presumably guards against half-float reinterpretation — confirm
355 if ((toFill[i+1] & 0x7C) == 0x7C)
359 for (size_t i = 0; i < size; i += 4)
361 // FLOAT fix: remove INF, NaN, and denorm
// Byte pairs [i+2],[i+3] and [i+0],[i+1] are checked for all-ones exponent
// (INF/NaN) or all-zero exponent (denormal) bit patterns.
363 if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
366 if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
// Compute-pipeline transcoding test: uploads compressed reference data,
// runs a compute shader that reads/writes through uncompressed-format views
// of the compressed image, then compares and finally decompresses for
// verification. The nested ImageData aggregate below groups each logical
// image with its per-mip/per-layer views and create-infos.
374 class BasicComputeTestInstance : public BasicTranscodingTestInstance
377 BasicComputeTestInstance (Context& context,
378 const TestParameters& parameters);
379 TestStatus iterate (void);
// --- ImageData accessors: simple pass-throughs over the three vectors ---
383 deUint32 getImagesCount (void) { return static_cast<deUint32>(images.size()); }
384 deUint32 getImageViewCount (void) { return static_cast<deUint32>(imagesViews.size()); }
385 deUint32 getImageInfoCount (void) { return static_cast<deUint32>(imagesInfos.size()); }
// Double dereference: SharedPtr -> MovePtr/Move -> raw Vulkan handle.
386 VkImage getImage (const deUint32 ndx) { return **images[ndx]->get(); }
387 VkImageView getImageView (const deUint32 ndx) { return **imagesViews[ndx]; }
388 VkImageCreateInfo getImageInfo (const deUint32 ndx) { return imagesInfos[ndx]; }
389 void addImage (MovePtr<Image> image) { images.push_back(makeVkSharedPtr(image)); }
390 void addImageView (Move<VkImageView> imageView) { imagesViews.push_back(makeVkSharedPtr(imageView));}
391 void addImageInfo (const VkImageCreateInfo imageInfo) { imagesInfos.push_back(imageInfo); }
// Drops all views (used before re-creating views for the decompress pass).
392 void resetViews () { imagesViews.clear(); }
394 vector<ImageSp> images;
395 vector<ImageViewSp> imagesViews;
396 vector<VkImageCreateInfo> imagesInfos;
// --- Test-instance helpers (implemented below) ---
398 void copyDataToImage (const VkCommandBuffer& cmdBuffer,
399 ImageData& imageData,
400 const vector<UVec3>& mipMapSizes,
401 const bool isCompressed);
402 virtual void executeShader (const VkCommandBuffer& cmdBuffer,
403 const VkDescriptorSetLayout& descriptorSetLayout,
404 const VkDescriptorPool& descriptorPool,
405 vector<ImageData>& imageData);
406 bool copyResultAndCompare (const VkCommandBuffer& cmdBuffer,
407 const VkImage& uncompressed,
408 const VkDeviceSize offset,
410 void descriptorSetUpdate (VkDescriptorSet descriptorSet,
411 const VkDescriptorImageInfo* descriptorImageInfos);
412 void createImageInfos (ImageData& imageData,
413 const vector<UVec3>& mipMapSizes,
414 const bool isCompressed);
415 bool decompressImage (const VkCommandBuffer& cmdBuffer,
416 vector<ImageData>& imageData,
417 const vector<UVec3>& mipMapSizes);
// Host-side reference data the images are filled from / compared against.
418 vector<deUint8> m_data;
// Constructor: all state is set up lazily in iterate(); only forwards to the
// transcoding base class.
422 BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
423 :BasicTranscodingTestInstance (context, parameters)
// Test body: create the compressed image (index 0) and its uncompressed
// companions, upload generated reference data, run the compute shader via
// uncompressed-format views, compare the result buffer against the
// reference, and finally decompress for the last verification step.
427 TestStatus BasicComputeTestInstance::iterate (void)
429 const DeviceInterface& vk = m_context.getDeviceInterface();
430 const VkDevice device = m_context.getDevice();
431 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
432 Allocator& allocator = m_context.getDefaultAllocator();
433 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
434 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
// Single level of size m_parameters.size when mipmaps are disabled.
435 const vector<UVec3> mipMapSizes = m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, m_parameters.size);
436 vector<ImageData> imageData (m_parameters.imagesCount);
// Image 0 is the compressed image; the last image receives the result.
437 const deUint32 compressedNdx = 0u;
438 const deUint32 resultImageNdx = m_parameters.imagesCount -1u;
440 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
442 const bool isCompressed = compressedNdx == imageNdx ? true : false;
443 createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
444 for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
446 imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
// VK_KHR_maintenance2 view-usage override: the uncompressed-format view of
// the compressed image only carries the usages valid for that format.
449 const VkImageViewUsageCreateInfoKHR imageViewUsageKHR =
451 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
452 DE_NULL, //const void* pNext;
453 m_parameters.compressedImageUsage, //VkImageUsageFlags usage;
// One single-mip/single-layer view per (mip, layer) of the compressed image.
455 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
456 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
458 imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
459 mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
460 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
461 &imageViewUsageKHR));
// Uncompressed images already have one (image, info) per subresource.
466 imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
467 mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
468 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
// Total reference-data size: compressed bytes of every mip x layer count.
475 for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
477 size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
480 generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
// Read-style operations upload into the compressed image; image-store
// uploads into the uncompressed source image instead.
483 switch(m_parameters.operation)
485 case OPERATION_IMAGE_LOAD:
486 case OPERATION_TEXEL_FETCH:
487 case OPERATION_TEXTURE:
488 copyDataToImage(*cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
490 case OPERATION_IMAGE_STORE:
491 copyDataToImage(*cmdBuffer, imageData[1], mipMapSizes, false);
499 Move<VkDescriptorSetLayout> descriptorSetLayout;
500 Move<VkDescriptorPool> descriptorPool;
502 DescriptorSetLayoutBuilder descriptorSetLayoutBuilder;
503 DescriptorPoolBuilder descriptorPoolBuilder;
// One binding per image; sampling operations bind the compressed image
// (index 0) as a combined image sampler, everything else as storage image.
504 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
506 switch(m_parameters.operation)
508 case OPERATION_IMAGE_LOAD:
509 case OPERATION_IMAGE_STORE:
510 descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
511 descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
513 case OPERATION_TEXEL_FETCH:
514 case OPERATION_TEXTURE:
515 descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
516 descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
523 descriptorSetLayout = descriptorSetLayoutBuilder.build(vk, device);
524 descriptorPool = descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
525 executeShader(*cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);
// Compare each (mip, layer) of the result image against the matching slice
// of the reference buffer; 'offset' walks the buffer in compressed bytes.
528 VkDeviceSize offset = 0ull;
529 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
530 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
532 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
533 const UVec3 size = UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
534 imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
535 imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
536 if (!copyResultAndCompare(*cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
537 return TestStatus::fail("Fail");
538 offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
// Final pass: decompress and verify the compressed image contents.
542 if (!decompressImage(*cmdBuffer, imageData, mipMapSizes))
543 return TestStatus::fail("Fail");
544 return TestStatus::pass("Pass");
// Uploads m_data into every image of 'imageData' via a host-visible staging
// buffer: one buffer-to-image copy per mip level, with barriers that flush
// the host write and transition each image to TRANSFER_DST_OPTIMAL first.
547 void BasicComputeTestInstance::copyDataToImage (const VkCommandBuffer& cmdBuffer,
548 ImageData& imageData,
549 const vector<UVec3>& mipMapSizes,
550 const bool isCompressed)
552 const DeviceInterface& vk = m_context.getDeviceInterface();
553 const VkDevice device = m_context.getDevice();
554 const VkQueue queue = m_context.getUniversalQueue();
555 Allocator& allocator = m_context.getDefaultAllocator();
// Staging buffer holding the whole reference data blob.
557 Buffer imageBuffer (vk, device, allocator,
558 makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
559 MemoryRequirement::HostVisible);
560 VkDeviceSize offset = 0ull;
562 const Allocation& alloc = imageBuffer.getAllocation();
563 deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
// Make the host write visible to the device before recording the copies.
564 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_data.size());
567 beginCommandBuffer(vk, cmdBuffer);
// Whole-image range (all mips, all layers) for the layout transition.
568 const VkImageSubresourceRange subresourceRange =
570 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
571 0u, //deUint32 baseMipLevel
572 imageData.getImageInfo(0u).mipLevels, //deUint32 levelCount
573 0u, //deUint32 baseArrayLayer
574 imageData.getImageInfo(0u).arrayLayers //deUint32 layerCount
577 for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
579 const VkImageMemoryBarrier preCopyImageBarrier = makeImageMemoryBarrier(
580 0u, VK_ACCESS_TRANSFER_WRITE_BIT,
581 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
582 imageData.getImage(imageNdx), subresourceRange);
584 const VkBufferMemoryBarrier FlushHostCopyBarrier = makeBufferMemoryBarrier(
585 VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
586 imageBuffer.get(), 0ull, m_data.size());
588 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
589 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &FlushHostCopyBarrier, 1u, &preCopyImageBarrier);
591 for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
// For the compressed image the copy extent is the mip size in texels;
// uncompressed images use their own (block-resolution) extent.
593 const VkExtent3D imageExtent = isCompressed ?
594 makeExtent3D(mipMapSizes[mipNdx]) :
595 imageData.getImageInfo(imageNdx).extent;
596 const VkBufferImageCopy copyRegion =
598 offset, //VkDeviceSize bufferOffset;
599 0u, //deUint32 bufferRowLength;
600 0u, //deUint32 bufferImageHeight;
601 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers), //VkImageSubresourceLayers imageSubresource;
602 makeOffset3D(0, 0, 0), //VkOffset3D imageOffset;
603 imageExtent, //VkExtent3D imageExtent;
606 vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
// Advance by the compressed byte size of this mip across all layers; an
// uncompressed extent is scaled back up to texels via the block footprint.
607 offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
608 UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed? imageExtent.height :imageExtent.height * m_blockHeight,imageExtent.depth)) *
609 imageData.getImageInfo(imageNdx).arrayLayers;
612 endCommandBuffer(vk, cmdBuffer);
613 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Builds the compute pipeline ("comp" shader), a nearest-filter sampler and
// one descriptor set per image view, transitions all images to GENERAL, then
// dispatches once per view with the uncompressed extent as the work size.
616 void BasicComputeTestInstance::executeShader (const VkCommandBuffer& cmdBuffer,
617 const VkDescriptorSetLayout& descriptorSetLayout,
618 const VkDescriptorPool& descriptorPool,
619 vector<ImageData>& imageData)
621 const DeviceInterface& vk = m_context.getDeviceInterface();
622 const VkDevice device = m_context.getDevice();
623 const VkQueue queue = m_context.getUniversalQueue();
624 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
625 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
626 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
627 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
628 Move<VkSampler> sampler;
// Nearest/clamp sampler with unnormalized-lookup-friendly settings; used
// for the combined-image-sampler bindings.
630 const VkSamplerCreateInfo createInfo =
632 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
633 DE_NULL, //const void* pNext;
634 0u, //VkSamplerCreateFlags flags;
635 VK_FILTER_NEAREST, //VkFilter magFilter;
636 VK_FILTER_NEAREST, //VkFilter minFilter;
637 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
638 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
639 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
640 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
641 0.0f, //float mipLodBias;
642 VK_FALSE, //VkBool32 anisotropyEnable;
643 1.0f, //float maxAnisotropy;
644 VK_FALSE, //VkBool32 compareEnable;
645 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
646 0.0f, //float minLod;
647 0.0f, //float maxLod;
648 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
649 VK_FALSE, //VkBool32 unnormalizedCoordinates;
651 sampler = createSampler(vk, device, &createInfo);
// Descriptor image infos: one contiguous group of imagesCount entries per
// view index, so set N binds view N of every image.
654 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
655 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
657 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
658 for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
660 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
661 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
665 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
666 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
668 beginCommandBuffer(vk, cmdBuffer);
// Full range of the compressed image (all mips/layers) for its transition.
670 const VkImageSubresourceRange compressedRange =
672 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
673 0u, //deUint32 baseMipLevel
674 imageData[0].getImageInfo(0u).mipLevels, //deUint32 levelCount
675 0u, //deUint32 baseArrayLayer
676 imageData[0].getImageInfo(0u).arrayLayers //deUint32 layerCount
// Uncompressed images are single-mip/single-layer each.
678 const VkImageSubresourceRange uncompressedRange =
680 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
681 0u, //deUint32 baseMipLevel
682 1u, //deUint32 levelCount
683 0u, //deUint32 baseArrayLayer
684 1u //deUint32 layerCount
687 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
// One barrier per uncompressed image (to GENERAL for shader writes) plus a
// final barrier moving the compressed image TRANSFER_DST -> GENERAL.
689 vector<VkImageMemoryBarrier> preShaderImageBarriers;
690 preShaderImageBarriers.resize(descriptorSets.size() + 1u);
691 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
693 preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
694 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
695 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
696 imageData[1].getImage(imageNdx), uncompressedRange);
699 preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
700 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
701 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
702 imageData[0].getImage(0), compressedRange);
704 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
705 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
706 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
// Per view: write the descriptor set, bind it and dispatch one invocation
// per texel of the corresponding uncompressed extent.
708 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
710 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
711 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
712 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
713 imageData[1].getImageInfo(ndx).extent.height,
714 imageData[1].getImageInfo(ndx).extent.depth);
717 endCommandBuffer(vk, cmdBuffer);
718 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Reads back mip 0 / layer 0 of 'uncompressed' into a host-visible buffer
// and memcmp's it against the reference data at 'offset' in m_data.
// Returns true when the contents match byte-for-byte.
721 bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandBuffer& cmdBuffer,
722 const VkImage& uncompressed,
723 const VkDeviceSize offset,
726 const DeviceInterface& vk = m_context.getDeviceInterface();
727 const VkQueue queue = m_context.getUniversalQueue();
728 const VkDevice device = m_context.getDevice();
729 Allocator& allocator = m_context.getDefaultAllocator();
731 VkDeviceSize imageResultSize = getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
732 Buffer imageBufferResult (vk, device, allocator,
733 makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
734 MemoryRequirement::HostVisible);
736 beginCommandBuffer(vk, cmdBuffer);
// Only the first mip/layer is read back here.
738 const VkImageSubresourceRange subresourceRange =
740 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
741 0u, //deUint32 baseMipLevel
742 1u, //deUint32 levelCount
743 0u, //deUint32 baseArrayLayer
744 1u //deUint32 layerCount
747 const VkBufferImageCopy copyRegion =
749 0ull, // VkDeviceSize bufferOffset;
750 0u, // deUint32 bufferRowLength;
751 0u, // deUint32 bufferImageHeight;
752 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
753 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
754 makeExtent3D(size), // VkExtent3D imageExtent;
// GENERAL (shader writes) -> TRANSFER_SRC for the copy ...
757 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
758 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
759 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
760 uncompressed, subresourceRange);
// ... then make the transfer write visible to the host read.
762 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
763 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
764 imageBufferResult.get(), 0ull, imageResultSize);
766 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
767 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, &copyRegion);
768 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
770 endCommandBuffer(vk, cmdBuffer);
771 submitCommandsAndWait(vk, device, queue, cmdBuffer);
773 const Allocation& allocResult = imageBufferResult.getAllocation();
// Invalidate before the host reads the device-written memory.
774 invalidateMappedMemoryRange(vk, device, allocResult.getMemory(), allocResult.getOffset(), imageResultSize);
775 if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
// Writes 'descriptorImageInfos' (one entry per image) into the given set,
// using the same descriptor types chosen when the layout was built: all
// storage images for load/store operations; for fetch/sample operations,
// binding 0 (the compressed image) is a combined image sampler.
780 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
782 const DeviceInterface& vk = m_context.getDeviceInterface();
783 const VkDevice device = m_context.getDevice();
784 DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
786 switch(m_parameters.operation)
788 case OPERATION_IMAGE_LOAD:
789 case OPERATION_IMAGE_STORE:
791 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
792 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
797 case OPERATION_TEXEL_FETCH:
798 case OPERATION_TEXTURE:
800 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
802 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
803 bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
// Flush all queued writes in one vkUpdateDescriptorSets call.
812 descriptorSetUpdateBuilder.update(vk, device);
// Populates imageData with VkImageCreateInfos: the compressed image gets a
// single info (all mips/layers, MUTABLE_FORMAT + BLOCK_TEXEL_VIEW_COMPATIBLE
// + EXTENDED_USAGE so uncompressed-format views are legal); uncompressed
// images get one info per (mip, layer), sized in compressed blocks.
815 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
817 const VkImageType imageType = mapImageType(m_parameters.imageType);
821 const VkExtent3D extentCompressed = makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
822 const VkImageCreateInfo compressedInfo =
824 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
825 DE_NULL, // const void* pNext;
826 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
827 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR |
828 VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, // VkImageCreateFlags flags;
829 imageType, // VkImageType imageType;
830 m_parameters.formatCompressed, // VkFormat format;
831 extentCompressed, // VkExtent3D extent;
832 static_cast<deUint32>(mipMapSizes.size()), // deUint32 mipLevels;
833 getLayerCount(), // deUint32 arrayLayers;
834 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
835 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
836 VK_IMAGE_USAGE_SAMPLED_BIT |
837 VK_IMAGE_USAGE_STORAGE_BIT |
838 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
839 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
840 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
841 0u, // deUint32 queueFamilyIndexCount;
842 DE_NULL, // const deUint32* pQueueFamilyIndices;
843 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
845 imageData.addImageInfo(compressedInfo);
// Uncompressed path: one single-mip/single-layer image per subresource of
// the compressed image, with extents measured in compressed blocks.
849 for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
850 for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
852 const VkExtent3D extentUncompressed = m_parameters.useMipmaps ?
853 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
854 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, m_parameters.size));
855 const VkImageCreateInfo uncompressedInfo =
857 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
858 DE_NULL, // const void* pNext;
859 0u, // VkImageCreateFlags flags;
860 imageType, // VkImageType imageType;
861 m_parameters.formatUncompressed, // VkFormat format;
862 extentUncompressed, // VkExtent3D extent;
863 1u, // deUint32 mipLevels;
864 1u, // deUint32 arrayLayers;
865 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
866 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
867 m_parameters.uncompressedImageUsage |
868 VK_IMAGE_USAGE_SAMPLED_BIT, // VkImageUsageFlags usage;
869 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
870 0u, // deUint32 queueFamilyIndexCount;
871 DE_NULL, // const deUint32* pQueueFamilyIndices;
872 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
874 imageData.addImageInfo(uncompressedInfo);
// Decompresses each (mip level, array layer) of the transcoded compressed image with the
// "decompress" compute shader and cross-checks it against a reference decompression of the
// same data. Returns false as soon as any pixel difference is detected.
//
// Per iteration the shader samples the compressed data through two paths: (1) a view of the
// original compressed image, and (2) a compressed-format staging image that the uncompressed
// result is round-tripped through via a host-visible buffer. Both paths must decode identically.
879 bool BasicComputeTestInstance::decompressImage (const VkCommandBuffer& cmdBuffer,
880 vector<ImageData>& imageData,
881 const vector<UVec3>& mipMapSizes)
883 const DeviceInterface& vk = m_context.getDeviceInterface();
884 const VkDevice device = m_context.getDevice();
885 const VkQueue queue = m_context.getUniversalQueue();
886 Allocator& allocator = m_context.getDefaultAllocator();
887 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
888 const VkImage& compressed = imageData[0].getImage(0);
// Views created by the previous pass are no longer needed; drop them before building new ones.
890 for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
891 imageData[ndx].resetViews();
893 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
894 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
// Alternate layers between SHADER_READ_ONLY_OPTIMAL and GENERAL to exercise both sampling layouts.
896 const bool layoutShaderReadOnly = (layerNdx % 2u) == 1;
897 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
898 const VkExtent3D extentCompressed = makeExtent3D(mipMapSizes[mipNdx]);
899 const VkImage& uncompressed = imageData[m_parameters.imagesCount -1].getImage(imageNdx);
900 const VkExtent3D extentUncompressed = imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
901 const VkDeviceSize bufferSizeComp = getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
// RGBA8 target holding the decompressed texels (one per compressed-image texel).
903 const VkImageCreateInfo decompressedImageInfo =
905 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
906 DE_NULL, // const void* pNext;
907 0u, // VkImageCreateFlags flags;
908 VK_IMAGE_TYPE_2D, // VkImageType imageType;
909 VK_FORMAT_R8G8B8A8_UNORM, // VkFormat format;
910 extentCompressed, // VkExtent3D extent;
911 1u, // deUint32 mipLevels;
912 1u, // deUint32 arrayLayers;
913 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
914 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
915 VK_IMAGE_USAGE_SAMPLED_BIT |
916 VK_IMAGE_USAGE_STORAGE_BIT |
917 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
918 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
919 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
920 0u, // deUint32 queueFamilyIndexCount;
921 DE_NULL, // const deUint32* pQueueFamilyIndices;
922 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
// Compressed-format staging image used to re-import the round-tripped data for the reference path.
925 const VkImageCreateInfo compressedImageInfo =
927 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
928 DE_NULL, // const void* pNext;
929 0u, // VkImageCreateFlags flags;
930 VK_IMAGE_TYPE_2D, // VkImageType imageType;
931 m_parameters.formatCompressed, // VkFormat format;
932 extentCompressed, // VkExtent3D extent;
933 1u, // deUint32 mipLevels;
934 1u, // deUint32 arrayLayers;
935 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
936 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
937 VK_IMAGE_USAGE_SAMPLED_BIT |
938 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
939 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
940 0u, // deUint32 queueFamilyIndexCount;
941 DE_NULL, // const deUint32* pQueueFamilyIndices;
942 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
944 const VkImageUsageFlags compressedViewUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
945 const VkImageViewUsageCreateInfoKHR compressedViewUsageCI =
947 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
948 DE_NULL, //const void* pNext;
949 compressedViewUsageFlags, //VkImageUsageFlags usage;
// NOTE(review): despite its name, uncompressedImage is created with compressedImageInfo — it is
// the compressed-format staging image for the reference path. Confirm naming intent.
951 Image resultImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
952 Image referenceImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
953 Image uncompressedImage (vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
954 Move<VkImageView> resultView = makeImageView(vk, device, resultImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
955 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
956 Move<VkImageView> referenceView = makeImageView(vk, device, referenceImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
957 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
958 Move<VkImageView> uncompressedView = makeImageView(vk, device, uncompressedImage.get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
959 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
960 Move<VkImageView> compressedView = makeImageView(vk, device, compressed, mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
961 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
// Bindings 0-1: sampled sources (staging + original compressed); bindings 2-3: storage targets.
962 Move<VkDescriptorSetLayout> descriptorSetLayout = DescriptorSetLayoutBuilder()
963 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
964 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
965 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
966 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
968 Move<VkDescriptorPool> descriptorPool = DescriptorPoolBuilder()
969 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
970 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
971 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
972 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
973 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
975 Move<VkDescriptorSet> descriptorSet = makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
976 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
977 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
978 const VkDeviceSize bufferSize = getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), VK_FORMAT_R8G8B8A8_UNORM);
979 Buffer resultBuffer (vk, device, allocator,
980 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
981 Buffer referenceBuffer (vk, device, allocator,
982 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
983 Buffer transferBuffer (vk, device, allocator,
984 makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
985 Move<VkSampler> sampler;
// Nearest filtering with a single mip level: the shader must read exact, unfiltered texels.
987 const VkSamplerCreateInfo createInfo =
989 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
990 DE_NULL, //const void* pNext;
991 0u, //VkSamplerCreateFlags flags;
992 VK_FILTER_NEAREST, //VkFilter magFilter;
993 VK_FILTER_NEAREST, //VkFilter minFilter;
994 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
995 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
996 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
997 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
998 0.0f, //float mipLodBias;
999 VK_FALSE, //VkBool32 anisotropyEnable;
1000 1.0f, //float maxAnisotropy;
1001 VK_FALSE, //VkBool32 compareEnable;
1002 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1003 0.0f, //float minLod;
1004 1.0f, //float maxLod;
1005 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1006 VK_FALSE, //VkBool32 unnormalizedCoordinates;
1008 sampler = createSampler(vk, device, &createInfo);
1011 VkDescriptorImageInfo descriptorImageInfos[] =
1013 makeDescriptorImageInfo(*sampler, *uncompressedView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1014 makeDescriptorImageInfo(*sampler, *compressedView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1015 makeDescriptorImageInfo(DE_NULL, *resultView, VK_IMAGE_LAYOUT_GENERAL),
1016 makeDescriptorImageInfo(DE_NULL, *referenceView, VK_IMAGE_LAYOUT_GENERAL)
1018 DescriptorSetUpdateBuilder()
1019 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
1020 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
1021 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
1022 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
1023 .update(vk, device);
1026 beginCommandBuffer(vk, cmdBuffer);
1028 const VkImageSubresourceRange subresourceRange =
1030 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1031 0u, //deUint32 baseMipLevel
1032 1u, //deUint32 levelCount
1033 0u, //deUint32 baseArrayLayer
1034 1u //deUint32 layerCount
1037 const VkImageSubresourceRange subresourceRangeComp =
1039 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1040 mipNdx, //deUint32 baseMipLevel
1041 1u, //deUint32 levelCount
1042 layerNdx, //deUint32 baseArrayLayer
1043 1u //deUint32 layerCount
1046 const VkBufferImageCopy copyRegion =
1048 0ull, // VkDeviceSize bufferOffset;
1049 0u, // deUint32 bufferRowLength;
1050 0u, // deUint32 bufferImageHeight;
1051 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1052 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1053 decompressedImageInfo.extent, // VkExtent3D imageExtent;
1056 const VkBufferImageCopy compressedCopyRegion =
1058 0ull, // VkDeviceSize bufferOffset;
1059 0u, // deUint32 bufferRowLength;
1060 0u, // deUint32 bufferImageHeight;
1061 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1062 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1063 extentUncompressed, // VkExtent3D imageExtent;
1068 const VkBufferMemoryBarrier preCopyBufferBarriers = makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1069 transferBuffer.get(), 0ull, bufferSizeComp);
1071 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1072 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
// NOTE(review): 'uncompressed' is read here in TRANSFER_SRC_OPTIMAL; its transition to that
// layout is assumed to have happened in an earlier pass — confirm against the caller.
1075 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
1078 const VkBufferMemoryBarrier postCopyBufferBarriers = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1079 transferBuffer.get(), 0ull, bufferSizeComp);
1081 const VkImageMemoryBarrier preCopyImageBarriers = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1082 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
1084 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1085 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
1088 vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
1090 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1091 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1094 const VkImageMemoryBarrier preShaderImageBarriers[] =
1097 makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1098 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1099 uncompressedImage.get(), subresourceRange),
1101 makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT,
1102 VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1103 compressed, subresourceRangeComp),
1105 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1106 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1107 resultImage.get(), subresourceRange),
1109 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1110 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1111 referenceImage.get(), subresourceRange)
1114 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1115 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1116 DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
// One invocation per texel of the decompressed extent.
1119 vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
1122 const VkImageMemoryBarrier postShaderImageBarriers[] =
1124 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1125 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1126 resultImage.get(), subresourceRange),
1128 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1129 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1130 referenceImage.get(), subresourceRange)
1133 const VkBufferMemoryBarrier preCopyBufferBarrier[] =
// Fixed: the original passed VK_BUFFER_USAGE_TRANSFER_DST_BIT (a VkBufferUsageFlagBits value)
// as the dstAccessMask; the copies below perform transfer writes, which require the
// VkAccessFlagBits value VK_ACCESS_TRANSFER_WRITE_BIT.
1135 makeBufferMemoryBarrier( 0, VK_ACCESS_TRANSFER_WRITE_BIT,
1136 resultBuffer.get(), 0ull, bufferSize),
1138 makeBufferMemoryBarrier( 0, VK_ACCESS_TRANSFER_WRITE_BIT,
1139 referenceBuffer.get(), 0ull, bufferSize),
1142 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1143 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(preCopyBufferBarrier), preCopyBufferBarrier,
1144 DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
1146 vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, &copyRegion);
1147 vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, &copyRegion);
1149 endCommandBuffer(vk, cmdBuffer);
1150 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Read back both decompressions on the host and compare byte-wise first, then fuzzily.
1152 const Allocation& resultAlloc = resultBuffer.getAllocation();
1153 const Allocation& referenceAlloc = referenceBuffer.getAllocation();
1154 invalidateMappedMemoryRange(vk, device, resultAlloc.getMemory(), resultAlloc.getOffset(), bufferSize);
1155 invalidateMappedMemoryRange(vk, device, referenceAlloc.getMemory(), referenceAlloc.getOffset(), bufferSize);
1157 if (deMemCmp(resultAlloc.getHostPtr(), referenceAlloc.getHostPtr(), (size_t)bufferSize) != 0)
1159 ConstPixelBufferAccess resultPixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
1160 ConstPixelBufferAccess referencePixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
1162 if(!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
// Compute-transcoding variant that writes the test data into the images with imageStore()
// from a compute shader (the "comp" program) instead of buffer-to-image copies.
1170 class ImageStoreComputeTestInstance : public BasicComputeTestInstance
1173 ImageStoreComputeTestInstance (Context& context,
1174 const TestParameters& parameters);
// Overrides the base-class upload step: dispatches the shader once per image view,
// binding all images of that view through a single descriptor set.
1176 virtual void executeShader (const VkCommandBuffer& cmdBuffer,
1177 const VkDescriptorSetLayout& descriptorSetLayout,
1178 const VkDescriptorPool& descriptorPool,
1179 vector<ImageData>& imageData);
1183 ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
1184 :BasicComputeTestInstance (context, parameters)
// Runs the "comp" compute program once per image view: transitions the source/destination
// images into VK_IMAGE_LAYOUT_GENERAL, binds one descriptor set per view (holding all
// m_parameters.imagesCount images of that view), and dispatches one workgroup per texel.
1188 void ImageStoreComputeTestInstance::executeShader (const VkCommandBuffer& cmdBuffer,
1189 const VkDescriptorSetLayout& descriptorSetLayout,
1190 const VkDescriptorPool& descriptorPool,
1191 vector<ImageData>& imageData)
1193 const DeviceInterface& vk = m_context.getDeviceInterface();
1194 const VkDevice device = m_context.getDevice();
1195 const VkQueue queue = m_context.getUniversalQueue();
1196 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
// One descriptor set per image view of the first (compressed) image.
1197 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
1198 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
1199 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1200 Move<VkSampler> sampler;
// Nearest sampler with unnormalized coordinates (minLod == maxLod == 0, as required for
// unnormalizedCoordinates == VK_TRUE) so the shader addresses texels directly.
1202 const VkSamplerCreateInfo createInfo =
1204 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
1205 DE_NULL, //const void* pNext;
1206 0u, //VkSamplerCreateFlags flags;
1207 VK_FILTER_NEAREST, //VkFilter magFilter;
1208 VK_FILTER_NEAREST, //VkFilter minFilter;
1209 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
1210 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
1211 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
1212 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
1213 0.0f, //float mipLodBias;
1214 VK_FALSE, //VkBool32 anisotropyEnable;
1215 1.0f, //float maxAnisotropy;
1216 VK_FALSE, //VkBool32 compareEnable;
1217 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1218 0.0f, //float minLod;
1219 0.0f, //float maxLod;
1220 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1221 VK_TRUE, //VkBool32 unnormalizedCoordinates;
1223 sampler = createSampler(vk, device, &createInfo);
// Flattened table: descriptorImageInfos[view * imagesCount + image] describes image 'image'
// as seen through view 'view'.
1226 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
1227 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
1229 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
1230 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
1232 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
1233 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
1237 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
1238 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
1240 beginCommandBuffer(vk, cmdBuffer);
// Whole-image range of the compressed image (all mips, all layers).
1242 const VkImageSubresourceRange compressedRange =
1244 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1245 0u, //deUint32 baseMipLevel
1246 imageData[0].getImageInfo(0).mipLevels, //deUint32 levelCount
1247 0u, //deUint32 baseArrayLayer
1248 imageData[0].getImageInfo(0).arrayLayers //deUint32 layerCount
1251 const VkImageSubresourceRange uncompressedRange =
1253 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1254 0u, //deUint32 baseMipLevel
1255 1u, //deUint32 levelCount
1256 0u, //deUint32 baseArrayLayer
1257 1u //deUint32 layerCount
1260 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
// Barriers: imageData[1] (uploaded via transfer) and imageData[2] (shader-written scratch)
// go to GENERAL, plus one final barrier for the compressed image itself.
1262 vector<VkImageMemoryBarrier> preShaderImageBarriers (descriptorSets.size() * 2u + 1u);
1263 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
1265 preShaderImageBarriers[imageNdx] = makeImageMemoryBarrier(
1266 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1267 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1268 imageData[1].getImage(imageNdx), uncompressedRange);
1270 preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()] = makeImageMemoryBarrier(
1271 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1272 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1273 imageData[2].getImage(imageNdx), uncompressedRange);
1276 preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
1277 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1278 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1279 imageData[0].getImage(0u), compressedRange);
1281 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1282 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1283 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
// One dispatch per view; the extent of imageData[1]'s matching info determines the grid.
1285 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
1287 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
1288 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
1289 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
1290 imageData[1].getImageInfo(ndx).extent.height,
1291 imageData[1].getImageInfo(ndx).extent.depth);
1294 endCommandBuffer(vk, cmdBuffer);
1295 submitCommandsAndWait(vk, device, queue, cmdBuffer);
// Transcoding test that reads/writes the compressed data through graphics-pipeline
// attachments (render passes with input attachments) rather than compute shaders.
1298 class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
1301 GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters);
1302 virtual TestStatus iterate (void);
// True when the tested operation writes INTO the compressed image (vs reading from it);
// selects src/dst formats, usages and view-usage structs in prepareData().
1305 virtual bool isWriteToCompressedOperation ();
1306 VkImageCreateInfo makeCreateImageInfo (const VkFormat format,
1307 const ImageType type,
1309 const VkImageUsageFlags usageFlags,
1310 const VkImageCreateFlags* createFlags,
1311 const deUint32 levels,
1312 const deUint32 layers);
// Fill 'data' with generated compressed/uncompressed texel data for one (layer, level);
// returns the byte size written.
1313 VkDeviceSize getCompressedImageData (const VkFormat format,
1315 std::vector<deUint8>& data,
1316 const deUint32 layer,
1317 const deUint32 level);
1318 VkDeviceSize getUncompressedImageData (const VkFormat format,
1320 std::vector<deUint8>& data,
1321 const deUint32 layer,
1322 const deUint32 level);
1323 virtual void prepareData ();
1324 virtual void prepareVertexBuffer ();
1325 virtual void transcodeRead ();
1326 virtual void transcodeWrite ();
// Decompress the given subresource of resCompressedImage and compare with a reference
// decompression of refCompressedData.
1327 bool verifyDecompression (const std::vector<deUint8>& refCompressedData,
1328 const de::MovePtr<Image>& resCompressedImage,
1329 const deUint32 layer,
1330 const deUint32 level,
1331 const UVec3& mipmapDims);
// Raw texel payloads indexed as m_srcData[level][layer] / m_dstData[level][layer].
1333 typedef std::vector<deUint8> RawDataVector;
1334 typedef SharedPtr<RawDataVector> RawDataPtr;
1335 typedef std::vector<RawDataPtr> LevelData;
1336 typedef std::vector<LevelData> FullImageData;
1338 FullImageData m_srcData;
1339 FullImageData m_dstData;
1341 typedef SharedPtr<Image> ImagePtr;
1342 typedef std::vector<ImagePtr> LevelImages;
1343 typedef std::vector<LevelImages> ImagesArray;
1345 ImagesArray m_uncompressedImages;
1346 MovePtr<Image> m_compressedImage;
// View-usage override (VK_KHR_maintenance2); src/dst pointers select which side gets it.
1348 VkImageViewUsageCreateInfoKHR m_imageViewUsageKHR;
1349 VkImageViewUsageCreateInfoKHR* m_srcImageViewUsageKHR;
1350 VkImageViewUsageCreateInfoKHR* m_dstImageViewUsageKHR;
// Per-mip resolutions: in compressed-image texels and in uncompressed (block) units.
1351 std::vector<tcu::UVec3> m_compressedImageResVec;
1352 std::vector<tcu::UVec3> m_uncompressedImageResVec;
1353 VkFormat m_srcFormat;
1354 VkFormat m_dstFormat;
1355 VkImageUsageFlags m_srcImageUsageFlags;
1356 VkImageUsageFlags m_dstImageUsageFlags;
1357 std::vector<tcu::UVec3> m_srcImageResolutions;
1358 std::vector<tcu::UVec3> m_dstImageResolutions;
// Fullscreen-quad vertex data shared by the transcode render passes.
1360 MovePtr<Buffer> m_vertexBuffer;
1361 deUint32 m_vertexCount;
1362 VkDeviceSize m_vertexBufferOffset;
// Constructor: default-initializes all members; the real setup happens in prepareData()
// and prepareVertexBuffer() during iterate().
1365 GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
1366 : BasicTranscodingTestInstance(context, parameters)
1369 , m_uncompressedImages()
1370 , m_compressedImage()
1371 , m_imageViewUsageKHR()
1372 , m_srcImageViewUsageKHR()
1373 , m_dstImageViewUsageKHR()
1374 , m_compressedImageResVec()
1375 , m_uncompressedImageResVec()
1378 , m_srcImageUsageFlags()
1379 , m_dstImageUsageFlags()
1380 , m_srcImageResolutions()
1381 , m_dstImageResolutions()
1384 , m_vertexBufferOffset(0ull)
// Test entry point: prepares data and vertices, runs the read or write transcode pass,
// then verifies every (level, layer) by decompressing and comparing against the expected
// payload (source data for write tests, destination data for read tests).
1388 TestStatus GraphicsAttachmentsTestInstance::iterate (void)
1391 prepareVertexBuffer();
// Sanity check: source and destination payloads must have matching sizes per subresource.
1393 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1394 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1395 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1397 if (isWriteToCompressedOperation())
// NOTE(review): verifyDecompression is declared with parameters (..., layer, level, ...),
// but these calls pass (levelNdx, layerNdx) in that order — apparently swapped relative to
// the declared names. Confirm against the definition before relying on the parameter names.
1402 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1403 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1404 if (isWriteToCompressedOperation())
1406 if (!verifyDecompression(*m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1407 return TestStatus::fail("Images difference detected");
1411 if (!verifyDecompression(*m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1412 return TestStatus::fail("Images difference detected");
1415 return TestStatus::pass("Pass");
// Derives all per-direction state (formats, usages, view-usage structs, resolutions) from
// isWriteToCompressedOperation(), then generates the source payload and sizes the
// destination payload for every (level, layer) subresource.
1418 void GraphicsAttachmentsTestInstance::prepareData ()
1420 VkImageViewUsageCreateInfoKHR* imageViewUsageKHRNull = (VkImageViewUsageCreateInfoKHR*)DE_NULL;
1422 m_imageViewUsageKHR = makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
// The view-usage override applies only to the compressed side of the transcode.
1424 m_srcImageViewUsageKHR = isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
1425 m_dstImageViewUsageKHR = isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
1427 m_srcFormat = isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
1428 m_dstFormat = isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
1430 m_srcImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
1431 m_dstImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
// Compressed-side sizes are in texels; uncompressed-side sizes are in compressed blocks.
1433 m_compressedImageResVec = getMipLevelSizes(getLayerDims());
1434 m_uncompressedImageResVec = getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
1436 m_srcImageResolutions = isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
1437 m_dstImageResolutions = isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
1439 m_srcData.resize(getLevelCount());
1440 m_dstData.resize(getLevelCount());
1441 m_uncompressedImages.resize(getLevelCount());
1443 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1445 m_srcData[levelNdx].resize(getLayerCount());
1446 m_dstData[levelNdx].resize(getLayerCount());
1447 m_uncompressedImages[levelNdx].resize(getLayerCount());
1449 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1451 m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1452 m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1454 if (isWriteToCompressedOperation())
// Write test: source is generated uncompressed data; destination receives compressed bytes.
1456 getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1458 m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
// Read test: source is generated compressed data; destination receives uncompressed bytes.
1462 getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1464 m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1467 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1472 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
1474 const DeviceInterface& vk = m_context.getDeviceInterface();
1475 const VkDevice device = m_context.getDevice();
1476 Allocator& allocator = m_context.getDefaultAllocator();
1478 const std::vector<tcu::Vec4> vertexArray = createFullscreenQuad();
1479 const size_t vertexBufferSizeInBytes = vertexArray.size() * sizeof(vertexArray[0]);
1481 m_vertexCount = static_cast<deUint32>(vertexArray.size());
1482 m_vertexBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
1484 // Upload vertex data
1485 const Allocation& vertexBufferAlloc = m_vertexBuffer->getAllocation();
1486 deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
1487 flushMappedMemoryRange(vk, device, vertexBufferAlloc.getMemory(), vertexBufferAlloc.getOffset(), vertexBufferSizeInBytes);
1490 void GraphicsAttachmentsTestInstance::transcodeRead ()
1492 const DeviceInterface& vk = m_context.getDeviceInterface();
1493 const VkDevice device = m_context.getDevice();
1494 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1495 const VkQueue queue = m_context.getUniversalQueue();
1496 Allocator& allocator = m_context.getDefaultAllocator();
1498 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
1500 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
1501 MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
1503 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1504 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
1506 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
1508 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1509 .addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
1510 .build(vk, device));
1511 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1512 .addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1513 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1514 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1516 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
1517 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1518 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));
1520 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
1521 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1523 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1525 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
1526 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
1527 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
1528 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
1529 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
1530 const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
1532 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
1534 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
1535 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
1537 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
1538 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
1540 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
1541 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
1542 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
1544 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1546 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
1547 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1549 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
1551 de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
1552 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
1554 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
1555 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
1556 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
1557 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
1558 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
1559 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);
1561 const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
1562 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
1563 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));
1565 // Upload source image data
1566 const Allocation& alloc = srcImageBuffer->getAllocation();
1567 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
1568 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
1570 beginCommandBuffer(vk, *cmdBuffer);
1571 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1573 // Copy buffer to image
1574 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
1575 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
1576 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
1578 // Define destination image layout
1579 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
1581 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1583 const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
1584 DescriptorSetUpdateBuilder()
1585 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
1586 .update(vk, device);
1588 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1589 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1591 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
1592 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
1594 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
1596 vk.cmdEndRenderPass(*cmdBuffer);
1598 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
1599 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1600 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1601 dstImage->get(), dstSubresourceRange);
1603 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
1604 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1605 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
1607 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
1608 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
1609 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1611 endCommandBuffer(vk, *cmdBuffer);
1613 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1615 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
1616 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
1617 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
1621 m_compressedImage = srcImage;
1624 void GraphicsAttachmentsTestInstance::transcodeWrite ()
1626 const DeviceInterface& vk = m_context.getDeviceInterface();
1627 const VkDevice device = m_context.getDevice();
1628 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1629 const VkQueue queue = m_context.getUniversalQueue();
1630 Allocator& allocator = m_context.getDefaultAllocator();
1632 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
1634 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
1635 MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
1637 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1638 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
1640 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
1642 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1643 .addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
1644 .build(vk, device));
1645 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1646 .addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1647 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1648 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1650 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
1651 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1652 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));
1654 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
1655 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1657 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1659 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
1660 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
1661 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
1662 const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
1663 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
1664 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
1666 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
1668 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
1669 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
1670 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
1672 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1674 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
1675 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
1677 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
1678 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
1680 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1681 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
1683 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
1685 de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
1686 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
1688 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
1689 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
1690 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
1691 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
1692 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
1693 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
1695 const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
1696 const VkExtent2D framebufferSize (renderSize);
1697 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));
1699 // Upload source image data
1700 const Allocation& alloc = srcImageBuffer->getAllocation();
1701 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
1702 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
1704 beginCommandBuffer(vk, *cmdBuffer);
1705 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1707 // Copy buffer to image
1708 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
1709 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
1710 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
1712 // Define destination image layout
1713 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
1715 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1717 const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
1718 DescriptorSetUpdateBuilder()
1719 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
1720 .update(vk, device);
1722 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1723 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1725 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
1726 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
1728 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
1730 vk.cmdEndRenderPass(*cmdBuffer);
1732 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
1733 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1734 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1735 dstImage->get(), dstSubresourceRange);
1737 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
1738 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1739 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
1741 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
1742 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
1743 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1745 endCommandBuffer(vk, *cmdBuffer);
1747 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1749 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
1750 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
1751 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
1755 m_compressedImage = dstImage;
1758 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
1760 return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
1763 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat format,
1764 const ImageType type,
1766 const VkImageUsageFlags usageFlags,
1767 const VkImageCreateFlags* createFlags,
1768 const deUint32 levels,
1769 const deUint32 layers)
1771 const VkImageType imageType = mapImageType(type);
1772 const VkImageCreateFlags imageCreateFlagsBase = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1773 const VkImageCreateFlags imageCreateFlagsAddOn = isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR : 0;
1774 const VkImageCreateFlags imageCreateFlags = (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
1776 const VkImageCreateInfo createImageInfo =
1778 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1779 DE_NULL, // const void* pNext;
1780 imageCreateFlags, // VkImageCreateFlags flags;
1781 imageType, // VkImageType imageType;
1782 format, // VkFormat format;
1783 makeExtent3D(getLayerSize(type, size)), // VkExtent3D extent;
1784 levels, // deUint32 mipLevels;
1785 layers, // deUint32 arrayLayers;
1786 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1787 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1788 usageFlags, // VkImageUsageFlags usage;
1789 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1790 0u, // deUint32 queueFamilyIndexCount;
1791 DE_NULL, // const deUint32* pQueueFamilyIndices;
1792 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1795 return createImageInfo;
1798 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat format,
1800 std::vector<deUint8>& data,
1801 const deUint32 layer,
1802 const deUint32 level)
1804 VkDeviceSize sizeBytes = getCompressedImageSizeInBytes(format, size);
1806 data.resize((size_t)sizeBytes);
1807 generateData(&data[0], data.size(), format, layer, level);
1812 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat format,
1814 std::vector<deUint8>& data,
1815 const deUint32 layer,
1816 const deUint32 level)
1818 tcu::IVec3 sizeAsIVec3 = tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
1819 VkDeviceSize sizeBytes = getImageSizeBytes(sizeAsIVec3, format);
1821 data.resize((size_t)sizeBytes);
1822 generateData(&data[0], data.size(), format, layer, level);
1827 bool GraphicsAttachmentsTestInstance::verifyDecompression (const std::vector<deUint8>& refCompressedData,
1828 const de::MovePtr<Image>& resCompressedImage,
1829 const deUint32 level,
1830 const deUint32 layer,
1831 const UVec3& mipmapDims)
1833 const DeviceInterface& vk = m_context.getDeviceInterface();
1834 const VkDevice device = m_context.getDevice();
1835 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1836 const VkQueue queue = m_context.getUniversalQueue();
1837 Allocator& allocator = m_context.getDefaultAllocator();
1839 const bool layoutShaderReadOnly = (layer % 2u) == 1;
1840 const UVec3 mipmapDimsBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);
1842 const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1843 const VkImageSubresourceRange resSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);
1845 const VkDeviceSize dstBufferSize = getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
1846 const VkImageUsageFlags refSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1848 const VkBufferCreateInfo refSrcImageBufferInfo (makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
1849 const MovePtr<Buffer> refSrcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));
1851 const VkImageCreateFlags refSrcImageCreateFlags = 0;
1852 const VkImageCreateInfo refSrcImageCreateInfo = makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1853 const MovePtr<Image> refSrcImage (new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
1854 Move<VkImageView> refSrcImageView (makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));
1856 const VkImageUsageFlags resSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1857 const VkImageViewUsageCreateInfoKHR resSrcImageViewUsageKHR = makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
1858 Move<VkImageView> resSrcImageView (makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));
1860 const VkImageCreateFlags refDstImageCreateFlags = 0;
1861 const VkImageUsageFlags refDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1862 const VkImageCreateInfo refDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1863 const MovePtr<Image> refDstImage (new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
1864 const Move<VkImageView> refDstImageView (makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
1865 const VkImageMemoryBarrier refDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
1866 const VkBufferCreateInfo refDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
1867 const MovePtr<Buffer> refDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));
1869 const VkImageCreateFlags resDstImageCreateFlags = 0;
1870 const VkImageUsageFlags resDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1871 const VkImageCreateInfo resDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
1872 const MovePtr<Image> resDstImage (new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
1873 const Move<VkImageView> resDstImageView (makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
1874 const VkImageMemoryBarrier resDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
1875 const VkBufferCreateInfo resDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
1876 const MovePtr<Buffer> resDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));
1878 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1879 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));
1881 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
1883 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
1884 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
1885 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
1886 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
1887 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
1888 .build(vk, device));
1889 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
1890 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
1891 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
1892 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1893 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1894 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1895 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1896 const VkSamplerCreateInfo refSrcSamplerInfo (makeSamplerCreateInfo());
1897 const Move<VkSampler> refSrcSampler = vk::createSampler(vk, device, &refSrcSamplerInfo);
1898 const VkSamplerCreateInfo resSrcSamplerInfo (makeSamplerCreateInfo());
1899 const Move<VkSampler> resSrcSampler = vk::createSampler(vk, device, &resSrcSamplerInfo);
1900 const VkDescriptorImageInfo descriptorRefSrcImage (makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
1901 const VkDescriptorImageInfo descriptorResSrcImage (makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
1902 const VkDescriptorImageInfo descriptorRefDstImage (makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
1903 const VkDescriptorImageInfo descriptorResDstImage (makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));
1905 const VkExtent2D renderSize (makeExtent2D(mipmapDims.x(), mipmapDims.y()));
1906 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1907 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
1908 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT, queueFamilyIndex));
1909 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1911 const VkBufferImageCopy copyBufferToImageRegion = makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
1912 const VkBufferImageCopy copyRegion = makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
1913 const VkBufferMemoryBarrier refSrcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
1914 const VkImageMemoryBarrier refSrcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
1915 const VkImageMemoryBarrier refSrcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
1916 const VkImageMemoryBarrier resCompressedImageBarrier = makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, resCompressedImage->get(), resSubresourceRange);
1918 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize, getLayerCount()));
1920 // Upload source image data
1922 const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
1923 deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
1924 flushMappedMemoryRange(vk, device, refSrcImageBufferAlloc.getMemory(), refSrcImageBufferAlloc.getOffset(), refCompressedData.size());
1927 beginCommandBuffer(vk, *cmdBuffer);
1928 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1930 // Copy buffer to image
1931 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
1932 vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, ©BufferToImageRegion);
1933 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);
1935 // Make reference and result images readable
1936 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
1937 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
1939 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resCompressedImageBarrier);
1942 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1944 DescriptorSetUpdateBuilder()
1945 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
1946 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
1947 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
1948 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
1949 .update(vk, device);
1951 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1952 vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1953 vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
1955 vk.cmdEndRenderPass(*cmdBuffer);
1957 // Decompress reference image
1959 const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
1960 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1961 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1962 refDstImage->get(), subresourceRange);
1964 const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
1965 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1966 refDstBuffer->get(), 0ull, dstBufferSize);
1968 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
1969 vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, ©Region);
1970 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1973 // Decompress result image
1975 const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
1976 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1977 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1978 resDstImage->get(), subresourceRange);
1980 const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
1981 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1982 resDstBuffer->get(), 0ull, dstBufferSize);
1984 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
1985 vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, ©Region);
1986 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1989 endCommandBuffer(vk, *cmdBuffer);
1991 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1993 // Compare decompressed pixel data in reference and result images
1995 const Allocation& refDstBufferAlloc = refDstBuffer->getAllocation();
1996 invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);
1998 const Allocation& resDstBufferAlloc = resDstBuffer->getAllocation();
1999 invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
2001 if (deMemCmp(refDstBufferAlloc.getHostPtr(), resDstBufferAlloc.getHostPtr(), (size_t)dstBufferSize) != 0)
2003 // Do fuzzy to log error mask
2004 invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
2005 invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);
2007 tcu::ConstPixelBufferAccess resPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
2008 tcu::ConstPixelBufferAccess refPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());
2010 string comment = string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");
2012 if (isWriteToCompressedOperation())
2013 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2015 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
// Texture-path variant of the graphics transcoding test: instead of reading the
// compressed data through framebuffer attachments, the fragment shader samples
// it via a combined image sampler and writes results with imageStore (see
// transcodeRead/transcodeWrite below).
class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
	GraphicsTextureTestInstance (Context& context, const TestParameters& parameters);

	// True when the tested operation writes INTO the compressed image
	// (OPERATION_TEXTURE_WRITE); used to select the comparison direction.
	virtual bool isWriteToCompressedOperation ();
	// Upload compressed data, sample it through an uncompressed-format view and
	// store the fetched texels into a per-level/layer destination image.
	virtual void transcodeRead ();
	// Sample uncompressed source data and store it into the compressed image
	// through an uncompressed-format storage view.
	virtual void transcodeWrite ();
// All state setup is delegated to the attachments-based base class; this
// variant only overrides how the transcode passes are recorded.
GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
	: GraphicsAttachmentsTestInstance(context, parameters)
2041 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
2043 return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
2046 void GraphicsTextureTestInstance::transcodeRead ()
2048 const DeviceInterface& vk = m_context.getDeviceInterface();
2049 const VkDevice device = m_context.getDevice();
2050 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2051 const VkQueue queue = m_context.getUniversalQueue();
2052 Allocator& allocator = m_context.getDefaultAllocator();
2054 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2056 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2057 MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2059 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2060 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2062 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
2064 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2065 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2066 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2067 .build(vk, device));
2068 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2069 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2070 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2071 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2072 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2074 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
2075 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2076 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2078 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2079 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2081 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2083 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2084 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2085 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2086 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2087 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2088 const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
2090 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2092 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2093 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2095 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2096 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2098 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2099 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
2100 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
2102 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2104 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2105 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2107 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2109 de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2110 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2112 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2113 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2114 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2115 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2117 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
2118 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2119 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2120 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2121 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
2122 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2124 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2125 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
2127 // Upload source image data
2128 const Allocation& alloc = srcImageBuffer->getAllocation();
2129 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2130 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
2132 beginCommandBuffer(vk, *cmdBuffer);
2133 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2135 // Copy buffer to image
2136 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2137 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2138 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2140 // Define destination image layout
2141 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2143 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2145 DescriptorSetUpdateBuilder()
2146 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2147 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2148 .update(vk, device);
2150 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2151 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2153 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2154 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2156 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2158 vk.cmdEndRenderPass(*cmdBuffer);
2160 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2161 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2162 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2163 dstImage->get(), dstSubresourceRange);
2165 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2166 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2167 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2169 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2170 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2171 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2173 endCommandBuffer(vk, *cmdBuffer);
2175 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2177 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2178 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
2179 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2183 m_compressedImage = srcImage;
2186 void GraphicsTextureTestInstance::transcodeWrite ()
2188 const DeviceInterface& vk = m_context.getDeviceInterface();
2189 const VkDevice device = m_context.getDevice();
2190 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2191 const VkQueue queue = m_context.getUniversalQueue();
2192 Allocator& allocator = m_context.getDefaultAllocator();
2194 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2196 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2197 MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2199 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2200 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2202 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
2204 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2205 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2206 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2207 .build(vk, device));
2208 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2209 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2210 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2211 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2212 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2214 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
2215 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2216 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2218 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2219 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2221 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2223 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2224 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2225 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2226 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2227 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2228 const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
2230 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2232 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2233 const VkViewport viewport = makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
2234 const VkRect2D scissor = makeScissor(renderSize.width, renderSize.height);
2236 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2238 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2239 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2241 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2242 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2244 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2245 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2247 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2249 de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2250 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2252 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2253 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2254 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2255 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2257 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
2258 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2259 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2260 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2261 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
2262 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2264 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2265 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
2267 // Upload source image data
2268 const Allocation& alloc = srcImageBuffer->getAllocation();
2269 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2270 flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
2272 beginCommandBuffer(vk, *cmdBuffer);
2273 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2275 // Copy buffer to image
2276 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2277 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2278 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2280 // Define destination image layout
2281 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2283 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2285 DescriptorSetUpdateBuilder()
2286 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2287 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2288 .update(vk, device);
2290 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2291 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2293 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2294 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2296 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2298 vk.cmdEndRenderPass(*cmdBuffer);
2300 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2301 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2302 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2303 dstImage->get(), dstSubresourceRange);
2305 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2306 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2307 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2309 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2310 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2311 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2313 endCommandBuffer(vk, *cmdBuffer);
2315 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2317 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2318 invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
2319 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2323 m_compressedImage = dstImage;
// Test case node for the texel-view-compatible (VK_KHR_maintenance2)
// compression transcoding tests: generates the shaders and instantiates the
// appropriate test instance for the parameter set.
class TexelViewCompatibleCase : public TestCase
	TexelViewCompatibleCase (TestContext& testCtx,
							 const std::string& name,
							 const std::string& desc,
							 const TestParameters& parameters);
	// Emits compute or vertex/fragment shader sources depending on m_parameters.shader.
	void initPrograms (SourceCollections& programCollection) const;
	// Validates extension/format support, then creates the instance.
	TestInstance* createInstance (Context& context) const;

	// Full parameter set (operation, shader stage, formats, sizes, ...).
	const TestParameters m_parameters;
// Stores the parameter set; all behavior is driven by m_parameters later on.
TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const std::string& desc, const TestParameters& parameters)
	: TestCase (testCtx, name, desc)
	, m_parameters (parameters)
// Generates the GLSL sources for this test variant.
// SHADER_TYPE_COMPUTE: a "comp" shader implementing the tested operation
// (image load / texel fetch / texture sample / image store) and a
// "decompress" shader that samples the result and reference compressed images
// and stores them into uncompressed images for comparison.
// SHADER_TYPE_FRAGMENT: a pass-through "vert" shader, a "frag" shader for the
// attachment/texture operation, and a "frag_verify" comparison shader.
void TexelViewCompatibleCase::initPrograms (vk::SourceCollections& programCollection) const
	DE_ASSERT(m_parameters.size.x() > 0);
	DE_ASSERT(m_parameters.size.y() > 0);

	switch (m_parameters.shader)
		case SHADER_TYPE_COMPUTE:
			// Operation and decompression shaders share the uncompressed-view
			// image type and format qualifier.
			const std::string imageTypeStr = getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
			const std::string formatQualifierStr = getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
			std::ostringstream src;
			std::ostringstream src_decompress;

			// Common preamble: GLSL 4.50, one invocation per texel.
			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
				<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
			src_decompress << src.str();

			switch(m_parameters.operation)
				// Copy u_image0 to u_image1 texel-by-texel with imageLoad/imageStore.
				case OPERATION_IMAGE_LOAD:
					src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< " ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< " imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"

				// Fetch from a sampler with texelFetch (z selects the LOD) and store.
				case OPERATION_TEXEL_FETCH:
					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< " ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
						<< " imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n"

				// Sample with normalized coordinates derived from the dispatch size.
				case OPERATION_TEXTURE:
					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< " const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x - 1, gl_NumWorkGroups.y - 1);\n"
						<< " const ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< " const vec2 coord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
						<< " imageStore(u_image1, pos, texture(u_image0, coord));\n"

				// Store through the uncompressed view (u_image0), then copy onward.
				case OPERATION_IMAGE_STORE:
					src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image1;\n"
						<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
						<< "void main (void)\n"
						<< " ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< " imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
						<< " imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"

			// Decompression shader: samples result and reference compressed images
			// and stores both into uncompressed images for host-side comparison.
			src_decompress << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
				<< "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_reference;\n"
				<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_result;\n"
				<< "layout (binding = 3, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_reference;\n\n"
				<< "void main (void)\n"
				<< " const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
				<< " const vec2 cord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
				<< " const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); \n"
				<< " imageStore(decompressed_result, pos, texture(compressed_result, cord));\n"
				<< " imageStore(decompressed_reference, pos, texture(compressed_reference, cord));\n"
			programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
			programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());

		case SHADER_TYPE_FRAGMENT:
			// 2D-array images are viewed layer-by-layer, so shaders use plain 2D types.
			ImageType imageTypeForFS = (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;

			// Pass-through vertex shader shared by all fragment-path operations.
			std::ostringstream src;
			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
				<< "layout(location = 0) in vec4 v_in_position;\n"
				<< "void main (void)\n"
				<< " gl_Position = v_in_position;\n"
			programCollection.glslSources.add("vert") << glu::VertexSource(src.str());

			switch(m_parameters.operation)
				// Read each texel from an input attachment and write it to the color output.
				case OPERATION_ATTACHMENT_READ:
				case OPERATION_ATTACHMENT_WRITE:
					std::ostringstream src;

					const std::string dstTypeStr = getGlslFormatType(m_parameters.formatUncompressed);
					const std::string srcTypeStr = getGlslInputFormatType(m_parameters.formatUncompressed);

					src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
						<< "precision highp int;\n"
						<< "precision highp float;\n"
						<< "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
						<< "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
						<< "void main (void)\n"
						<< " o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
					programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

				// Sample the input texture at the fragment position and imageStore the result.
				case OPERATION_TEXTURE_READ:
				case OPERATION_TEXTURE_WRITE:
					std::ostringstream src;

					const std::string srcSamplerTypeStr = getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
					const std::string dstImageTypeStr = getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
					const std::string dstFormatQualifierStr = getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));

					src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
						<< "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
						<< "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
						<< "void main (void)\n"
						<< " const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
						<< " const ivec2 pixels_resolution = ivec2(textureSize(u_imageIn, 0)) - ivec2(1,1);\n"
						<< " const vec2 in_pos = vec2(out_pos) / vec2(pixels_resolution);\n"
						<< " imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
					programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

			// Verification fragment shader: samples result and reference images and
			// stores both into uncompressed images (formatForVerify) for comparison.
			std::ostringstream src;

			const std::string samplerType = getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
			const std::string imageTypeStr = getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
			const std::string formatQualifierStr = getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));

			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
				<< "layout (binding = 0) uniform " << samplerType << " u_imageIn0;\n"
				<< "layout (binding = 1) uniform " << samplerType << " u_imageIn1;\n"
				<< "layout (binding = 2, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut0;\n"
				<< "layout (binding = 3, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut1;\n"
				<< "void main (void)\n"
				<< " const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
				<< " const ivec2 pixels_resolution0 = ivec2(textureSize(u_imageIn0, 0)) - ivec2(1,1);\n"
				<< " const vec2 in_pos0 = vec2(out_pos) / vec2(pixels_resolution0);\n"
				<< " imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
				<< " const ivec2 pixels_resolution1 = ivec2(textureSize(u_imageIn1, 0)) - ivec2(1,1);\n"
				<< " const vec2 in_pos1 = vec2(out_pos) / vec2(pixels_resolution1);\n"
				<< " imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
			programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
// Creates the TestInstance implementing m_parameters.shader / m_parameters.operation,
// after rejecting unsupported configurations (missing extension, unsupported image
// formats, missing texture-compression device features) with NotSupportedError.
// NOTE(review): this excerpt has blank and brace-only lines stripped; code tokens
// below are kept verbatim, so block braces and the switches' 'default:' labels
// are not visible here.
TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
const VkPhysicalDevice physicalDevice = context.getPhysicalDevice();
const InstanceInterface& vk = context.getInstanceInterface();
// Non-mipmapped configurations are expected to be single-layer with depth 1.
if (!m_parameters.useMipmaps)
DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size) == 1u);
DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).z() == 1u);
// Sanity: the layer extent must be non-empty in both axes.
DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() > 0u);
DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() > 0u);
// VK_KHR_maintenance2 is required for the BLOCK_TEXEL_VIEW_COMPATIBLE and
// EXTENDED_USAGE create flags queried below.
if (std::find(context.getDeviceExtensions().begin(), context.getDeviceExtensions().end(), "VK_KHR_maintenance2") == context.getDeviceExtensions().end())
TCU_THROW(NotSupportedError, "Extension VK_KHR_maintenance2 not supported");
VkImageFormatProperties imageFormatProperties;
// The uncompressed (texel-view) format must support the requested usage
// with optimal tiling and no extra create flags.
if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties))
TCU_THROW(NotSupportedError, "Operation not supported with this image format");
// The compressed format must additionally support creation with the
// block-texel-view-compatible, mutable-format and extended-usage flags.
if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
&imageFormatProperties))
TCU_THROW(NotSupportedError, "Operation not supported with this image format");
// Require the device feature matching the compressed format family
// (BC, ETC2/EAC, or ASTC LDR) — ranges follow the VkFormat enum layout.
const VkPhysicalDeviceFeatures physicalDeviceFeatures = getPhysicalDeviceFeatures (vk, physicalDevice);
if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
!physicalDeviceFeatures.textureCompressionBC)
TCU_THROW(NotSupportedError, "textureCompressionBC not supported");
if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
!physicalDeviceFeatures.textureCompressionETC2)
TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");
if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK) &&
!physicalDeviceFeatures.textureCompressionASTC_LDR)
TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");
// Dispatch to the instance type implementing the requested shader stage and
// operation; impossible combinations are internal errors.
switch (m_parameters.shader)
case SHADER_TYPE_COMPUTE:
switch (m_parameters.operation)
case OPERATION_IMAGE_LOAD:
case OPERATION_TEXEL_FETCH:
case OPERATION_TEXTURE:
return new BasicComputeTestInstance(context, m_parameters);
case OPERATION_IMAGE_STORE:
return new ImageStoreComputeTestInstance(context, m_parameters);
TCU_THROW(InternalError, "Impossible");
case SHADER_TYPE_FRAGMENT:
switch (m_parameters.operation)
case OPERATION_ATTACHMENT_READ:
case OPERATION_ATTACHMENT_WRITE:
return new GraphicsAttachmentsTestInstance(context, m_parameters);
case OPERATION_TEXTURE_READ:
case OPERATION_TEXTURE_WRITE:
return new GraphicsTextureTestInstance(context, m_parameters);
TCU_THROW(InternalError, "Impossible");
TCU_THROW(InternalError, "Impossible");
2649 static tcu::UVec3 getUnniceResolution(const VkFormat format, const deUint32 layers)
2651 const deUint32 unniceMipmapTextureSize[] = { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
2652 const deUint32 baseTextureWidth = unniceMipmapTextureSize[getBlockWidth(format)];
2653 const deUint32 baseTextureHeight = unniceMipmapTextureSize[getBlockHeight(format)];
2654 const deUint32 baseTextureWidthLevels = deLog2Floor32(baseTextureWidth);
2655 const deUint32 baseTextureHeightLevels = deLog2Floor32(baseTextureHeight);
2656 const deUint32 widthMultiplier = (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
2657 const deUint32 heightMultiplier = (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
2658 const deUint32 width = baseTextureWidth * widthMultiplier;
2659 const deUint32 height = baseTextureHeight * heightMultiplier;
2661 // Number of levels should be same on both axises
2662 DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
2664 return tcu::UVec3(width, height, layers);
// Builds the "texel_view_compatible" test group hierarchy:
//   shader type -> mipmapness -> operation -> compressed format -> uncompressed format,
// pairing compressed formats with uncompressed formats of the same block bit size
// (64-bit and 128-bit sets).
// NOTE(review): this excerpt has blank and brace-only lines stripped; code tokens
// are kept verbatim, so several initializer bodies are not visible here.
tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
// Pointer half of a pointer/count pair describing one format set.
// NOTE(review): member of a local struct (used as 'FormatsArray' below) whose
// surrounding declaration lines are not visible in this excerpt.
const VkFormat* formats;
// Whether each case variant runs without or with mipmaps.
// NOTE(review): initializer values not visible in this excerpt.
const bool mipmapness[] =
// Group name per shader type (indexed by ShaderType).
// NOTE(review): initializer values not visible in this excerpt.
const std::string pipelineName[SHADER_TYPE_LAST] =
// Group name per mipmapness entry ("mipmanpness" sic — existing identifier kept).
// NOTE(review): initializer values not visible in this excerpt.
const std::string mipmanpnessName[DE_LENGTH_OF_ARRAY(mipmapness)] =
// Group name per operation (indexed by Operation).
// NOTE(review): initializer values not visible in this excerpt.
const std::string operationName[OPERATION_LAST] =
// Usage common to every image created by these tests.
const VkImageUsageFlags baseImageUsageFlagSet = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
// Per-operation usage flags for the compressed image (indexed by Operation).
const VkImageUsageFlags compressedImageUsageFlags[OPERATION_LAST] =
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), // "image_load"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texel_fetch"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "image_store"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), // "attachment_read"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT), // "attachment_write"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), // "texture_read"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture_write"
// Per-operation usage flags for views of the compressed image; mostly the same
// as the image usage above.
const VkImageUsageFlags compressedImageViewUsageFlags[OPERATION_LAST] =
compressedImageUsageFlags[0], //"image_load"
compressedImageUsageFlags[1], //"texel_fetch"
compressedImageUsageFlags[2], //"texture"
compressedImageUsageFlags[3], //"image_store"
compressedImageUsageFlags[4], //"attachment_read"
// NOTE(review): COLOR_ATTACHMENT_BIT is already part of entry [5] above, so
// this extra OR is redundant (harmless).
compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, //"attachment_write"
compressedImageUsageFlags[6], //"texture_read"
compressedImageUsageFlags[7], //"texture_write"
// Per-operation usage flags for the uncompressed (texel-view-compatible) image.
const VkImageUsageFlags uncompressedImageUsageFlags[OPERATION_LAST] =
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), //"image_load"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texel_fetch"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texture"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"image_store"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"attachment_read"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT), //"attachment_write"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_read"
baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_write"
// Compressed formats with 64-bit blocks (paired with uncompressedFormats64bit).
const VkFormat compressedFormats64bit[] =
VK_FORMAT_BC1_RGB_UNORM_BLOCK,
VK_FORMAT_BC1_RGB_SRGB_BLOCK,
VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
VK_FORMAT_BC4_UNORM_BLOCK,
VK_FORMAT_BC4_SNORM_BLOCK,
VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
VK_FORMAT_EAC_R11_UNORM_BLOCK,
VK_FORMAT_EAC_R11_SNORM_BLOCK,
// Compressed formats with 128-bit blocks (paired with uncompressedFormats128bit).
const VkFormat compressedFormats128bit[] =
VK_FORMAT_BC2_UNORM_BLOCK,
VK_FORMAT_BC2_SRGB_BLOCK,
VK_FORMAT_BC3_UNORM_BLOCK,
VK_FORMAT_BC3_SRGB_BLOCK,
VK_FORMAT_BC5_UNORM_BLOCK,
VK_FORMAT_BC5_SNORM_BLOCK,
VK_FORMAT_BC6H_UFLOAT_BLOCK,
VK_FORMAT_BC6H_SFLOAT_BLOCK,
VK_FORMAT_BC7_UNORM_BLOCK,
VK_FORMAT_BC7_SRGB_BLOCK,
VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
// Uncompressed 64-bit-per-texel formats (texel-view-compatible with 64-bit blocks).
const VkFormat uncompressedFormats64bit[] =
VK_FORMAT_R16G16B16A16_UNORM,
VK_FORMAT_R16G16B16A16_SNORM,
VK_FORMAT_R16G16B16A16_USCALED,
VK_FORMAT_R16G16B16A16_SSCALED,
VK_FORMAT_R16G16B16A16_UINT,
VK_FORMAT_R16G16B16A16_SINT,
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_FORMAT_R32G32_UINT,
VK_FORMAT_R32G32_SINT,
VK_FORMAT_R32G32_SFLOAT,
// VK_FORMAT_R64_UINT — excluded: cannot be used by these tests
// VK_FORMAT_R64_SINT — excluded: cannot be used by these tests
// VK_FORMAT_R64_SFLOAT — excluded: cannot be used by these tests
// Uncompressed 128-bit-per-texel formats (texel-view-compatible with 128-bit blocks).
const VkFormat uncompressedFormats128bit[] =
VK_FORMAT_R32G32B32A32_UINT,
VK_FORMAT_R32G32B32A32_SINT,
VK_FORMAT_R32G32B32A32_SFLOAT,
// VK_FORMAT_R64G64_UINT — excluded: cannot be used by these tests
// VK_FORMAT_R64G64_SINT — excluded: cannot be used by these tests
// VK_FORMAT_R64G64_SFLOAT — excluded: cannot be used by these tests
// Compressed format sets grouped by block bit size; index-matched with
// formatsUncompressedSets below.
const FormatsArray formatsCompressedSets[] =
compressedFormats64bit,
DE_LENGTH_OF_ARRAY(compressedFormats64bit)
compressedFormats128bit,
DE_LENGTH_OF_ARRAY(compressedFormats128bit)
// Uncompressed format sets, index-matched with formatsCompressedSets.
const FormatsArray formatsUncompressedSets[] =
uncompressedFormats64bit,
DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
uncompressedFormats128bit,
DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)
// The bitness groups must pair up one-to-one.
DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));
MovePtr<tcu::TestCaseGroup> texelViewCompatibleTests (new tcu::TestCaseGroup(testCtx, "texel_view_compatible", "Texel view compatible cases"));
// Outer loop: one subgroup per shader stage (compute / fragment).
for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
MovePtr<tcu::TestCaseGroup> pipelineTypeGroup (new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str(), ""));
// One subgroup per mipmapness variant.
for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
const bool mipmapTest = mipmapness[mipmapTestNdx];
MovePtr<tcu::TestCaseGroup> mipmapTypeGroup (new tcu::TestCaseGroup(testCtx, mipmanpnessName[mipmapTestNdx].c_str(), ""));
// One subgroup per operation, skipping operations that do not apply
// to the current shader stage.
for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))
MovePtr<tcu::TestCaseGroup> imageOperationGroup (new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str(), ""));
// Iterate through bitness groups (64 bit, 128 bit, etc)
for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
const VkFormat formatCompressed = formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
const std::string compressedFormatGroupName = getFormatShortString(formatCompressed);
MovePtr<tcu::TestCaseGroup> compressedFormatGroup (new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str(), ""));
// Pair the compressed format with every uncompressed format of
// the same block bit size.
for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
const VkFormat formatUncompressed = formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
const std::string uncompressedFormatGroupName = getFormatShortString(formatUncompressed);
// NOTE(review): some TestParameters initializer fields fall on
// lines not visible in this excerpt.
const TestParameters parameters =
static_cast<Operation>(operationNdx),
static_cast<ShaderType>(shaderType),
// Mipmapped cases use an awkward resolution with 3 layers; otherwise 64x64x1.
mipmapTest ? getUnniceResolution(formatCompressed, 3u) : UVec3(64u, 64u, 1u),
// image_store needs one extra image binding.
(operationNdx == OPERATION_IMAGE_STORE) ? 3u : 2u,
compressedImageUsageFlags[operationNdx],
compressedImageViewUsageFlags[operationNdx],
uncompressedImageUsageFlags[operationNdx],
VK_FORMAT_R8G8B8A8_UNORM
compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, "", parameters));
imageOperationGroup->addChild(compressedFormatGroup.release());
mipmapTypeGroup->addChild(imageOperationGroup.release());
pipelineTypeGroup->addChild(mipmapTypeGroup.release());
texelViewCompatibleTests->addChild(pipelineTypeGroup.release());
return texelViewCompatibleTests.release();