1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
5 * Copyright (c) 2021 The Khronos Group Inc.
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
21 * \brief Video Decoding Session tests
22 *//*--------------------------------------------------------------------*/
24 #include "vktVideoDecodeTests.hpp"
25 #include "vktVideoTestUtils.hpp"
26 #include "vktTestCase.hpp"
27 #include "vktVideoPictureUtils.hpp"
29 #include "tcuTextureUtil.hpp"
30 #include "tcuVectorUtil.hpp"
31 #include "tcuTestLog.hpp"
32 #include "tcuPlatform.hpp"
33 #include "tcuFunctionLibrary.hpp"
34 #include "tcuImageCompare.hpp"
37 #include "vkBufferWithMemory.hpp"
38 #include "vkImageWithMemory.hpp"
39 #include "vkImageUtil.hpp"
40 #include "vkBarrierUtil.hpp"
41 #include "vkObjUtil.hpp"
42 #include "vkCmdUtil.hpp"
43 #include "vkTypeUtil.hpp"
45 #include "../ycbcr/vktYCbCrUtil.hpp"
47 #if (DE_OS != DE_OS_ANDROID)
48 #include "vktVideoSessionNvUtils.hpp"
49 #include "vktVideoSessionFfmpegUtils.hpp"
50 #include "vktVideoBaseDecodeUtils.hpp"
66 using vkt::ycbcr::MultiPlaneImageData;
// Decode test-case identifiers (the enum header itself is not visible in this
// chunk of the listing). The "Case N" comments refer to the Vulkan video
// conformance test plan numbering.
71 TEST_TYPE_H264_DECODE_I, // Case 6
72 TEST_TYPE_H264_DECODE_I_P, // Case 7
73 TEST_TYPE_H264_DECODE_I_P_B_13, // Case 7a
74 TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER, // Case 8
75 TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER, // Case 8a
76 TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS, // Case 9
77 TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE, // Case 17
78 TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB, // Case 18
79 TEST_TYPE_H264_DECODE_INTERLEAVED, // Case 21
80 TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED, // Case 23
81 TEST_TYPE_H264_H265_DECODE_INTERLEAVED, // Case 24
// H.265 cases follow; code below relies on the H.264 block preceding the
// H.265 block (see the de::inBounds() codec-operation selection).
83 TEST_TYPE_H265_DECODE_I, // Case 15
84 TEST_TYPE_H265_DECODE_I_P, // Case 16
85 TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER, // Case 16-2
86 TEST_TYPE_H265_DECODE_I_P_B_13 , // Case 16-3
87 TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER, // Case 16-4
97 // Vulkan video is not supported on the Android platform;
98 // all external libraries, helper functions and test instances have been excluded
99 #if (DE_OS != DE_OS_ANDROID)
// Returns a default-initialized DecodedFrame template: no picture attached
// (pictureIndex == -1), null handles and zeroed counters. Used to reset
// per-frame state before dequeueing decoded pictures from the frame buffer.
// (Opening/closing braces of this function are not visible in this chunk.)
100 DecodedFrame initDecodeFrame (void)
102 DecodedFrame frameTemplate =
104 -1, // int32_t pictureIndex;
105 DE_NULL, // const ImageObject* pDecodedImage;
106 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout decodedImageLayout;
107 DE_NULL, // VkFence frameCompleteFence;
108 DE_NULL, // VkFence frameConsumerDoneFence;
109 DE_NULL, // VkSemaphore frameCompleteSemaphore;
110 DE_NULL, // VkSemaphore frameConsumerDoneSemaphore;
111 DE_NULL, // VkQueryPool queryPool;
112 0, // int32_t startQueryId;
113 0, // uint32_t numQueries;
114 0, // uint64_t timestamp;
115 0, // uint32_t hasConsummerSignalFence : 1; (field name spelling matches the struct)
116 0, // uint32_t hasConsummerSignalSemaphore : 1;
117 0, // int32_t decodeOrder;
118 0, // int32_t displayOrder;
121 return frameTemplate;
124 // Avoid useless sampler in writeImage 2.5x faster
// Copies a single-channel source access into a freshly allocated texture
// level of the given size, pixel by pixel, avoiding the sampler path that
// writeImage() would otherwise use (~2.5x faster per the note above).
// NOTE(review): the allocated format is RGB/UNORM_INT8 despite the "RGBA"
// function name — confirm this is intentional. The `return result;` line is
// not visible in this chunk of the listing.
125 MovePtr<tcu::TextureLevel> convertToRGBASized (const tcu::ConstPixelBufferAccess& src, const tcu::UVec2& size)
127 const tcu::TextureFormat format (tcu::TextureFormat::RGB, tcu::TextureFormat::UNORM_INT8);
128 MovePtr<tcu::TextureLevel> result (new tcu::TextureLevel(format, size.x(), size.y()));
129 tcu::PixelBufferAccess access (result->getAccess());
131 for (int y = 0; y < result->getHeight(); ++y)
132 for (int x = 0; x < result->getWidth(); ++x)
133 access.setPixel(src.getPixelUint(x, y), x, y);
// Convenience wrapper: converts the whole source access without resizing
// (target size equals the source's own width and height).
138 MovePtr<tcu::TextureLevel> convertToRGBA (const tcu::ConstPixelBufferAccess& src)
140 return convertToRGBASized(src, tcu::UVec2((uint32_t)src.getWidth(), (uint32_t)src.getHeight()));
// Downloads a decoded picture from the video decode queue into host-visible
// memory. Flow: (1) on the decode queue, make the decode write visible and
// release ownership of the image to the transfer queue family; (2) on the
// transfer queue, acquire ownership and transition GENERAL ->
// TRANSFER_SRC_OPTIMAL; (3) the two submissions are ordered by a semaphore
// (decode signals, transfer waits); (4) read back the planes with
// vkt::ycbcr::downloadImage().
// NOTE(review): several parameter/argument lines (device, image, format, and
// some barrier arguments such as access masks and image handles) are not
// visible in this chunk of the listing.
143 MovePtr<MultiPlaneImageData> getDecodedImage (const DeviceInterface& vkd,
145 Allocator& allocator,
147 VkImageLayout layout,
149 VkExtent2D codedExtent,
150 deUint32 queueFamilyIndexTransfer,
151 deUint32 queueFamilyIndexDecode)
153 MovePtr<MultiPlaneImageData> multiPlaneImageData (new MultiPlaneImageData(format, tcu::UVec2(codedExtent.width, codedExtent.height)));
154 const VkQueue queueDecode = getDeviceQueue(vkd, device, queueFamilyIndexDecode, 0u);
155 const VkQueue queueTransfer = getDeviceQueue(vkd, device, queueFamilyIndexTransfer, 0u);
156 const VkImageSubresourceRange imageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
// Barrier 1: decode-write -> visible, layout to GENERAL (decode queue).
157 const VkImageMemoryBarrier2KHR imageBarrierDecode = makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR,
158 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR,
159 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
162 VK_IMAGE_LAYOUT_GENERAL,
164 imageSubresourceRange);
// Barrier 2: queue-family ownership RELEASE, recorded on the decode queue.
165 const VkImageMemoryBarrier2KHR imageBarrierOwnershipDecode = makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
167 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
169 VK_IMAGE_LAYOUT_GENERAL,
170 VK_IMAGE_LAYOUT_GENERAL,
172 imageSubresourceRange,
173 queueFamilyIndexDecode,
174 queueFamilyIndexTransfer);
// Barrier 3: matching ownership ACQUIRE, recorded on the transfer queue.
175 const VkImageMemoryBarrier2KHR imageBarrierOwnershipTransfer = makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
177 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
179 VK_IMAGE_LAYOUT_GENERAL,
180 VK_IMAGE_LAYOUT_GENERAL,
182 imageSubresourceRange,
183 queueFamilyIndexDecode,
184 queueFamilyIndexTransfer);
// Barrier 4: transition to TRANSFER_SRC_OPTIMAL for the readback copy.
185 const VkImageMemoryBarrier2KHR imageBarrierTransfer = makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR,
186 VK_ACCESS_2_TRANSFER_READ_BIT_KHR,
187 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
189 VK_IMAGE_LAYOUT_GENERAL,
190 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
192 imageSubresourceRange);
// One command pool/buffer per queue family: decode-side release,
// transfer-side acquire.
193 const Move<VkCommandPool> cmdDecodePool (makeCommandPool(vkd, device, queueFamilyIndexDecode));
194 const Move<VkCommandBuffer> cmdDecodeBuffer (allocateCommandBuffer(vkd, device, *cmdDecodePool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
195 const Move<VkCommandPool> cmdTransferPool (makeCommandPool(vkd, device, queueFamilyIndexTransfer));
196 const Move<VkCommandBuffer> cmdTransferBuffer (allocateCommandBuffer(vkd, device, *cmdTransferPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
197 Move<VkSemaphore> semaphore = createSemaphore(vkd, device);
198 Move<VkFence> decodeFence = createFence(vkd, device);
199 Move<VkFence> transferFence = createFence(vkd, device);
200 VkFence fences[] = { *decodeFence, *transferFence };
201 const VkPipelineStageFlags waitDstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
// Decode submission signals `semaphore`; transfer submission waits on it,
// so the acquire never runs before the release.
202 const VkSubmitInfo decodeSubmitInfo
204 VK_STRUCTURE_TYPE_SUBMIT_INFO, // VkStructureType sType;
205 DE_NULL, // const void* pNext;
206 0u, // deUint32 waitSemaphoreCount;
207 DE_NULL, // const VkSemaphore* pWaitSemaphores;
208 DE_NULL, // const VkPipelineStageFlags* pWaitDstStageMask;
209 1u, // deUint32 commandBufferCount;
210 &*cmdDecodeBuffer, // const VkCommandBuffer* pCommandBuffers;
211 1u, // deUint32 signalSemaphoreCount;
212 &*semaphore, // const VkSemaphore* pSignalSemaphores;
214 const VkSubmitInfo transferSubmitInfo
216 VK_STRUCTURE_TYPE_SUBMIT_INFO, // VkStructureType sType;
217 DE_NULL, // const void* pNext;
218 1u, // deUint32 waitSemaphoreCount;
219 &*semaphore, // const VkSemaphore* pWaitSemaphores;
220 &waitDstStageMask, // const VkPipelineStageFlags* pWaitDstStageMask;
221 1u, // deUint32 commandBufferCount;
222 &*cmdTransferBuffer, // const VkCommandBuffer* pCommandBuffers;
223 0u, // deUint32 signalSemaphoreCount;
224 DE_NULL, // const VkSemaphore* pSignalSemaphores;
227 DEBUGLOG(std::cout << "getDecodedImage: " << image << " " << layout << std::endl);
229 beginCommandBuffer(vkd, *cmdDecodeBuffer, 0u);
230 cmdPipelineImageMemoryBarrier2(vkd, *cmdDecodeBuffer, &imageBarrierDecode);
231 cmdPipelineImageMemoryBarrier2(vkd, *cmdDecodeBuffer, &imageBarrierOwnershipDecode);
232 endCommandBuffer(vkd, *cmdDecodeBuffer);
234 beginCommandBuffer(vkd, *cmdTransferBuffer, 0u);
235 cmdPipelineImageMemoryBarrier2(vkd, *cmdTransferBuffer, &imageBarrierOwnershipTransfer);
236 cmdPipelineImageMemoryBarrier2(vkd, *cmdTransferBuffer, &imageBarrierTransfer);
237 endCommandBuffer(vkd, *cmdTransferBuffer);
239 VK_CHECK(vkd.queueSubmit(queueDecode, 1u, &decodeSubmitInfo, *decodeFence));
240 VK_CHECK(vkd.queueSubmit(queueTransfer, 1u, &transferSubmitInfo, *transferFence));
// Block until both queues have finished before reading the image back.
242 VK_CHECK(vkd.waitForFences(device, DE_LENGTH_OF_ARRAY(fences), fences, DE_TRUE, ~0ull));
244 vkt::ycbcr::downloadImage(vkd, device, queueFamilyIndexTransfer, allocator, image, multiPlaneImageData.get(), 0, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
246 return multiPlaneImageData;
// Test instance driving one decode test case: loads the clip, feeds it
// through the ffmpeg demuxer + NVIDIA parser into VideoBaseDecoder, and
// verifies decoded frames against procedural references.
// (Access-specifier lines are not visible in this chunk of the listing.)
249 class VideoDecodeTestInstance : public VideoBaseTestInstance
// A reference probe: (x, y, frame) position paired with expected (V, Y, U) value.
252 typedef std::pair<tcu::IVec3, tcu::IVec3> ReferencePixel;
254 VideoDecodeTestInstance (Context& context,
255 const CaseDef& data);
256 ~VideoDecodeTestInstance (void);
// Loads the raw elementary-stream clip selected by m_caseDef.testType.
258 MovePtr<vector<deUint8>> loadTestVideoData (void);
// iterate() dispatches to one of the three strategies below based on
// m_frameCountTrigger (1, 2, or more frames dequeued per batch).
260 tcu::TestStatus iterate (void);
261 tcu::TestStatus iterateSingleFrame (void);
262 tcu::TestStatus iterateDoubleFrame (void);
263 tcu::TestStatus iterateMultipleFrame (void);
264 bool verifyImage (uint32_t frameNumber,
265 const MultiPlaneImageData& multiPlaneImageData);
266 bool verifyImageMultipleFrame (uint32_t frameNumber,
267 const MultiPlaneImageData& multiPlaneImageData);
268 bool verifyImageMultipleFrameNoReference (uint32_t frameNumber,
269 const MultiPlaneImageData& multiPlaneImageData,
270 const vector<ReferencePixel>& referencePixels);
271 bool verifyImageMultipleFrameWithReference (uint32_t frameNumber,
272 const MultiPlaneImageData& multiPlaneImageData);
275 MovePtr<VideoBaseDecoder> m_decoder; // owns session/DPB state for the run
276 VkVideoCodecOperationFlagBitsKHR m_videoCodecOperation; // H.264 or H.265 decode
277 int32_t m_frameCountTrigger; // frames dequeued per batch
278 bool m_queryWithStatusRequired; // Case 9: decode result queries
// Constructor: derives all per-case decode parameters from the test type —
// frame ordering (swapped / randomized), GOP size and count, whether command
// buffers are submitted during or after recording, the per-batch frame count
// trigger, and the codec operation (H.264 vs H.265).
281 VideoDecodeTestInstance::VideoDecodeTestInstance (Context& context, const CaseDef& data)
282 : VideoBaseTestInstance (context)
284 , m_decoder (new VideoBaseDecoder(context))
285 , m_videoCodecOperation (VK_VIDEO_CODEC_OPERATION_NONE_KHR)
286 , m_frameCountTrigger (0)
287 , m_queryWithStatusRequired (false)
289 const bool queryResultWithStatus = m_caseDef.testType == TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS;
// The status-query case also exercises the swapped-order path.
290 const bool twoCachedPicturesSwapped = queryResultWithStatus
291 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER
292 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER;
293 const bool randomOrSwapped = twoCachedPicturesSwapped
294 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER
295 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER;
// Resolution-change clips use 15-frame GOPs; the default-value arm of these
// conditionals is not visible in this chunk of the listing.
296 const uint32_t gopSize = m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE ? 15
297 : m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB ? 15
299 const uint32_t gopCount = m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE ? 2
300 : m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB ? 1
// Simple I / I-P cases submit while recording; all others submit afterwards.
302 const bool submitDuringRecord = m_caseDef.testType == TEST_TYPE_H264_DECODE_I
303 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P
304 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I
305 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P;
306 const bool submitAfter = !submitDuringRecord;
// Frames dequeued per batch: 1 (single), 2 (double), or 26/30 (multiple).
308 m_frameCountTrigger = m_caseDef.testType == TEST_TYPE_H264_DECODE_I ? 1
309 : m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P ? 2
310 : m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13 ? 13 * 2
311 : m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER ? 2
312 : m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER ? 13 * 2
313 : m_caseDef.testType == TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS ? 2
314 : m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE ? 15 * 2
315 : m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB ? 15 * 2
316 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I ? 1
317 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P ? 2
318 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER ? 2
319 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13 ? 13 * 2
320 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER ? 13 * 2
323 m_decoder->setDecodeParameters(randomOrSwapped, queryResultWithStatus, m_frameCountTrigger, submitAfter, gopSize, gopCount);
// Codec selection relies on the enum laying out all H.264 cases before the
// first H.265 case (see the TestType enumerators above).
325 m_videoCodecOperation = de::inBounds(m_caseDef.testType, TEST_TYPE_H264_DECODE_I, TEST_TYPE_H265_DECODE_I) ? VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR
326 : de::inBounds(m_caseDef.testType, TEST_TYPE_H265_DECODE_I, TEST_TYPE_LAST) ? VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
327 : VK_VIDEO_CODEC_OPERATION_NONE_KHR;
329 DE_ASSERT(m_videoCodecOperation != VK_VIDEO_CODEC_OPERATION_NONE_KHR);
331 m_queryWithStatusRequired = (m_caseDef.testType == TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS);
// Destructor — defined out of line; its body lines are not visible in this
// chunk of the listing.
334 VideoDecodeTestInstance::~VideoDecodeTestInstance (void)
// Returns the raw elementary-stream clip matching the current test type:
// clip A (176x144 I/I-P), the 13-frame-GOP H.264/H.265 clips, clip C
// (resolution change), or clip D (H.265 I/I-P). Throws on unknown types.
338 MovePtr<vector<deUint8>> VideoDecodeTestInstance::loadTestVideoData (void)
340 switch (m_caseDef.testType)
342 case TEST_TYPE_H264_DECODE_I:
343 case TEST_TYPE_H264_DECODE_I_P:
344 case TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER:
345 case TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS: return loadVideoDataClipA();
346 case TEST_TYPE_H264_DECODE_I_P_B_13:
347 case TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER: return loadVideoDataClipH264G13();
348 case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE:
349 case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB: return loadVideoDataClipC();
350 case TEST_TYPE_H265_DECODE_I:
351 case TEST_TYPE_H265_DECODE_I_P:
352 case TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER: return loadVideoDataClipD();
353 case TEST_TYPE_H265_DECODE_I_P_B_13:
354 case TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER: return loadVideoDataClipH265G13();
356 default: TCU_THROW(InternalError, "Unknown testType");
// Dispatches to the iteration strategy matching the per-batch frame count
// chosen in the constructor (1, 2, or many frames per dequeue batch).
360 tcu::TestStatus VideoDecodeTestInstance::iterate (void)
362 if (m_frameCountTrigger == 1)
363 return iterateSingleFrame();
364 else if (m_frameCountTrigger == 2)
365 return iterateDoubleFrame();
367 return iterateMultipleFrame();
// Returns the codec-std extension name/version to hand to the NVIDIA parser.
// The versions are pinned to 0.9.8 (H.264) / 0.9.9 (H.265) rather than the
// current header SPEC_VERSION macros — see the FIXME: these are the last
// spec versions the parser library accepts.
370 vk::VkExtensionProperties getExtensionVersion (VkVideoCodecOperationFlagBitsKHR videoCodecOperation)
372 // FIXME: last spec version accepted by the parser function
373 //static const vk::VkExtensionProperties h264StdExtensionVersion = { VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME, VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION };
374 static const vk::VkExtensionProperties h264StdExtensionVersion = { VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME, VK_MAKE_VIDEO_STD_VERSION(0, 9, 8) };
375 //static const vk::VkExtensionProperties h265StdExtensionVersion = { VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME, VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION };
376 static const vk::VkExtensionProperties h265StdExtensionVersion = { VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME, VK_MAKE_VIDEO_STD_VERSION(0, 9, 9) };
378 if (videoCodecOperation == VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR) {
379 return h264StdExtensionVersion;
380 } else if (videoCodecOperation == VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR) {
381 return h265StdExtensionVersion;
384 TCU_THROW(InternalError, "Unsupported Codec Type");
// Decodes the stream one displayed frame at a time: demux/parse until the
// frame buffer yields a picture, download it, verify it against the
// procedural reference, release it, and repeat until the stream ends.
// Passes only if every decoded frame verified correctly (and at least one
// frame was decoded).
388 tcu::TestStatus VideoDecodeTestInstance::iterateSingleFrame (void)
390 const VideoDevice::VideoDeviceFlags videoDeviceFlags = VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED
391 | (m_queryWithStatusRequired ? VideoDevice::VIDEO_DEVICE_FLAG_QUERY_WITH_STATUS_FOR_DECODE_SUPPORT : 0);
392 const VkDevice device = getDeviceSupportingQueue(VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT, m_videoCodecOperation, videoDeviceFlags);
393 const DeviceInterface& vkd = getDeviceDriver();
394 const deUint32 queueFamilyIndexDecode = getQueueFamilyIndexDecode();
395 const deUint32 queueFamilyIndexTransfer = getQueueFamilyIndexTransfer();
396 Allocator& allocator = getAllocator();
397 MovePtr<vector<deUint8>> videoData = loadTestVideoData();
398 MovePtr<IfcFfmpegDemuxer> ffmpegDemuxer (m_decoder->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData));
399 VkExtensionProperties stdExtensionVersion = getExtensionVersion(m_videoCodecOperation);
401 MovePtr<IfcVulkanVideoDecodeParser> vulkanVideoDecodeParser (m_decoder->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation, &stdExtensionVersion));
402 bool videoStreamHasEnded = false;
403 int32_t framesInQueue = 0;
404 int32_t frameNumber = 0;
405 int32_t framesCorrect = 0;
406 DecodedFrame frame = initDecodeFrame();
408 m_decoder->initialize(m_videoCodecOperation, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);
// The decoder acts as the parser's client: parsed pictures are delivered
// back to it via the NvidiaVulkanParserVideoDecodeClient interface.
410 if (!vulkanVideoDecodeParser->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder.get())))
412 TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
415 while (framesInQueue > 0 || !videoStreamHasEnded)
417 framesInQueue = m_decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frame);
// Keep feeding demuxed packets to the parser until a decoded picture
// becomes available or the stream is exhausted.
419 while (framesInQueue == 0 && !videoStreamHasEnded)
421 if (!videoStreamHasEnded)
425 const bool demuxerSuccess = ffmpegDemuxer->demux(&pData, &size);
426 const bool parserSuccess = vulkanVideoDecodeParser->parseByteStream(pData, size);
428 if (!demuxerSuccess || !parserSuccess)
429 videoStreamHasEnded = true;
432 framesInQueue = m_decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frame);
435 if (frame.pictureIndex >= 0)
437 const VkExtent2D imageExtent = frame.pDecodedImage->getExtent();
438 const VkImage image = frame.pDecodedImage->getImage();
439 const VkFormat format = frame.pDecodedImage->getFormat();
440 const VkImageLayout layout = frame.decodedImageLayout;
441 MovePtr<MultiPlaneImageData> resultImage = getDecodedImage(vkd, device, allocator, image, layout, format, imageExtent, queueFamilyIndexTransfer, queueFamilyIndexDecode);
// NOTE(review): the framesCorrect++/frameNumber++ statements are not
// visible in this chunk of the listing.
443 if (verifyImage(frameNumber, *resultImage))
446 m_decoder->ReleaseDisplayedFrame(&frame);
449 if (frameNumber >= 1)
454 if (!vulkanVideoDecodeParser->deinitialize())
456 TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");
459 if (framesCorrect > 0 && framesCorrect == frameNumber)
460 return tcu::TestStatus::pass("pass");
462 return tcu::TestStatus::fail("Some frames has not been decoded correctly (" + de::toString(framesCorrect) + "/" + de::toString(frameNumber) + ")");
// Same structure as iterateSingleFrame(), but waits until two displayed
// frames are cached, then dequeues and verifies them as a pair — this is
// what allows the "not matching order" cases to present frames swapped.
// For the status-query case the image comparison is skipped (the query
// result itself is the check, handled inside the decoder).
465 tcu::TestStatus VideoDecodeTestInstance::iterateDoubleFrame (void)
467 const VideoDevice::VideoDeviceFlags videoDeviceFlags = VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED
468 | (m_queryWithStatusRequired ? VideoDevice::VIDEO_DEVICE_FLAG_QUERY_WITH_STATUS_FOR_DECODE_SUPPORT : 0);
469 const VkDevice device = getDeviceSupportingQueue(VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT, m_videoCodecOperation, videoDeviceFlags);
470 const DeviceInterface& vkd = getDeviceDriver();
471 const deUint32 queueFamilyIndexDecode = getQueueFamilyIndexDecode();
472 const deUint32 queueFamilyIndexTransfer = getQueueFamilyIndexTransfer();
473 Allocator& allocator = getAllocator();
474 MovePtr<vector<deUint8>> videoData = loadTestVideoData();
475 MovePtr<IfcFfmpegDemuxer> ffmpegDemuxer (m_decoder->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData));
476 VkExtensionProperties stdExtensionVersion = getExtensionVersion(m_videoCodecOperation);
478 MovePtr<IfcVulkanVideoDecodeParser> vulkanVideoDecodeParser (m_decoder->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation, &stdExtensionVersion));
479 bool videoStreamHasEnded = false;
480 int32_t framesInQueue = 0;
481 int32_t frameNumber = 0;
482 int32_t framesCorrect = 0;
483 DecodedFrame frames[2] = { initDecodeFrame(), initDecodeFrame() };
485 m_decoder->initialize(m_videoCodecOperation, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);
487 if (!vulkanVideoDecodeParser->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder.get())))
489 TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
492 while (framesInQueue > 0 || !videoStreamHasEnded)
494 framesInQueue = m_decoder->GetVideoFrameBuffer()->GetDisplayFramesCount();
// Parse until at least two displayable frames are cached (or stream end).
496 while (framesInQueue < 2 && !videoStreamHasEnded)
498 if (!videoStreamHasEnded)
502 const bool demuxerSuccess = ffmpegDemuxer->demux(&pData, &size);
503 const bool parserSuccess = vulkanVideoDecodeParser->parseByteStream(pData, size);
505 if (!demuxerSuccess || !parserSuccess)
506 videoStreamHasEnded = true;
509 framesInQueue = m_decoder->GetVideoFrameBuffer()->GetDisplayFramesCount();
// Dequeue the pair first, then verify both, so the decoder has presented
// them in its chosen (possibly swapped) display order.
512 for (size_t frameNdx = 0; frameNdx < 2; ++frameNdx)
514 DecodedFrame& frame = frames[frameNdx];
516 m_decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frame);
519 for (size_t frameNdx = 0; frameNdx < 2; ++frameNdx)
521 DecodedFrame& frame = frames[frameNdx];
522 const VkExtent2D imageExtent = frame.pDecodedImage->getExtent();
523 const VkImage image = frame.pDecodedImage->getImage();
524 const VkFormat format = frame.pDecodedImage->getFormat();
525 const VkImageLayout layout = frame.decodedImageLayout;
527 if (frame.pictureIndex >= 0)
529 const bool assumeCorrect = m_caseDef.testType == TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS;
530 MovePtr<MultiPlaneImageData> resultImage = getDecodedImage(vkd, device, allocator, image, layout, format, imageExtent, queueFamilyIndexTransfer, queueFamilyIndexDecode);
// NOTE(review): the framesCorrect++/frameNumber++ statements are not
// visible in this chunk of the listing.
532 if (assumeCorrect || verifyImage(frameNumber, *resultImage))
535 m_decoder->ReleaseDisplayedFrame(&frame);
538 if (frameNumber >= DE_LENGTH_OF_ARRAY(frames))
543 if (frameNumber >= DE_LENGTH_OF_ARRAY(frames))
547 if (!vulkanVideoDecodeParser->deinitialize())
548 TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");
550 if (framesCorrect > 0 && framesCorrect == frameNumber)
551 return tcu::TestStatus::pass("pass");
553 return tcu::TestStatus::fail("Some frames has not been decoded correctly (" + de::toString(framesCorrect) + "/" + de::toString(frameNumber) + ")");
// Batch variant: accumulates m_frameCountTrigger displayable frames (e.g.
// 26 for the 13-frame-GOP B-frame clips, 30 for resolution-change clips)
// before dequeueing and verifying them with verifyImageMultipleFrame().
556 tcu::TestStatus VideoDecodeTestInstance::iterateMultipleFrame (void)
558 const VideoDevice::VideoDeviceFlags videoDeviceFlags = VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED
559 | (m_queryWithStatusRequired ? VideoDevice::VIDEO_DEVICE_FLAG_QUERY_WITH_STATUS_FOR_DECODE_SUPPORT : 0);
560 const VkDevice device = getDeviceSupportingQueue(VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT, m_videoCodecOperation, videoDeviceFlags);
561 const DeviceInterface& vkd = getDeviceDriver();
562 const deUint32 queueFamilyIndexDecode = getQueueFamilyIndexDecode();
563 const deUint32 queueFamilyIndexTransfer = getQueueFamilyIndexTransfer();
564 Allocator& allocator = getAllocator();
565 MovePtr<vector<deUint8>> videoData = loadTestVideoData();
566 MovePtr<IfcFfmpegDemuxer> ffmpegDemuxer (m_decoder->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData));
567 VkExtensionProperties stdExtensionVersion = getExtensionVersion(m_videoCodecOperation);
569 MovePtr<IfcVulkanVideoDecodeParser> vulkanVideoDecodeParser (m_decoder->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation, &stdExtensionVersion));
570 bool videoStreamHasEnded = false;
571 int32_t framesInQueue = 0;
572 int32_t frameNumber = 0;
573 int32_t framesCorrect = 0;
574 vector<DecodedFrame> frames (m_frameCountTrigger, initDecodeFrame());
576 m_decoder->initialize(m_videoCodecOperation, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);
578 if (!vulkanVideoDecodeParser->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder.get())))
579 TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
581 while (framesInQueue > 0 || !videoStreamHasEnded)
583 framesInQueue = m_decoder->GetVideoFrameBuffer()->GetDisplayFramesCount();
// Parse until a full batch of displayable frames is cached (or stream end).
585 while (framesInQueue < m_frameCountTrigger && !videoStreamHasEnded)
587 if (!videoStreamHasEnded)
591 const bool demuxerSuccess = ffmpegDemuxer->demux(&pData, &size);
592 const bool parserSuccess = vulkanVideoDecodeParser->parseByteStream(pData, size);
594 if (!demuxerSuccess || !parserSuccess)
595 videoStreamHasEnded = true;
598 framesInQueue = m_decoder->GetVideoFrameBuffer()->GetDisplayFramesCount();
601 for (int32_t frameNdx = 0; frameNdx < m_frameCountTrigger; ++frameNdx)
603 DecodedFrame& frame = frames[frameNdx];
605 m_decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frame);
610 for (int32_t frameNdx = 0; frameNdx < m_frameCountTrigger; ++frameNdx)
612 DecodedFrame& frame = frames[frameNdx];
613 const VkExtent2D imageExtent = frame.pDecodedImage->getExtent();
614 const VkImage image = frame.pDecodedImage->getImage();
615 const VkFormat format = frame.pDecodedImage->getFormat();
616 const VkImageLayout layout = frame.decodedImageLayout;
618 if (frame.pictureIndex >= 0)
620 MovePtr<MultiPlaneImageData> resultImage = getDecodedImage(vkd, device, allocator, image, layout, format, imageExtent, queueFamilyIndexTransfer, queueFamilyIndexDecode);
// NOTE(review): the declaration of `success` and the counter increments
// are not visible in this chunk of the listing.
622 if (success && verifyImageMultipleFrame(frameNumber, *resultImage))
627 m_decoder->ReleaseDisplayedFrame(&frame);
633 if (!vulkanVideoDecodeParser->deinitialize())
634 TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");
636 if (framesCorrect > 0 && framesCorrect == frameNumber)
637 return tcu::TestStatus::pass("pass");
639 return tcu::TestStatus::fail("Some frames has not been decoded correctly (" + de::toString(framesCorrect) + "/" + de::toString(frameNumber) + ")");
// Verifies one decoded frame of the color-bar clip: builds, per plane, a
// reference image that is a solid color up to edgeX and "blank"
// (16/128/128) beyond it — the bar advances 16 pixels per frame and the
// color changes every 10 frames — then does an exact (threshold 0)
// integer comparison of each plane.
// NOTE(review): channel indices 0/1/2 are labelled V/Y/U by the local
// names here — confirm against MultiPlaneImageData's plane ordering.
642 bool VideoDecodeTestInstance::verifyImage (uint32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData)
644 const tcu::UVec2 imageSize = multiPlaneImageData.getSize();
645 const uint32_t barCount = 10;
646 const uint32_t barWidth = 16;
647 const uint32_t barNum = uint32_t(frameNumber) % barCount;
648 const uint32_t edgeX = imageSize.x() - barWidth * barNum;
649 const uint32_t colorNdx = uint32_t(frameNumber) / barCount;
// Expected plane values for each of the three bar colors in the clip.
650 const int32_t refColorsV[] = { 240, 34, 110 };
651 const int32_t refColorsY[] = { 81, 145, 41 };
652 const int32_t refColorsU[] = { 90, 0, 0 };
653 const tcu::UVec4 refColorV = tcu::UVec4(refColorsV[colorNdx], 0, 0, 0);
654 const tcu::UVec4 refColorY = tcu::UVec4(refColorsY[colorNdx], 0, 0, 0);
655 const tcu::UVec4 refColorU = tcu::UVec4(refColorsU[colorNdx], 0, 0, 0);
// "Blank" is video black: Y=16, Cb=Cr=128.
656 const tcu::UVec4 refBlankV = tcu::UVec4(128, 0, 0, 0);
657 const tcu::UVec4 refBlankY = tcu::UVec4( 16, 0, 0, 0);
658 const tcu::UVec4 refBlankU = tcu::UVec4(128, 0, 0, 0);
659 tcu::ConstPixelBufferAccess outPixelBufferAccessV = multiPlaneImageData.getChannelAccess(0);
660 tcu::ConstPixelBufferAccess outPixelBufferAccessY = multiPlaneImageData.getChannelAccess(1);
661 tcu::ConstPixelBufferAccess outPixelBufferAccessU = multiPlaneImageData.getChannelAccess(2);
662 tcu::TextureLevel refPixelBufferV (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
663 tcu::TextureLevel refPixelBufferY (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
664 tcu::TextureLevel refPixelBufferU (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
665 tcu::PixelBufferAccess refPixelBufferAccessV = refPixelBufferV.getAccess();
666 tcu::PixelBufferAccess refPixelBufferAccessY = refPixelBufferY.getAccess();
667 tcu::PixelBufferAccess refPixelBufferAccessU = refPixelBufferU.getAccess();
668 tcu::TestLog& log = m_context.getTestContext().getLog();
669 const string titleV = "Rendered frame " + de::toString(frameNumber) + ". V Component";
670 const string titleY = "Rendered frame " + de::toString(frameNumber) + ". Y Component";
671 const string titleU = "Rendered frame " + de::toString(frameNumber) + ". U Component";
// Zero threshold: the decode output must match the reference exactly.
672 const tcu::UVec4 threshold = tcu::UVec4(0, 0, 0, 0);
674 for (uint32_t x = 0; x < imageSize.x(); ++x)
676 const tcu::UVec4& colorV = x < edgeX ? refColorV : refBlankV;
677 const tcu::UVec4& colorY = x < edgeX ? refColorY : refBlankY;
678 const tcu::UVec4& colorU = x < edgeX ? refColorU : refBlankU;
680 for (uint32_t y = 0; y < imageSize.y(); ++y)
682 refPixelBufferAccessV.setPixel(colorV, x, y);
683 refPixelBufferAccessY.setPixel(colorY, x, y);
684 refPixelBufferAccessU.setPixel(colorU, x, y);
688 const bool resultV = tcu::intThresholdCompare(log, titleV.c_str(), "", refPixelBufferAccessV, outPixelBufferAccessV, threshold, tcu::COMPARE_LOG_ON_ERROR);
689 const bool resultY = tcu::intThresholdCompare(log, titleY.c_str(), "", refPixelBufferAccessY, outPixelBufferAccessY, threshold, tcu::COMPARE_LOG_ON_ERROR);
690 const bool resultU = tcu::intThresholdCompare(log, titleU.c_str(), "", refPixelBufferAccessU, outPixelBufferAccessU, threshold, tcu::COMPARE_LOG_ON_ERROR);
692 return resultV && resultY && resultU;
// Multi-frame verification dispatcher. For the 13-frame-GOP B-frame clips
// (which have no full procedural reference) it spot-checks a few known
// (x, y, frame) -> (plane values) probes at the image corners of frames 0
// and 12; all other multi-frame cases get a full per-plane reference
// comparison via verifyImageMultipleFrameWithReference().
695 bool VideoDecodeTestInstance::verifyImageMultipleFrame (uint32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData)
697 const bool noReferenceTests = m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13
698 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER
699 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13
700 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER;
702 if (noReferenceTests)
704 const bool h264 = m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13
705 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER;
// Probe positions are IVec3(x, y, frame); values are IVec3 per-plane
// expectations for the 1920x1080 clips.
706 const vector<ReferencePixel> referencePixels264
708 ReferencePixel(tcu::IVec3( 0, 0, 0), tcu::IVec3( 124, 53, 140)),
709 ReferencePixel(tcu::IVec3(1920 - 1, 1080 - 1, 0), tcu::IVec3( 131, 190, 115)),
710 ReferencePixel(tcu::IVec3( 0, 0, 12), tcu::IVec3( 140, 223, 92)),
711 ReferencePixel(tcu::IVec3(1920 - 1, 1080 - 1, 12), tcu::IVec3( 138, 166, 98)),
713 const vector<ReferencePixel> referencePixels265
715 ReferencePixel(tcu::IVec3( 0, 0, 0), tcu::IVec3( 124, 55, 144)),
716 ReferencePixel(tcu::IVec3(1920 - 1, 1080 - 1, 0), tcu::IVec3( 130, 190, 114)),
717 ReferencePixel(tcu::IVec3( 0, 0, 12), tcu::IVec3( 142, 210, 94)),
718 ReferencePixel(tcu::IVec3(1920 - 1, 1080 - 1, 12), tcu::IVec3( 137, 166, 96)),
720 const vector<ReferencePixel>& referencePixels = h264 ? referencePixels264 : referencePixels265;
722 return verifyImageMultipleFrameNoReference(frameNumber, multiPlaneImageData, referencePixels);
725 return verifyImageMultipleFrameWithReference(frameNumber, multiPlaneImageData);
728 bool VideoDecodeTestInstance::verifyImageMultipleFrameWithReference (uint32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData)
730 tcu::TestLog& log = m_context.getTestContext().getLog();
731 const bool firstHalf = frameNumber < 15;
732 const bool resolutionChange = m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE || m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB;
733 const uint32_t k = resolutionChange
734 ? (firstHalf ? 2 : 1)
736 const uint32_t cellSize = 16 * k;
737 const uint32_t cellCountX = 11;
738 const uint32_t cellCountV = 9;
739 const tcu::UVec2 imageSize = { cellSize * cellCountX, cellSize * cellCountV };
740 const string titleV = "Rendered frame " + de::toString(frameNumber) + ". V Component";
741 const tcu::UVec4 refColor0V = tcu::UVec4(128, 0, 0, 255);
742 const tcu::UVec4 refColor1V = tcu::UVec4(128, 0, 0, 255);
743 const tcu::UVec4& refColorV = firstHalf ? refColor0V : refColor1V;
744 const tcu::UVec4& refBlankV = firstHalf ? refColor1V : refColor0V;
745 tcu::TextureLevel refPixelBufferV (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
746 tcu::PixelBufferAccess refPixelBufferAccessV = refPixelBufferV.getAccess();
747 MovePtr<tcu::TextureLevel> outPixelBufferV = convertToRGBASized(multiPlaneImageData.getChannelAccess(0), imageSize);
748 tcu::PixelBufferAccess outPixelBufferAccessV = outPixelBufferV->getAccess();
749 const string titleY = "Rendered frame " + de::toString(frameNumber) + ". Y Component";
750 const tcu::UVec4 refColor0Y = tcu::UVec4(235, 0, 0, 255);
751 const tcu::UVec4 refColor1Y = tcu::UVec4( 16, 0, 0, 255);
752 const tcu::UVec4& refColorY = firstHalf ? refColor0Y : refColor1Y;
753 const tcu::UVec4& refBlankY = firstHalf ? refColor1Y : refColor0Y;
754 tcu::TextureLevel refPixelBufferY (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
755 tcu::PixelBufferAccess refPixelBufferAccessY = refPixelBufferY.getAccess();
756 MovePtr<tcu::TextureLevel> outPixelBufferY = convertToRGBASized(multiPlaneImageData.getChannelAccess(1), imageSize);
757 tcu::PixelBufferAccess outPixelBufferAccessY = outPixelBufferY->getAccess();
758 const string titleU = "Rendered frame " + de::toString(frameNumber) + ". U Component";
759 const tcu::UVec4 refColor0U = tcu::UVec4(128, 0, 0, 255);
760 const tcu::UVec4 refColor1U = tcu::UVec4(128, 0, 0, 255);
761 const tcu::UVec4& refColorU = firstHalf ? refColor0U : refColor1U;
762 const tcu::UVec4& refBlankU = firstHalf ? refColor1U : refColor0U;
763 tcu::TextureLevel refPixelBufferU (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
764 tcu::PixelBufferAccess refPixelBufferAccessU = refPixelBufferU.getAccess();
765 MovePtr<tcu::TextureLevel> outPixelBufferU = convertToRGBASized(multiPlaneImageData.getChannelAccess(2), imageSize);
766 tcu::PixelBufferAccess outPixelBufferAccessU = outPixelBufferU->getAccess();
767 const tcu::UVec4 threshold = tcu::UVec4(0, 0, 0, 0);
769 for (uint32_t x = 0; x < imageSize.x(); ++x)
770 for (uint32_t y = 0; y < imageSize.y(); ++y)
772 refPixelBufferAccessV.setPixel(refBlankV, x, y);
773 refPixelBufferAccessY.setPixel(refBlankY, x, y);
774 refPixelBufferAccessU.setPixel(refBlankU, x, y);
777 for (uint32_t cellNdx = 0; cellNdx <= frameNumber % 15; cellNdx++)
779 const uint32_t cellOfs = firstHalf ? 0 : 6 * cellSize;
780 const uint32_t cellX0 = cellSize * (cellNdx % 5);
781 const uint32_t cellV0 = cellSize * (cellNdx / 5) + cellOfs;
782 const uint32_t cellX1 = cellX0 + cellSize;
783 const uint32_t cellV1 = cellV0 + cellSize;
785 for (uint32_t x = cellX0; x < cellX1; ++x)
786 for (uint32_t y = cellV0; y < cellV1; ++y)
788 refPixelBufferAccessV.setPixel(refColorV, x, y);
789 refPixelBufferAccessY.setPixel(refColorY, x, y);
790 refPixelBufferAccessU.setPixel(refColorU, x, y);
794 const bool resultV = tcu::intThresholdCompare(log, titleV.c_str(), "", refPixelBufferAccessV, outPixelBufferAccessV, threshold, tcu::COMPARE_LOG_ON_ERROR);
795 const bool resultY = tcu::intThresholdCompare(log, titleY.c_str(), "", refPixelBufferAccessY, outPixelBufferAccessY, threshold, tcu::COMPARE_LOG_ON_ERROR);
796 const bool resultU = tcu::intThresholdCompare(log, titleU.c_str(), "", refPixelBufferAccessU, outPixelBufferAccessU, threshold, tcu::COMPARE_LOG_ON_ERROR);
798 return resultV && resultY && resultU;
801 bool VideoDecodeTestInstance::verifyImageMultipleFrameNoReference (uint32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData, const vector<ReferencePixel>& referencePixels)
803 bool decodeFrame = false;
804 for (size_t i = 0; i < referencePixels.size(); i++)
805 if (referencePixels[i].first.z() == static_cast<int>(frameNumber))
810 MovePtr<tcu::TextureLevel> outPixelBufferV = convertToRGBA(multiPlaneImageData.getChannelAccess(0));
811 tcu::PixelBufferAccess outPixelBufferAccessV = outPixelBufferV->getAccess();
812 MovePtr<tcu::TextureLevel> outPixelBufferY = convertToRGBA(multiPlaneImageData.getChannelAccess(1));
813 tcu::PixelBufferAccess outPixelBufferAccessY = outPixelBufferY->getAccess();
814 MovePtr<tcu::TextureLevel> outPixelBufferU = convertToRGBA(multiPlaneImageData.getChannelAccess(2));
815 tcu::PixelBufferAccess outPixelBufferAccessU = outPixelBufferU->getAccess();
816 tcu::TestLog& log = m_context.getTestContext().getLog();
818 log << tcu::TestLog::Message << "TODO: WARNING: ONLY FEW PIXELS ARE CHECKED\n" << tcu::TestLog::EndMessage;
820 log << tcu::TestLog::ImageSet("Frame", "")
821 << tcu::TestLog::Image("Result V", "Result V", outPixelBufferAccessV)
822 << tcu::TestLog::Image("Result Y", "Result Y", outPixelBufferAccessY)
823 << tcu::TestLog::Image("Result U", "Result U", outPixelBufferAccessU)
824 << tcu::TestLog::EndImageSet;
826 for (size_t i = 0; i < referencePixels.size(); i++)
827 if (referencePixels[i].first.z() == static_cast<int>(frameNumber))
829 const tcu::IVec3& pos = referencePixels[i].first;
830 const tcu::IVec3& ref = referencePixels[i].second;
831 const tcu::IVec3 value = tcu::IVec3(outPixelBufferAccessV.getPixelInt(pos.x(), pos.y()).x(),
832 outPixelBufferAccessY.getPixelInt(pos.x(), pos.y()).x(),
833 outPixelBufferAccessU.getPixelInt(pos.x(), pos.y()).x());
// Decodes two bitstreams in an interleaved fashion on one device, using two
// decoder instances; depending on the test type the second stream is another
// H.264 clip, an H.265 clip, or an (unimplemented) encode stream.
class DualVideoDecodeTestInstance : public VideoBaseTestInstance
{
public:
										DualVideoDecodeTestInstance		(Context&						context,
																		 const CaseDef&					data);
										~DualVideoDecodeTestInstance	(void);

	// Returns the raw clip bytes for the primary (true) or secondary (false) stream.
	MovePtr<vector<deUint8>>			loadTestVideoData				(bool							primary);
	tcu::TestStatus						iterate							(void);
	// Checks one decoded frame against the procedurally generated color bars.
	bool								verifyImage						(bool							firstClip,
																		 int32_t						frameNumber,
																		 const MultiPlaneImageData&		multiPlaneImageData);

protected:
	CaseDef								m_caseDef;
	MovePtr<VideoBaseDecoder>			m_decoder1;
	MovePtr<VideoBaseDecoder>			m_decoder2;
	VkVideoCodecOperationFlagBitsKHR	m_videoCodecOperation;		// Combined ops, used to select the video device.
	VkVideoCodecOperationFlagBitsKHR	m_videoCodecOperation1;		// First stream codec (always H.264 decode).
	VkVideoCodecOperationFlagBitsKHR	m_videoCodecOperation2;		// Second stream codec, derived from testType.
	int32_t								m_frameCountTrigger;		// Frames cached per stream before interleaved submit.
};
866 DualVideoDecodeTestInstance::DualVideoDecodeTestInstance (Context& context, const CaseDef& data)
867 : VideoBaseTestInstance (context)
869 , m_decoder1 (new VideoBaseDecoder(context))
870 , m_decoder2 (new VideoBaseDecoder(context))
871 , m_videoCodecOperation (VK_VIDEO_CODEC_OPERATION_NONE_KHR)
872 , m_videoCodecOperation1 (VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR)
873 , m_videoCodecOperation2 (VK_VIDEO_CODEC_OPERATION_NONE_KHR)
874 , m_frameCountTrigger (10)
876 const bool randomOrSwapped = false;
877 const bool queryResultWithStatus = false;
879 m_decoder1->setDecodeParameters(randomOrSwapped, queryResultWithStatus, m_frameCountTrigger + 1);
880 m_decoder2->setDecodeParameters(randomOrSwapped, queryResultWithStatus, m_frameCountTrigger + 1);
882 m_videoCodecOperation2 = m_caseDef.testType == TEST_TYPE_H264_DECODE_INTERLEAVED ? VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR
883 : m_caseDef.testType == TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED ? VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT
884 : m_caseDef.testType == TEST_TYPE_H264_H265_DECODE_INTERLEAVED ? VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
885 : VK_VIDEO_CODEC_OPERATION_NONE_KHR;
887 DE_ASSERT(m_videoCodecOperation2 != VK_VIDEO_CODEC_OPERATION_NONE_KHR);
889 m_videoCodecOperation = static_cast<VkVideoCodecOperationFlagBitsKHR>(m_videoCodecOperation1 | m_videoCodecOperation2);
891 if (m_videoCodecOperation2 == VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT)
892 TCU_THROW(NotSupportedError, "NOT IMPLEMENTED: REQUIRES ENCODE QUEUE");
895 DualVideoDecodeTestInstance::~DualVideoDecodeTestInstance (void)
899 MovePtr<vector<deUint8>> DualVideoDecodeTestInstance::loadTestVideoData (bool primary)
901 switch (m_caseDef.testType)
903 case TEST_TYPE_H264_DECODE_INTERLEAVED: return primary ? loadVideoDataClipA() : loadVideoDataClipB();
904 case TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED: return loadVideoDataClipA();
905 case TEST_TYPE_H264_H265_DECODE_INTERLEAVED: return primary ? loadVideoDataClipA() : loadVideoDataClipD();
906 default: TCU_THROW(InternalError, "Unknown testType");
tcu::TestStatus DualVideoDecodeTestInstance::iterate (void)
{
	// Acquire a device whose queue supports both video decode and transfer for
	// the combined codec operations of the two streams.
	const VideoDevice::VideoDeviceFlags	videoDeviceFlags			= VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED;
	const VkDevice						device						= getDeviceSupportingQueue(VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT, m_videoCodecOperation, videoDeviceFlags);
	const DeviceInterface&				vkd							= getDeviceDriver();
	const deUint32						queueFamilyIndexDecode		= getQueueFamilyIndexDecode();
	const deUint32						queueFamilyIndexTransfer	= getQueueFamilyIndexTransfer();
	Allocator&							allocator					= getAllocator();
	// Each stream gets its own FFmpeg demuxer and NVIDIA bitstream parser.
	MovePtr<vector<deUint8>>			videoData1					= loadTestVideoData(true);
	MovePtr<vector<deUint8>>			videoData2					= loadTestVideoData(false);
	MovePtr<IfcFfmpegDemuxer>			ffmpegDemuxer1				(m_decoder1->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData1));
	MovePtr<IfcFfmpegDemuxer>			ffmpegDemuxer2				(m_decoder2->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData2));
	VkExtensionProperties				stdExtensionVersion1		= getExtensionVersion(m_videoCodecOperation1);
	VkExtensionProperties				stdExtensionVersion2		= getExtensionVersion(m_videoCodecOperation2);
	MovePtr<IfcVulkanVideoDecodeParser>	vulkanVideoDecodeParser1	(m_decoder1->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation1, &stdExtensionVersion1));
	MovePtr<IfcVulkanVideoDecodeParser>	vulkanVideoDecodeParser2	(m_decoder2->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation2, &stdExtensionVersion2));
	int32_t								frameNumber					= 0;	// frames actually dequeued and checked
	int32_t								framesCorrect				= 0;	// frames that passed verifyImage()
	vector<DecodedFrame>				frames						(m_frameCountTrigger, initDecodeFrame());

	m_decoder1->initialize(m_videoCodecOperation1, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);

	if (!vulkanVideoDecodeParser1->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder1.get())))
	{
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
	}

	m_decoder2->initialize(m_videoCodecOperation2, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);

	if (!vulkanVideoDecodeParser2->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder2.get())))
	{
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
	}

	// Parse the first stream until m_frameCountTrigger display frames are
	// queued (or the stream ends) without submitting any decode work yet.
	{
		bool	videoStreamHasEnded	= false;
		int32_t	framesInQueue		= 0;

		while (framesInQueue < m_frameCountTrigger && !videoStreamHasEnded)
		{
			// NOTE(review): packet declarations reconstructed from a mangled
			// source — demux() is expected to fill pData/size; confirm upstream.
			deUint8*	pData			= DE_NULL;
			deInt64		size			= 0;
			const bool	demuxerSuccess	= ffmpegDemuxer1->demux(&pData, &size);
			const bool	parserSuccess	= vulkanVideoDecodeParser1->parseByteStream(pData, size);

			if (!demuxerSuccess || !parserSuccess)
				videoStreamHasEnded = true;

			framesInQueue = m_decoder1->GetVideoFrameBuffer()->GetDisplayFramesCount();
		}
	}

	// Same queueing pass for the second stream.
	{
		bool	videoStreamHasEnded	= false;
		int32_t	framesInQueue		= 0;

		while (framesInQueue < m_frameCountTrigger && !videoStreamHasEnded)
		{
			deUint8*	pData			= DE_NULL;
			deInt64		size			= 0;
			const bool	demuxerSuccess	= ffmpegDemuxer2->demux(&pData, &size);
			const bool	parserSuccess	= vulkanVideoDecodeParser2->parseByteStream(pData, size);

			if (!demuxerSuccess || !parserSuccess)
				videoStreamHasEnded = true;

			framesInQueue = m_decoder2->GetVideoFrameBuffer()->GetDisplayFramesCount();
		}
	}

	// Submit both decoders' cached pictures interleaved on the decode queue.
	m_decoder1->DecodeCachedPictures(m_decoder2.get());

	for (size_t decoderNdx = 0; decoderNdx < 2; ++decoderNdx)
	{
		const bool			firstDecoder	= (decoderNdx == 0);
		VideoBaseDecoder*	decoder			= firstDecoder ? m_decoder1.get() : m_decoder2.get();
		// Only the mixed-codec variant feeds a different clip to decoder 2;
		// its verification therefore uses the second clip's reference colors.
		const bool			firstClip		= firstDecoder ? true
												: m_caseDef.testType == TEST_TYPE_H264_H265_DECODE_INTERLEAVED;

		for (int32_t frameNdx = 0; frameNdx < m_frameCountTrigger; ++frameNdx)
		{
			decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frames[frameNdx]);

			DecodedFrame&		frame		= frames[frameNdx];
			const VkExtent2D	imageExtent	= frame.pDecodedImage->getExtent();
			const VkImage		image		= frame.pDecodedImage->getImage();
			const VkFormat		format		= frame.pDecodedImage->getFormat();
			const VkImageLayout	layout		= frame.decodedImageLayout;

			// A negative pictureIndex marks an empty slot; skip it.
			if (frame.pictureIndex >= 0)
			{
				// Read the decoded picture back to host memory and compare.
				MovePtr<MultiPlaneImageData> resultImage = getDecodedImage(vkd, device, allocator, image, layout, format, imageExtent, queueFamilyIndexTransfer, queueFamilyIndexDecode);

				// NOTE(review): counter updates reconstructed — verify that
				// correct frames bump framesCorrect and all checked frames
				// bump frameNumber, as the final pass condition requires.
				if (verifyImage(firstClip, frameNdx, *resultImage))
					framesCorrect++;

				decoder->ReleaseDisplayedFrame(&frame);
				frameNumber++;
			}
		}
	}

	if (!vulkanVideoDecodeParser2->deinitialize())
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");

	if (!vulkanVideoDecodeParser1->deinitialize())
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");

	// Pass only when at least one frame was decoded and every checked frame matched.
	if (framesCorrect > 0 && framesCorrect == frameNumber)
		return tcu::TestStatus::pass("pass");

	return tcu::TestStatus::fail("Some frames has not been decoded correctly (" + de::toString(framesCorrect) + "/" + de::toString(frameNumber) + ")");
}
1025 bool DualVideoDecodeTestInstance::verifyImage (bool firstClip, int32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData)
1027 const tcu::UVec2 imageSize = multiPlaneImageData.getSize();
1028 const uint32_t k = firstClip ? 1 : 2;
1029 const uint32_t barCount = 10;
1030 const uint32_t barWidth = 16 * k;
1031 const uint32_t barNum = uint32_t(frameNumber) % barCount;
1032 const uint32_t edgeX = imageSize.x() - barWidth * barNum;
1033 const uint32_t colorNdx = uint32_t(frameNumber) / barCount;
1034 const int32_t refColorsV1[] = { 240, 34, 110 };
1035 const int32_t refColorsY1[] = { 81, 145, 41 };
1036 const int32_t refColorsU1[] = { 90, 0, 0 };
1037 const int32_t refColorsV2[] = { 16, 0, 0 };
1038 const int32_t refColorsY2[] = { 170, 0, 0 };
1039 const int32_t refColorsU2[] = { 166, 0, 0 };
1040 const tcu::UVec4 refColorV = tcu::UVec4(firstClip ? refColorsV1[colorNdx] : refColorsV2[colorNdx], 0, 0, 0);
1041 const tcu::UVec4 refColorY = tcu::UVec4(firstClip ? refColorsY1[colorNdx] : refColorsY2[colorNdx], 0, 0, 0);
1042 const tcu::UVec4 refColorU = tcu::UVec4(firstClip ? refColorsU1[colorNdx] : refColorsU2[colorNdx], 0, 0, 0);
1043 const tcu::UVec4 refBlankV = tcu::UVec4(128, 0, 0, 0);
1044 const tcu::UVec4 refBlankY = tcu::UVec4( 16, 0, 0, 0);
1045 const tcu::UVec4 refBlankU = tcu::UVec4(128, 0, 0, 0);
1046 tcu::ConstPixelBufferAccess outPixelBufferAccessV = multiPlaneImageData.getChannelAccess(0);
1047 tcu::ConstPixelBufferAccess outPixelBufferAccessY = multiPlaneImageData.getChannelAccess(1);
1048 tcu::ConstPixelBufferAccess outPixelBufferAccessU = multiPlaneImageData.getChannelAccess(2);
1049 tcu::TextureLevel refPixelBufferV (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
1050 tcu::TextureLevel refPixelBufferY (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
1051 tcu::TextureLevel refPixelBufferU (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
1052 tcu::PixelBufferAccess refPixelBufferAccessV = refPixelBufferV.getAccess();
1053 tcu::PixelBufferAccess refPixelBufferAccessY = refPixelBufferY.getAccess();
1054 tcu::PixelBufferAccess refPixelBufferAccessU = refPixelBufferU.getAccess();
1055 tcu::TestLog& log = m_context.getTestContext().getLog();
1056 const string titleV = "Rendered frame " + de::toString(frameNumber) + ". V Component";
1057 const string titleY = "Rendered frame " + de::toString(frameNumber) + ". Y Component";
1058 const string titleU = "Rendered frame " + de::toString(frameNumber) + ". U Component";
1059 const tcu::UVec4 threshold = tcu::UVec4(0, 0, 0, 0);
1061 for (uint32_t x = 0; x < imageSize.x(); ++x)
1063 const tcu::UVec4& colorV = (x < edgeX) ? refColorV : refBlankV;
1064 const tcu::UVec4& colorY = (x < edgeX) ? refColorY : refBlankY;
1065 const tcu::UVec4& colorU = (x < edgeX) ? refColorU : refBlankU;
1067 for (uint32_t y = 0; y < imageSize.y(); ++y)
1069 refPixelBufferAccessV.setPixel(colorV, x, y);
1070 refPixelBufferAccessY.setPixel(colorY, x, y);
1071 refPixelBufferAccessU.setPixel(colorU, x, y);
1075 const bool resultV = tcu::intThresholdCompare(log, titleV.c_str(), "", refPixelBufferAccessV, outPixelBufferAccessV, threshold, tcu::COMPARE_LOG_ON_ERROR);
1076 const bool resultY = tcu::intThresholdCompare(log, titleY.c_str(), "", refPixelBufferAccessY, outPixelBufferAccessY, threshold, tcu::COMPARE_LOG_ON_ERROR);
1077 const bool resultU = tcu::intThresholdCompare(log, titleU.c_str(), "", refPixelBufferAccessU, outPixelBufferAccessU, threshold, tcu::COMPARE_LOG_ON_ERROR);
1079 return resultV && resultY && resultU;
1081 #endif // DE_OS != DE_OS_ANDROID
// Test case wrapper: validates extension support and creates the matching
// (single-stream or dual-stream) decode test instance.
class VideoDecodeTestCase : public TestCase
{
public:
							VideoDecodeTestCase		(tcu::TestContext& context, const char* name, const char* desc, const CaseDef caseDef);
							~VideoDecodeTestCase	(void);

	virtual TestInstance*	createInstance			(Context& context) const;
	virtual void			checkSupport			(Context& context) const;

private:
	CaseDef					m_caseDef;
};
VideoDecodeTestCase::VideoDecodeTestCase (tcu::TestContext& context, const char* name, const char* desc, const CaseDef caseDef)
	: vkt::TestCase	(context, name, desc)
	, m_caseDef		(caseDef)
{
	// Case parameters are stored by value; no further setup needed.
}
1101 VideoDecodeTestCase::~VideoDecodeTestCase (void)
void VideoDecodeTestCase::checkSupport (Context& context) const
{
	// Requirements common to every decode case.
	context.requireDeviceFunctionality("VK_KHR_video_queue");
	context.requireDeviceFunctionality("VK_KHR_synchronization2");

	// Codec-specific requirements per test variant.
	switch (m_caseDef.testType)
	{
		case TEST_TYPE_H264_DECODE_I:
		case TEST_TYPE_H264_DECODE_I_P:
		case TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER:
		case TEST_TYPE_H264_DECODE_I_P_B_13:
		case TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER:
		case TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS:
		case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE:
		case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB:
		case TEST_TYPE_H264_DECODE_INTERLEAVED:
		case TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED:
		{
			context.requireDeviceFunctionality("VK_KHR_video_decode_h264");
			break;
		}
		case TEST_TYPE_H265_DECODE_I:
		case TEST_TYPE_H265_DECODE_I_P:
		case TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER:
		case TEST_TYPE_H265_DECODE_I_P_B_13:
		case TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER:
		{
			context.requireDeviceFunctionality("VK_KHR_video_decode_h265");
			break;
		}
		// Mixed-codec interleaving needs both decode extensions.
		case TEST_TYPE_H264_H265_DECODE_INTERLEAVED:
		{
			context.requireDeviceFunctionality("VK_KHR_video_decode_h264");
			context.requireDeviceFunctionality("VK_KHR_video_decode_h265");
			break;
		}
		default:
			TCU_THROW(InternalError, "Unknown TestType");
	}
}
TestInstance* VideoDecodeTestCase::createInstance (Context& context) const
{
	// Vulkan video is unsupported for android platform
	switch (m_caseDef.testType)
	{
		// Single-stream decode variants.
		case TEST_TYPE_H264_DECODE_I:
		case TEST_TYPE_H264_DECODE_I_P:
		case TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER:
		case TEST_TYPE_H264_DECODE_I_P_B_13:
		case TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER:
		case TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS:
		case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE:
		case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB:
		case TEST_TYPE_H265_DECODE_I:
		case TEST_TYPE_H265_DECODE_I_P:
		case TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER:
		case TEST_TYPE_H265_DECODE_I_P_B_13:
		case TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER:
		{
#if (DE_OS != DE_OS_ANDROID)
			return new VideoDecodeTestInstance(context, m_caseDef);
#endif
		}
		// Dual-stream (interleaved) variants.
		case TEST_TYPE_H264_DECODE_INTERLEAVED:
		case TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED:
		case TEST_TYPE_H264_H265_DECODE_INTERLEAVED:
		{
#if (DE_OS != DE_OS_ANDROID)
			return new DualVideoDecodeTestInstance(context, m_caseDef);
#endif
		}
		// On Android the case bodies above are empty and fall through here,
		// so every test type ends up throwing.
		default:
			TCU_THROW(InternalError, "Unknown TestType");
	}

#if (DE_OS == DE_OS_ANDROID)
	// NOTE(review): tail reconstructed from a mangled source — unreachable on
	// Android (the switch always throws); verify against upstream.
	DE_UNREF(context);
	return DE_NULL;
#endif
}
// Maps each test type to its stable case name within the "decode" group.
const char* getTestName (const TestType testType)
{
	switch (testType)
	{
		case TEST_TYPE_H264_DECODE_I:								return "h264_i";
		case TEST_TYPE_H264_DECODE_I_P:								return "h264_i_p";
		case TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER:			return "h264_i_p_not_matching_order";
		case TEST_TYPE_H264_DECODE_I_P_B_13:						return "h264_i_p_b_13";
		case TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER:		return "h264_i_p_b_13_not_matching_order";
		case TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS:		return "h264_query_with_status";
		case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE:				return "h264_resolution_change";
		case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB:			return "h264_resolution_change_dpb";
		case TEST_TYPE_H264_DECODE_INTERLEAVED:						return "h264_interleaved";
		case TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED:			return "h264_decode_encode_interleaved";
		case TEST_TYPE_H264_H265_DECODE_INTERLEAVED:				return "h264_h265_interleaved";
		case TEST_TYPE_H265_DECODE_I:								return "h265_i";
		case TEST_TYPE_H265_DECODE_I_P:								return "h265_i_p";
		case TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER:			return "h265_i_p_not_matching_order";
		case TEST_TYPE_H265_DECODE_I_P_B_13:						return "h265_i_p_b_13";
		case TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER:		return "h265_i_p_b_13_not_matching_order";
		default:													TCU_THROW(InternalError, "Unknown TestType");
	}
}
// Builds the "decode" test group with one case per TestType.
tcu::TestCaseGroup* createVideoDecodeTests (tcu::TestContext& testCtx)
{
	MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "decode", "Video decoding session tests"));

	for (int testTypeNdx = 0; testTypeNdx < TEST_TYPE_LAST; ++testTypeNdx)
	{
		const TestType	testType	= static_cast<TestType>(testTypeNdx);
		const CaseDef	caseDef		=
		{
			testType,	// TestType	testType;
		};

		group->addChild(new VideoDecodeTestCase(testCtx, getTestName(testType), "", caseDef));
	}

	return group.release();
}