1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2021 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Video Decoding Session tests
22 *//*--------------------------------------------------------------------*/
23
24 #include "vktVideoDecodeTests.hpp"
25 #include "vktVideoTestUtils.hpp"
26 #include "vktTestCase.hpp"
27 #include "vktVideoPictureUtils.hpp"
28
29 #include "tcuTextureUtil.hpp"
30 #include "tcuVectorUtil.hpp"
31 #include "tcuTestLog.hpp"
32 #include "tcuPlatform.hpp"
33 #include "tcuFunctionLibrary.hpp"
34 #include "tcuImageCompare.hpp"
35
36 #include <deDefs.h>
37 #include "vkDefs.hpp"
38 #include "vkBufferWithMemory.hpp"
39 #include "vkImageWithMemory.hpp"
40 #include "vkImageUtil.hpp"
41 #include "vkBarrierUtil.hpp"
42 #include "vkObjUtil.hpp"
43 #include "vkCmdUtil.hpp"
44 #include "vkTypeUtil.hpp"
45
46 #include "../ycbcr/vktYCbCrUtil.hpp"
47
48 #ifdef DE_BUILD_VIDEO
49 #include "vktVideoSessionNvUtils.hpp"
50 #include "vktVideoSessionFfmpegUtils.hpp"
51 #include "vktVideoBaseDecodeUtils.hpp"
52 #endif
53
54
55 #include <atomic>
56
57 namespace vkt
58 {
59 namespace video
60 {
61 namespace
62 {
63 using namespace vk;
64 using namespace std;
65
66 using de::MovePtr;
67 using vkt::ycbcr::MultiPlaneImageData;
68
69
// Enumerates every decode scenario exercised by this file.  The trailing
// "Case N" comments refer to the Vulkan video CTS test-plan case numbers.
enum TestType
{
	TEST_TYPE_H264_DECODE_I,							// Case 6
	TEST_TYPE_H264_DECODE_I_P,							// Case 7
	TEST_TYPE_H264_DECODE_I_P_B_13,						// Case 7a
	TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER,		// Case 8
	TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER,	// Case 8a
	TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS,		// Case 9
	TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE,			// Case 17
	TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB,		// Case 18
	TEST_TYPE_H264_DECODE_INTERLEAVED,					// Case 21
	TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED,		// Case 23
	TEST_TYPE_H264_H265_DECODE_INTERLEAVED,				// Case 24

	// NOTE: code below relies on all H.264 values preceding TEST_TYPE_H265_DECODE_I
	// (see the de::inBounds() codec-operation selection in the constructor).
	TEST_TYPE_H265_DECODE_I,							// Case 15
	TEST_TYPE_H265_DECODE_I_P,							// Case 16
	TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER,		// Case 16-2
	TEST_TYPE_H265_DECODE_I_P_B_13 ,					// Case 16-3
	TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER,	// Case 16-4

	TEST_TYPE_LAST
};
92
// Parameters of a single test case instance.
struct CaseDef
{
	TestType	testType;	// Which decode scenario to run (see TestType).
};
97
98 // Vulkan video is not supported on android platform
99 // all external libraries, helper functions and test instances has been excluded
100 #ifdef DE_BUILD_VIDEO
initDecodeFrame(void)101 DecodedFrame initDecodeFrame (void)
102 {
103 DecodedFrame frameTemplate =
104 {
105 -1, // int32_t pictureIndex;
106 DE_NULL, // const ImageObject* pDecodedImage;
107 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout decodedImageLayout;
108 DE_NULL, // VkFence frameCompleteFence;
109 DE_NULL, // VkFence frameConsumerDoneFence;
110 DE_NULL, // VkSemaphore frameCompleteSemaphore;
111 DE_NULL, // VkSemaphore frameConsumerDoneSemaphore;
112 DE_NULL, // VkQueryPool queryPool;
113 0, // int32_t startQueryId;
114 0, // uint32_t numQueries;
115 0, // uint64_t timestamp;
116 0, // uint32_t hasConsummerSignalFence : 1;
117 0, // uint32_t hasConsummerSignalSemaphore : 1;
118 0, // int32_t decodeOrder;
119 0, // int32_t displayOrder;
120 };
121
122 return frameTemplate;
123 }
124
125 // Avoid useless sampler in writeImage 2.5x faster
convertToRGBASized(const tcu::ConstPixelBufferAccess & src,const tcu::UVec2 & size)126 MovePtr<tcu::TextureLevel> convertToRGBASized (const tcu::ConstPixelBufferAccess& src, const tcu::UVec2& size)
127 {
128 const tcu::TextureFormat format (tcu::TextureFormat::RGB, tcu::TextureFormat::UNORM_INT8);
129 MovePtr<tcu::TextureLevel> result (new tcu::TextureLevel(format, size.x(), size.y()));
130 tcu::PixelBufferAccess access (result->getAccess());
131
132 for (int y = 0; y < result->getHeight(); ++y)
133 for (int x = 0; x < result->getWidth(); ++x)
134 access.setPixel(src.getPixelUint(x, y), x, y);
135
136 return result;
137 }
138
convertToRGBA(const tcu::ConstPixelBufferAccess & src)139 MovePtr<tcu::TextureLevel> convertToRGBA (const tcu::ConstPixelBufferAccess& src)
140 {
141 return convertToRGBASized(src, tcu::UVec2((uint32_t)src.getWidth(), (uint32_t)src.getHeight()));
142 }
143
// Downloads the decoded picture stored in 'image' into a host-side
// MultiPlaneImageData.
//
// The picture lives on the video-decode queue family, so the download needs a
// queue-family ownership transfer:
//   1) decode queue:   make decode writes visible and move 'layout' -> GENERAL,
//                      then release ownership (decode -> transfer family),
//   2) transfer queue: acquire ownership, move GENERAL -> TRANSFER_SRC_OPTIMAL,
//   3) copy the image on the transfer queue family via downloadImage().
// The two submissions are ordered by 'semaphore'; both are fenced and waited on
// before the copy so all GPU work has completed.
MovePtr<MultiPlaneImageData> getDecodedImage (const DeviceInterface&	vkd,
					      VkDevice			device,
					      Allocator&		allocator,
					      VkImage			image,
					      VkImageLayout		layout,
					      VkFormat			format,
					      VkExtent2D		codedExtent,
					      deUint32			queueFamilyIndexTransfer,
					      deUint32			queueFamilyIndexDecode)
{
	MovePtr<MultiPlaneImageData>	multiPlaneImageData		(new MultiPlaneImageData(format, tcu::UVec2(codedExtent.width, codedExtent.height)));
	const VkQueue					queueDecode				= getDeviceQueue(vkd, device, queueFamilyIndexDecode, 0u);
	const VkQueue					queueTransfer			= getDeviceQueue(vkd, device, queueFamilyIndexTransfer, 0u);
	const VkImageSubresourceRange	imageSubresourceRange	= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
	// Step 1a: flush decode writes and transition the incoming layout to GENERAL.
	const VkImageMemoryBarrier2KHR	imageBarrierDecode		= makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR,
																				      VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR,
																				      VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
																				      VK_ACCESS_NONE_KHR,
																				      layout,
																				      VK_IMAGE_LAYOUT_GENERAL,
																				      image,
																				      imageSubresourceRange);
	// Step 1b: ownership release half of the decode -> transfer transfer
	// (same barrier parameters must be repeated on the acquiring queue).
	const VkImageMemoryBarrier2KHR	imageBarrierOwnershipDecode	= makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
																						  VK_ACCESS_NONE_KHR,
																						  VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
																						  VK_ACCESS_NONE_KHR,
																						  VK_IMAGE_LAYOUT_GENERAL,
																						  VK_IMAGE_LAYOUT_GENERAL,
																						  image,
																						  imageSubresourceRange,
																						  queueFamilyIndexDecode,
																						  queueFamilyIndexTransfer);
	// Step 2a: ownership acquire half, recorded on the transfer queue.
	const VkImageMemoryBarrier2KHR	imageBarrierOwnershipTransfer	= makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
																							  VK_ACCESS_NONE_KHR,
																							  VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
																							  VK_ACCESS_NONE_KHR,
																							  VK_IMAGE_LAYOUT_GENERAL,
																							  VK_IMAGE_LAYOUT_GENERAL,
																							  image,
																							  imageSubresourceRange,
																							  queueFamilyIndexDecode,
																							  queueFamilyIndexTransfer);
	// Step 2b: transition to TRANSFER_SRC_OPTIMAL for the download copy.
	const VkImageMemoryBarrier2KHR	imageBarrierTransfer	= makeImageMemoryBarrier2(VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR,
																					  VK_ACCESS_2_TRANSFER_READ_BIT_KHR,
																					  VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
																					  VK_ACCESS_NONE_KHR,
																					  VK_IMAGE_LAYOUT_GENERAL,
																					  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
																					  image,
																					  imageSubresourceRange);
	const Move<VkCommandPool>		cmdDecodePool			(makeCommandPool(vkd, device, queueFamilyIndexDecode));
	const Move<VkCommandBuffer>		cmdDecodeBuffer			(allocateCommandBuffer(vkd, device, *cmdDecodePool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	const Move<VkCommandPool>		cmdTransferPool			(makeCommandPool(vkd, device, queueFamilyIndexTransfer));
	const Move<VkCommandBuffer>		cmdTransferBuffer		(allocateCommandBuffer(vkd, device, *cmdTransferPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	Move<VkSemaphore>				semaphore				= createSemaphore(vkd, device);			// orders decode submit before transfer submit
	Move<VkFence>					decodeFence				= createFence(vkd, device);
	Move<VkFence>					transferFence			= createFence(vkd, device);
	VkFence							fences[]				= { *decodeFence, *transferFence };
	const VkPipelineStageFlags		waitDstStageMask		= VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	const VkSubmitInfo				decodeSubmitInfo
	{
		VK_STRUCTURE_TYPE_SUBMIT_INFO,						// VkStructureType				sType;
		DE_NULL,											// const void*					pNext;
		0u,													// deUint32						waitSemaphoreCount;
		DE_NULL,											// const VkSemaphore*			pWaitSemaphores;
		DE_NULL,											// const VkPipelineStageFlags*	pWaitDstStageMask;
		1u,													// deUint32						commandBufferCount;
		&*cmdDecodeBuffer,									// const VkCommandBuffer*		pCommandBuffers;
		1u,													// deUint32						signalSemaphoreCount;
		&*semaphore,										// const VkSemaphore*			pSignalSemaphores;
	};
	const VkSubmitInfo				transferSubmitInfo
	{
		VK_STRUCTURE_TYPE_SUBMIT_INFO,						// VkStructureType				sType;
		DE_NULL,											// const void*					pNext;
		1u,													// deUint32						waitSemaphoreCount;
		&*semaphore,										// const VkSemaphore*			pWaitSemaphores;
		&waitDstStageMask,									// const VkPipelineStageFlags*	pWaitDstStageMask;
		1u,													// deUint32						commandBufferCount;
		&*cmdTransferBuffer,								// const VkCommandBuffer*		pCommandBuffers;
		0u,													// deUint32						signalSemaphoreCount;
		DE_NULL,											// const VkSemaphore*			pSignalSemaphores;
	};

	DEBUGLOG(std::cout << "getDecodedImage: " << image << " " << layout << std::endl);

	// Record the release side (decode queue): visibility barrier + ownership release.
	beginCommandBuffer(vkd, *cmdDecodeBuffer, 0u);
	cmdPipelineImageMemoryBarrier2(vkd, *cmdDecodeBuffer, &imageBarrierDecode);
	cmdPipelineImageMemoryBarrier2(vkd, *cmdDecodeBuffer, &imageBarrierOwnershipDecode);
	endCommandBuffer(vkd, *cmdDecodeBuffer);

	// Record the acquire side (transfer queue): ownership acquire + layout for copy.
	beginCommandBuffer(vkd, *cmdTransferBuffer, 0u);
	cmdPipelineImageMemoryBarrier2(vkd, *cmdTransferBuffer, &imageBarrierOwnershipTransfer);
	cmdPipelineImageMemoryBarrier2(vkd, *cmdTransferBuffer, &imageBarrierTransfer);
	endCommandBuffer(vkd, *cmdTransferBuffer);

	VK_CHECK(vkd.queueSubmit(queueDecode, 1u, &decodeSubmitInfo, *decodeFence));
	VK_CHECK(vkd.queueSubmit(queueTransfer, 1u, &transferSubmitInfo, *transferFence));

	// Wait for both submissions before reading the image back on the host.
	VK_CHECK(vkd.waitForFences(device, DE_LENGTH_OF_ARRAY(fences), fences, DE_TRUE, ~0ull));

	vkt::ycbcr::downloadImage(vkd, device, queueFamilyIndexTransfer, allocator, image, multiPlaneImageData.get(), 0, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

	return multiPlaneImageData;
}
249
// Test instance driving a single decode scenario (CaseDef::testType):
// demuxes the clip, feeds it to the parser/decoder, downloads decoded
// pictures and compares them against references.
class VideoDecodeTestInstance : public VideoBaseTestInstance
{
public:
	// (pixel coordinate, expected YUV value) pair used by the no-reference checks.
	typedef std::pair<tcu::IVec3, tcu::IVec3> ReferencePixel;

											VideoDecodeTestInstance					(Context&						context,
																					 const CaseDef&					data);
											~VideoDecodeTestInstance				(void);

	// Loads the bitstream clip matching m_caseDef.testType.
	MovePtr<vector<deUint8>>				loadTestVideoData						(void);

	// Dispatches to one of the iterate*Frame() variants based on m_frameCountTrigger.
	tcu::TestStatus							iterate									(void);
	tcu::TestStatus							iterateSingleFrame						(void);
	tcu::TestStatus							iterateDoubleFrame						(void);
	tcu::TestStatus							iterateMultipleFrame					(void);
	// Compares a downloaded frame against the procedurally generated reference bars.
	bool									verifyImage								(uint32_t						frameNumber,
																					 const MultiPlaneImageData&		multiPlaneImageData);
	// Multi-frame verification entry point; picks one of the two helpers below.
	bool									verifyImageMultipleFrame				(uint32_t						frameNumber,
																					 const MultiPlaneImageData&		multiPlaneImageData);
	// Spot-checks individual pixels when no full reference image exists (B-frame clips).
	bool									verifyImageMultipleFrameNoReference		(uint32_t						frameNumber,
																					 const MultiPlaneImageData&		multiPlaneImageData,
																					 const vector<ReferencePixel>&	referencePixels);
	// Full-image comparison against a generated reference (resolution-change clips).
	bool									verifyImageMultipleFrameWithReference	(uint32_t						frameNumber,
																					 const MultiPlaneImageData&		multiPlaneImageData);
protected:
	CaseDef									m_caseDef;					// Test case parameters.
	MovePtr<VideoBaseDecoder>				m_decoder;					// Decoder wrapper owning session/frame buffer.
	VkVideoCodecOperationFlagBitsKHR		m_videoCodecOperation;		// H.264 or H.265 decode, derived from testType.
	int32_t									m_frameCountTrigger;		// Frames cached before submission / per verify batch.
	bool									m_queryWithStatusRequired;	// True for the query-result-with-status case.
};
281
VideoDecodeTestInstance(Context & context,const CaseDef & data)282 VideoDecodeTestInstance::VideoDecodeTestInstance (Context& context, const CaseDef& data)
283 : VideoBaseTestInstance (context)
284 , m_caseDef (data)
285 , m_decoder (new VideoBaseDecoder(context))
286 , m_videoCodecOperation (VK_VIDEO_CODEC_OPERATION_NONE_KHR)
287 , m_frameCountTrigger (0)
288 , m_queryWithStatusRequired (false)
289 {
290 const bool queryResultWithStatus = m_caseDef.testType == TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS;
291 const bool twoCachedPicturesSwapped = queryResultWithStatus
292 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER
293 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER;
294 const bool randomOrSwapped = twoCachedPicturesSwapped
295 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER
296 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER;
297 const uint32_t gopSize = m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE ? 15
298 : m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB ? 15
299 : 0;
300 const uint32_t gopCount = m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE ? 2
301 : m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB ? 1
302 : 0;
303 const bool submitDuringRecord = m_caseDef.testType == TEST_TYPE_H264_DECODE_I
304 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P
305 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I
306 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P;
307 const bool submitAfter = !submitDuringRecord;
308
309 m_frameCountTrigger = m_caseDef.testType == TEST_TYPE_H264_DECODE_I ? 1
310 : m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P ? 2
311 : m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13 ? 13 * 2
312 : m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER ? 2
313 : m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER ? 13 * 2
314 : m_caseDef.testType == TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS ? 2
315 : m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE ? 15 * 2
316 : m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB ? 15 * 2
317 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I ? 1
318 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P ? 2
319 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER ? 2
320 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13 ? 13 * 2
321 : m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER ? 13 * 2
322 : 0;
323
324 m_decoder->setDecodeParameters(randomOrSwapped, queryResultWithStatus, m_frameCountTrigger, submitAfter, gopSize, gopCount);
325
326 m_videoCodecOperation = de::inBounds(m_caseDef.testType, TEST_TYPE_H264_DECODE_I, TEST_TYPE_H265_DECODE_I) ? VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR
327 : de::inBounds(m_caseDef.testType, TEST_TYPE_H265_DECODE_I, TEST_TYPE_LAST) ? VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
328 : VK_VIDEO_CODEC_OPERATION_NONE_KHR;
329
330 DE_ASSERT(m_videoCodecOperation != VK_VIDEO_CODEC_OPERATION_NONE_KHR);
331
332 m_queryWithStatusRequired = (m_caseDef.testType == TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS);
333 }
334
~VideoDecodeTestInstance(void)335 VideoDecodeTestInstance::~VideoDecodeTestInstance (void)
336 {
337 }
338
loadTestVideoData(void)339 MovePtr<vector<deUint8>> VideoDecodeTestInstance::loadTestVideoData (void)
340 {
341 switch (m_caseDef.testType)
342 {
343 case TEST_TYPE_H264_DECODE_I:
344 case TEST_TYPE_H264_DECODE_I_P:
345 case TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER:
346 case TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS: return loadVideoDataClipA();
347 case TEST_TYPE_H264_DECODE_I_P_B_13:
348 case TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER: return loadVideoDataClipH264G13();
349 case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE:
350 case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB: return loadVideoDataClipC();
351 case TEST_TYPE_H265_DECODE_I:
352 case TEST_TYPE_H265_DECODE_I_P:
353 case TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER: return loadVideoDataClipD();
354 case TEST_TYPE_H265_DECODE_I_P_B_13:
355 case TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER: return loadVideoDataClipH265G13();
356
357 default: TCU_THROW(InternalError, "Unknown testType");
358 }
359 }
360
iterate(void)361 tcu::TestStatus VideoDecodeTestInstance::iterate (void)
362 {
363 if (m_frameCountTrigger == 1)
364 return iterateSingleFrame();
365 else if (m_frameCountTrigger == 2)
366 return iterateDoubleFrame();
367 else
368 return iterateMultipleFrame();
369 }
370
getExtensionVersion(VkVideoCodecOperationFlagBitsKHR videoCodecOperation)371 vk::VkExtensionProperties getExtensionVersion (VkVideoCodecOperationFlagBitsKHR videoCodecOperation)
372 {
373 static const vk::VkExtensionProperties h264StdExtensionVersion = { VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME, VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION };
374 static const vk::VkExtensionProperties h265StdExtensionVersion = { VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME, VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION };
375
376 if (videoCodecOperation == VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR) {
377 return h264StdExtensionVersion;
378 } else if (videoCodecOperation == VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR) {
379 return h265StdExtensionVersion;
380 }
381
382 TCU_THROW(InternalError, "Unsupported Codec Type");
383 }
384
385
// Decodes and verifies exactly one frame: demux+parse until the frame buffer
// yields a decoded picture, download it, compare against the reference, stop.
tcu::TestStatus VideoDecodeTestInstance::iterateSingleFrame (void)
{
	const VideoDevice::VideoDeviceFlags	videoDeviceFlags			= VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED
																	| (m_queryWithStatusRequired ? VideoDevice::VIDEO_DEVICE_FLAG_QUERY_WITH_STATUS_FOR_DECODE_SUPPORT : 0);
	const VkDevice						device						= getDeviceSupportingQueue(VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT, m_videoCodecOperation, videoDeviceFlags);
	const DeviceInterface&				vkd							= getDeviceDriver();
	const deUint32						queueFamilyIndexDecode		= getQueueFamilyIndexDecode();
	const deUint32						queueFamilyIndexTransfer	= getQueueFamilyIndexTransfer();
	Allocator&							allocator					= getAllocator();
	MovePtr<vector<deUint8>>			videoData					= loadTestVideoData();
	MovePtr<IfcFfmpegDemuxer>			ffmpegDemuxer				(m_decoder->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData));
	VkExtensionProperties				stdExtensionVersion			= getExtensionVersion(m_videoCodecOperation);

	MovePtr<IfcVulkanVideoDecodeParser>	vulkanVideoDecodeParser		(m_decoder->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation, &stdExtensionVersion));
	bool								videoStreamHasEnded			= false;
	int32_t								framesInQueue				= 0;
	int32_t								frameNumber					= 0;
	int32_t								framesCorrect				= 0;
	DecodedFrame						frame						= initDecodeFrame();

	m_decoder->initialize(m_videoCodecOperation, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);

	// The parser reports decoded pictures back through the decoder's client interface.
	if (!vulkanVideoDecodeParser->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder.get())))
	{
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
	}

	while (framesInQueue > 0 || !videoStreamHasEnded)
	{
		framesInQueue = m_decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frame);

		// Pump demuxer packets into the parser until a decoded picture shows up
		// or the stream ends (demux/parse failure counts as end of stream).
		while (framesInQueue == 0 && !videoStreamHasEnded)
		{
			if (!videoStreamHasEnded)	// always true here; kept as-is
			{
				deUint8*	pData			= 0;
				deInt64		size			= 0;
				const bool	demuxerSuccess	= ffmpegDemuxer->demux(&pData, &size);
				const bool	parserSuccess	= vulkanVideoDecodeParser->parseByteStream(pData, size);

				if (!demuxerSuccess || !parserSuccess)
					videoStreamHasEnded = true;
			}

			framesInQueue = m_decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frame);
		}

		// pictureIndex >= 0 means the dequeue actually produced a picture.
		if (frame.pictureIndex >= 0)
		{
			const VkExtent2D				imageExtent	= frame.pDecodedImage->getExtent();
			const VkImage					image		= frame.pDecodedImage->getImage();
			const VkFormat					format		= frame.pDecodedImage->getFormat();
			const VkImageLayout				layout		= frame.decodedImageLayout;
			MovePtr<MultiPlaneImageData>	resultImage	= getDecodedImage(vkd, device, allocator, image, layout, format, imageExtent, queueFamilyIndexTransfer, queueFamilyIndexDecode);

			if (verifyImage(frameNumber, *resultImage))
				framesCorrect++;

			m_decoder->ReleaseDisplayedFrame(&frame);
			frameNumber++;

			// Single-frame variant: stop after the first verified frame.
			if (frameNumber >= 1)
				break;
		}
	}

	if (!vulkanVideoDecodeParser->deinitialize())
	{
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");
	}

	// Pass only if at least one frame was decoded and all of them verified.
	if (framesCorrect > 0 && framesCorrect == frameNumber)
		return tcu::TestStatus::pass("pass");
	else
		return tcu::TestStatus::fail("Some frames has not been decoded correctly (" + de::toString(framesCorrect) + "/" + de::toString(frameNumber) + ")");
}
462
iterateDoubleFrame(void)463 tcu::TestStatus VideoDecodeTestInstance::iterateDoubleFrame (void)
464 {
465 const VideoDevice::VideoDeviceFlags videoDeviceFlags = VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED
466 | (m_queryWithStatusRequired ? VideoDevice::VIDEO_DEVICE_FLAG_QUERY_WITH_STATUS_FOR_DECODE_SUPPORT : 0);
467 const VkDevice device = getDeviceSupportingQueue(VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT, m_videoCodecOperation, videoDeviceFlags);
468 const DeviceInterface& vkd = getDeviceDriver();
469 const deUint32 queueFamilyIndexDecode = getQueueFamilyIndexDecode();
470 const deUint32 queueFamilyIndexTransfer = getQueueFamilyIndexTransfer();
471 Allocator& allocator = getAllocator();
472 MovePtr<vector<deUint8>> videoData = loadTestVideoData();
473 MovePtr<IfcFfmpegDemuxer> ffmpegDemuxer (m_decoder->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData));
474 VkExtensionProperties stdExtensionVersion = getExtensionVersion(m_videoCodecOperation);
475
476 MovePtr<IfcVulkanVideoDecodeParser> vulkanVideoDecodeParser (m_decoder->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation, &stdExtensionVersion));
477 bool videoStreamHasEnded = false;
478 int32_t framesInQueue = 0;
479 int32_t frameNumber = 0;
480 int32_t framesCorrect = 0;
481 DecodedFrame frames[2] = { initDecodeFrame(), initDecodeFrame() };
482
483 m_decoder->initialize(m_videoCodecOperation, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);
484
485 if (!vulkanVideoDecodeParser->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder.get())))
486 {
487 TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
488 }
489
490 while (framesInQueue > 0 || !videoStreamHasEnded)
491 {
492 framesInQueue = m_decoder->GetVideoFrameBuffer()->GetDisplayFramesCount();
493
494 while (framesInQueue < 2 && !videoStreamHasEnded)
495 {
496 if (!videoStreamHasEnded)
497 {
498 deUint8* pData = 0;
499 deInt64 size = 0;
500 const bool demuxerSuccess = ffmpegDemuxer->demux(&pData, &size);
501 const bool parserSuccess = vulkanVideoDecodeParser->parseByteStream(pData, size);
502
503 if (!demuxerSuccess || !parserSuccess)
504 videoStreamHasEnded = true;
505 }
506
507 framesInQueue = m_decoder->GetVideoFrameBuffer()->GetDisplayFramesCount();
508 }
509
510 for (size_t frameNdx = 0; frameNdx < 2; ++frameNdx)
511 {
512 DecodedFrame& frame = frames[frameNdx];
513
514 m_decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frame);
515 }
516
517 for (size_t frameNdx = 0; frameNdx < 2; ++frameNdx)
518 {
519 DecodedFrame& frame = frames[frameNdx];
520 const VkExtent2D imageExtent = frame.pDecodedImage->getExtent();
521 const VkImage image = frame.pDecodedImage->getImage();
522 const VkFormat format = frame.pDecodedImage->getFormat();
523 const VkImageLayout layout = frame.decodedImageLayout;
524
525 if (frame.pictureIndex >= 0)
526 {
527 const bool assumeCorrect = m_caseDef.testType == TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS;
528 MovePtr<MultiPlaneImageData> resultImage = getDecodedImage(vkd, device, allocator, image, layout, format, imageExtent, queueFamilyIndexTransfer, queueFamilyIndexDecode);
529
530 if (assumeCorrect || verifyImage(frameNumber, *resultImage))
531 framesCorrect++;
532
533 m_decoder->ReleaseDisplayedFrame(&frame);
534 frameNumber++;
535
536 if (frameNumber >= DE_LENGTH_OF_ARRAY(frames))
537 break;
538 }
539 }
540
541 if (frameNumber >= DE_LENGTH_OF_ARRAY(frames))
542 break;
543 }
544
545 if (!vulkanVideoDecodeParser->deinitialize())
546 TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");
547
548 if (framesCorrect > 0 && framesCorrect == frameNumber)
549 return tcu::TestStatus::pass("pass");
550 else
551 return tcu::TestStatus::fail("Some frames has not been decoded correctly (" + de::toString(framesCorrect) + "/" + de::toString(frameNumber) + ")");
552 }
553
iterateMultipleFrame(void)554 tcu::TestStatus VideoDecodeTestInstance::iterateMultipleFrame (void)
555 {
556 const VideoDevice::VideoDeviceFlags videoDeviceFlags = VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED
557 | (m_queryWithStatusRequired ? VideoDevice::VIDEO_DEVICE_FLAG_QUERY_WITH_STATUS_FOR_DECODE_SUPPORT : 0);
558 const VkDevice device = getDeviceSupportingQueue(VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT, m_videoCodecOperation, videoDeviceFlags);
559 const DeviceInterface& vkd = getDeviceDriver();
560 const deUint32 queueFamilyIndexDecode = getQueueFamilyIndexDecode();
561 const deUint32 queueFamilyIndexTransfer = getQueueFamilyIndexTransfer();
562 Allocator& allocator = getAllocator();
563 MovePtr<vector<deUint8>> videoData = loadTestVideoData();
564 MovePtr<IfcFfmpegDemuxer> ffmpegDemuxer (m_decoder->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData));
565 VkExtensionProperties stdExtensionVersion = getExtensionVersion(m_videoCodecOperation);
566
567 MovePtr<IfcVulkanVideoDecodeParser> vulkanVideoDecodeParser (m_decoder->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation, &stdExtensionVersion));
568 bool videoStreamHasEnded = false;
569 int32_t framesInQueue = 0;
570 int32_t frameNumber = 0;
571 int32_t framesCorrect = 0;
572 vector<DecodedFrame> frames (m_frameCountTrigger, initDecodeFrame());
573
574 m_decoder->initialize(m_videoCodecOperation, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);
575
576 if (!vulkanVideoDecodeParser->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder.get())))
577 TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
578
579 while (framesInQueue > 0 || !videoStreamHasEnded)
580 {
581 framesInQueue = m_decoder->GetVideoFrameBuffer()->GetDisplayFramesCount();
582
583 while (framesInQueue < m_frameCountTrigger && !videoStreamHasEnded)
584 {
585 if (!videoStreamHasEnded)
586 {
587 deUint8* pData = 0;
588 deInt64 size = 0;
589 const bool demuxerSuccess = ffmpegDemuxer->demux(&pData, &size);
590 const bool parserSuccess = vulkanVideoDecodeParser->parseByteStream(pData, size);
591
592 if (!demuxerSuccess || !parserSuccess)
593 videoStreamHasEnded = true;
594 }
595
596 framesInQueue = m_decoder->GetVideoFrameBuffer()->GetDisplayFramesCount();
597 }
598
599 for (int32_t frameNdx = 0; frameNdx < m_frameCountTrigger; ++frameNdx)
600 {
601 DecodedFrame& frame = frames[frameNdx];
602
603 m_decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frame);
604 }
605
606 bool success = true;
607
608 for (int32_t frameNdx = 0; frameNdx < m_frameCountTrigger; ++frameNdx)
609 {
610 DecodedFrame& frame = frames[frameNdx];
611 const VkExtent2D imageExtent = frame.pDecodedImage->getExtent();
612 const VkImage image = frame.pDecodedImage->getImage();
613 const VkFormat format = frame.pDecodedImage->getFormat();
614 const VkImageLayout layout = frame.decodedImageLayout;
615
616 if (frame.pictureIndex >= 0)
617 {
618 MovePtr<MultiPlaneImageData> resultImage = getDecodedImage(vkd, device, allocator, image, layout, format, imageExtent, queueFamilyIndexTransfer, queueFamilyIndexDecode);
619
620 if (success && verifyImageMultipleFrame(frameNumber, *resultImage))
621 framesCorrect++;
622 else
623 success = false;
624
625 m_decoder->ReleaseDisplayedFrame(&frame);
626 frameNumber++;
627 }
628 }
629 }
630
631 if (!vulkanVideoDecodeParser->deinitialize())
632 TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");
633
634 if (framesCorrect > 0 && framesCorrect == frameNumber)
635 return tcu::TestStatus::pass("pass");
636 else
637 return tcu::TestStatus::fail("Some frames has not been decoded correctly (" + de::toString(framesCorrect) + "/" + de::toString(frameNumber) + ")");
638 }
639
verifyImage(uint32_t frameNumber,const MultiPlaneImageData & multiPlaneImageData)640 bool VideoDecodeTestInstance::verifyImage (uint32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData)
641 {
642 const tcu::UVec2 imageSize = multiPlaneImageData.getSize();
643 const uint32_t barCount = 10;
644 const uint32_t barWidth = 16;
645 const uint32_t barNum = uint32_t(frameNumber) % barCount;
646 const uint32_t edgeX = imageSize.x() - barWidth * barNum;
647 const uint32_t colorNdx = uint32_t(frameNumber) / barCount;
648 const int32_t refColorsV[] = { 240, 34, 110 };
649 const int32_t refColorsY[] = { 81, 145, 41 };
650 const int32_t refColorsU[] = { 90, 0, 0 };
651 const tcu::UVec4 refColorV = tcu::UVec4(refColorsV[colorNdx], 0, 0, 0);
652 const tcu::UVec4 refColorY = tcu::UVec4(refColorsY[colorNdx], 0, 0, 0);
653 const tcu::UVec4 refColorU = tcu::UVec4(refColorsU[colorNdx], 0, 0, 0);
654 const tcu::UVec4 refBlankV = tcu::UVec4(128, 0, 0, 0);
655 const tcu::UVec4 refBlankY = tcu::UVec4( 16, 0, 0, 0);
656 const tcu::UVec4 refBlankU = tcu::UVec4(128, 0, 0, 0);
657 tcu::ConstPixelBufferAccess outPixelBufferAccessV = multiPlaneImageData.getChannelAccess(0);
658 tcu::ConstPixelBufferAccess outPixelBufferAccessY = multiPlaneImageData.getChannelAccess(1);
659 tcu::ConstPixelBufferAccess outPixelBufferAccessU = multiPlaneImageData.getChannelAccess(2);
660 tcu::TextureLevel refPixelBufferV (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
661 tcu::TextureLevel refPixelBufferY (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
662 tcu::TextureLevel refPixelBufferU (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
663 tcu::PixelBufferAccess refPixelBufferAccessV = refPixelBufferV.getAccess();
664 tcu::PixelBufferAccess refPixelBufferAccessY = refPixelBufferY.getAccess();
665 tcu::PixelBufferAccess refPixelBufferAccessU = refPixelBufferU.getAccess();
666 tcu::TestLog& log = m_context.getTestContext().getLog();
667 const string titleV = "Rendered frame " + de::toString(frameNumber) + ". V Component";
668 const string titleY = "Rendered frame " + de::toString(frameNumber) + ". Y Component";
669 const string titleU = "Rendered frame " + de::toString(frameNumber) + ". U Component";
670 const tcu::UVec4 threshold = tcu::UVec4(0, 0, 0, 0);
671
672 for (uint32_t x = 0; x < imageSize.x(); ++x)
673 {
674 const tcu::UVec4& colorV = x < edgeX ? refColorV : refBlankV;
675 const tcu::UVec4& colorY = x < edgeX ? refColorY : refBlankY;
676 const tcu::UVec4& colorU = x < edgeX ? refColorU : refBlankU;
677
678 for (uint32_t y = 0; y < imageSize.y(); ++y)
679 {
680 refPixelBufferAccessV.setPixel(colorV, x, y);
681 refPixelBufferAccessY.setPixel(colorY, x, y);
682 refPixelBufferAccessU.setPixel(colorU, x, y);
683 }
684 }
685
686 const bool resultV = tcu::intThresholdCompare(log, titleV.c_str(), "", refPixelBufferAccessV, outPixelBufferAccessV, threshold, tcu::COMPARE_LOG_ON_ERROR);
687 const bool resultY = tcu::intThresholdCompare(log, titleY.c_str(), "", refPixelBufferAccessY, outPixelBufferAccessY, threshold, tcu::COMPARE_LOG_ON_ERROR);
688 const bool resultU = tcu::intThresholdCompare(log, titleU.c_str(), "", refPixelBufferAccessU, outPixelBufferAccessU, threshold, tcu::COMPARE_LOG_ON_ERROR);
689
690 return resultV && resultY && resultU;
691 }
692
verifyImageMultipleFrame(uint32_t frameNumber,const MultiPlaneImageData & multiPlaneImageData)693 bool VideoDecodeTestInstance::verifyImageMultipleFrame (uint32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData)
694 {
695 const bool noReferenceTests = m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13
696 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER
697 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13
698 || m_caseDef.testType == TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER;
699
700 if (noReferenceTests)
701 {
702 const bool h264 = m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13
703 || m_caseDef.testType == TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER;
704 const vector<ReferencePixel> referencePixels264
705 {
706 ReferencePixel(tcu::IVec3( 0, 0, 0), tcu::IVec3( 124, 53, 140)),
707 ReferencePixel(tcu::IVec3(1920 - 1, 1080 - 1, 0), tcu::IVec3( 131, 190, 115)),
708 ReferencePixel(tcu::IVec3( 0, 0, 12), tcu::IVec3( 140, 223, 92)),
709 ReferencePixel(tcu::IVec3(1920 - 1, 1080 - 1, 12), tcu::IVec3( 138, 166, 98)),
710 };
711 const vector<ReferencePixel> referencePixels265
712 {
713 ReferencePixel(tcu::IVec3( 0, 0, 0), tcu::IVec3( 124, 55, 144)),
714 ReferencePixel(tcu::IVec3(1920 - 1, 1080 - 1, 0), tcu::IVec3( 130, 190, 114)),
715 ReferencePixel(tcu::IVec3( 0, 0, 12), tcu::IVec3( 142, 210, 94)),
716 ReferencePixel(tcu::IVec3(1920 - 1, 1080 - 1, 12), tcu::IVec3( 137, 166, 96)),
717 };
718 const vector<ReferencePixel>& referencePixels = h264 ? referencePixels264 : referencePixels265;
719
720 return verifyImageMultipleFrameNoReference(frameNumber, multiPlaneImageData, referencePixels);
721 }
722 else
723 return verifyImageMultipleFrameWithReference(frameNumber, multiPlaneImageData);
724 }
725
// Compares one decoded frame against a procedurally generated reference image.
//
// The reference clip renders an animation on a grid of cells: within each
// 15-frame half of the clip, frame N has cells 0..N%15 (laid out 5 per row)
// filled with the "active" color while the rest of the image holds the "blank"
// color; in the second half the active/blank colors are swapped and the cells
// are painted 6 cell rows lower (cellOfs). Plane 0 ("V"), plane 1 ("Y") and
// plane 2 ("U") are each compared against a single-channel reference with a
// zero threshold.
bool VideoDecodeTestInstance::verifyImageMultipleFrameWithReference (uint32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData)
{
	tcu::TestLog& log = m_context.getTestContext().getLog();
	const bool firstHalf = frameNumber < 15;
	// Only the resolution-change variants double the cell size during the first half of the clip.
	const bool resolutionChange = m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE || m_caseDef.testType == TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB;
	const uint32_t k = resolutionChange
					 ? (firstHalf ? 2 : 1)
					 : 1;
	const uint32_t cellSize = 16 * k;
	const uint32_t cellCountX = 11;
	const uint32_t cellCountV = 9;
	const tcu::UVec2 imageSize = { cellSize * cellCountX, cellSize * cellCountV };
	const string titleV = "Rendered frame " + de::toString(frameNumber) + ". V Component";
	// NOTE(review): refColor0V and refColor1V are identical (128), so the V
	// plane reference is uniform; presumably only Y carries the animation.
	const tcu::UVec4 refColor0V = tcu::UVec4(128, 0, 0, 255);
	const tcu::UVec4 refColor1V = tcu::UVec4(128, 0, 0, 255);
	const tcu::UVec4& refColorV = firstHalf ? refColor0V : refColor1V;
	const tcu::UVec4& refBlankV = firstHalf ? refColor1V : refColor0V;
	tcu::TextureLevel refPixelBufferV (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
	tcu::PixelBufferAccess refPixelBufferAccessV = refPixelBufferV.getAccess();
	// Convert the decoded plane into a buffer of the reference size for comparison.
	MovePtr<tcu::TextureLevel> outPixelBufferV = convertToRGBASized(multiPlaneImageData.getChannelAccess(0), imageSize);
	tcu::PixelBufferAccess outPixelBufferAccessV = outPixelBufferV->getAccess();
	const string titleY = "Rendered frame " + de::toString(frameNumber) + ". Y Component";
	// Y flips between 235 ("active") and 16 ("blank") across the two halves.
	const tcu::UVec4 refColor0Y = tcu::UVec4(235, 0, 0, 255);
	const tcu::UVec4 refColor1Y = tcu::UVec4( 16, 0, 0, 255);
	const tcu::UVec4& refColorY = firstHalf ? refColor0Y : refColor1Y;
	const tcu::UVec4& refBlankY = firstHalf ? refColor1Y : refColor0Y;
	tcu::TextureLevel refPixelBufferY (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
	tcu::PixelBufferAccess refPixelBufferAccessY = refPixelBufferY.getAccess();
	MovePtr<tcu::TextureLevel> outPixelBufferY = convertToRGBASized(multiPlaneImageData.getChannelAccess(1), imageSize);
	tcu::PixelBufferAccess outPixelBufferAccessY = outPixelBufferY->getAccess();
	const string titleU = "Rendered frame " + de::toString(frameNumber) + ". U Component";
	// Like V, the U plane reference is uniform 128.
	const tcu::UVec4 refColor0U = tcu::UVec4(128, 0, 0, 255);
	const tcu::UVec4 refColor1U = tcu::UVec4(128, 0, 0, 255);
	const tcu::UVec4& refColorU = firstHalf ? refColor0U : refColor1U;
	const tcu::UVec4& refBlankU = firstHalf ? refColor1U : refColor0U;
	tcu::TextureLevel refPixelBufferU (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
	tcu::PixelBufferAccess refPixelBufferAccessU = refPixelBufferU.getAccess();
	MovePtr<tcu::TextureLevel> outPixelBufferU = convertToRGBASized(multiPlaneImageData.getChannelAccess(2), imageSize);
	tcu::PixelBufferAccess outPixelBufferAccessU = outPixelBufferU->getAccess();
	// Exact match required on every pixel.
	const tcu::UVec4 threshold = tcu::UVec4(0, 0, 0, 0);

	// Start from an all-blank reference image...
	for (uint32_t x = 0; x < imageSize.x(); ++x)
	for (uint32_t y = 0; y < imageSize.y(); ++y)
	{
		refPixelBufferAccessV.setPixel(refBlankV, x, y);
		refPixelBufferAccessY.setPixel(refBlankY, x, y);
		refPixelBufferAccessU.setPixel(refBlankU, x, y);
	}

	// ...then paint the cells expected to be filled by this frame (5 per row;
	// the second half of the clip paints 6 cell rows lower).
	for (uint32_t cellNdx = 0; cellNdx <= frameNumber % 15; cellNdx++)
	{
		const uint32_t cellOfs = firstHalf ? 0 : 6 * cellSize;
		const uint32_t cellX0 = cellSize * (cellNdx % 5);
		const uint32_t cellV0 = cellSize * (cellNdx / 5) + cellOfs;
		const uint32_t cellX1 = cellX0 + cellSize;
		const uint32_t cellV1 = cellV0 + cellSize;

		for (uint32_t x = cellX0; x < cellX1; ++x)
		for (uint32_t y = cellV0; y < cellV1; ++y)
		{
			refPixelBufferAccessV.setPixel(refColorV, x, y);
			refPixelBufferAccessY.setPixel(refColorY, x, y);
			refPixelBufferAccessU.setPixel(refColorU, x, y);
		}
	}

	// Each plane is compared (and logged on mismatch) independently; the frame
	// passes only if all three planes match.
	const bool resultV = tcu::intThresholdCompare(log, titleV.c_str(), "", refPixelBufferAccessV, outPixelBufferAccessV, threshold, tcu::COMPARE_LOG_ON_ERROR);
	const bool resultY = tcu::intThresholdCompare(log, titleY.c_str(), "", refPixelBufferAccessY, outPixelBufferAccessY, threshold, tcu::COMPARE_LOG_ON_ERROR);
	const bool resultU = tcu::intThresholdCompare(log, titleU.c_str(), "", refPixelBufferAccessU, outPixelBufferAccessU, threshold, tcu::COMPARE_LOG_ON_ERROR);

	return resultV && resultY && resultU;
}
798
verifyImageMultipleFrameNoReference(uint32_t frameNumber,const MultiPlaneImageData & multiPlaneImageData,const vector<ReferencePixel> & referencePixels)799 bool VideoDecodeTestInstance::verifyImageMultipleFrameNoReference (uint32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData, const vector<ReferencePixel>& referencePixels)
800 {
801 bool decodeFrame = false;
802 for (size_t i = 0; i < referencePixels.size(); i++)
803 if (referencePixels[i].first.z() == static_cast<int>(frameNumber))
804 decodeFrame = true;
805
806 if (decodeFrame)
807 {
808 MovePtr<tcu::TextureLevel> outPixelBufferV = convertToRGBA(multiPlaneImageData.getChannelAccess(0));
809 tcu::PixelBufferAccess outPixelBufferAccessV = outPixelBufferV->getAccess();
810 MovePtr<tcu::TextureLevel> outPixelBufferY = convertToRGBA(multiPlaneImageData.getChannelAccess(1));
811 tcu::PixelBufferAccess outPixelBufferAccessY = outPixelBufferY->getAccess();
812 MovePtr<tcu::TextureLevel> outPixelBufferU = convertToRGBA(multiPlaneImageData.getChannelAccess(2));
813 tcu::PixelBufferAccess outPixelBufferAccessU = outPixelBufferU->getAccess();
814 tcu::TestLog& log = m_context.getTestContext().getLog();
815
816 log << tcu::TestLog::Message << "TODO: WARNING: ONLY FEW PIXELS ARE CHECKED\n" << tcu::TestLog::EndMessage;
817
818 log << tcu::TestLog::ImageSet("Frame", "")
819 << tcu::TestLog::Image("Result V", "Result V", outPixelBufferAccessV)
820 << tcu::TestLog::Image("Result Y", "Result Y", outPixelBufferAccessY)
821 << tcu::TestLog::Image("Result U", "Result U", outPixelBufferAccessU)
822 << tcu::TestLog::EndImageSet;
823
824 for (size_t i = 0; i < referencePixels.size(); i++)
825 if (referencePixels[i].first.z() == static_cast<int>(frameNumber))
826 {
827 const tcu::IVec3& pos = referencePixels[i].first;
828 const tcu::IVec3& ref = referencePixels[i].second;
829 const tcu::IVec3 value = tcu::IVec3(outPixelBufferAccessV.getPixelInt(pos.x(), pos.y()).x(),
830 outPixelBufferAccessY.getPixelInt(pos.x(), pos.y()).x(),
831 outPixelBufferAccessU.getPixelInt(pos.x(), pos.y()).x());
832
833 if (value != ref)
834 return false;
835 }
836 }
837
838 return true;
839 }
840
// Test instance that decodes two video streams with two decoder objects on a
// single device, interleaving their cached decode commands (see iterate()).
class DualVideoDecodeTestInstance : public VideoBaseTestInstance
{
public:
									DualVideoDecodeTestInstance	(Context&					context,
																 const CaseDef&				data);
									~DualVideoDecodeTestInstance	(void);

	// Loads the bitstream for the first (primary) or second clip, selected by test type.
	MovePtr<vector<deUint8>>		loadTestVideoData			(bool						primary);

	tcu::TestStatus					iterate						(void);
	// Verifies one frame; firstClip selects which clip's reference colors apply.
	bool							verifyImage					(bool						firstClip,
																 int32_t					frameNumber,
																 const MultiPlaneImageData&	multiPlaneImageData);
protected:
	CaseDef							m_caseDef;
	MovePtr<VideoBaseDecoder>		m_decoder1;						// Always decodes H.264.
	MovePtr<VideoBaseDecoder>		m_decoder2;						// Codec depends on test type.
	VkVideoCodecOperationFlagBitsKHR	m_videoCodecOperation;		// Union of both decoders' operations.
	VkVideoCodecOperationFlagBitsKHR	m_videoCodecOperation1;
	VkVideoCodecOperationFlagBitsKHR	m_videoCodecOperation2;
	int32_t							m_frameCountTrigger;			// Frames cached per decoder before interleaved submission.
};
863
DualVideoDecodeTestInstance(Context & context,const CaseDef & data)864 DualVideoDecodeTestInstance::DualVideoDecodeTestInstance (Context& context, const CaseDef& data)
865 : VideoBaseTestInstance (context)
866 , m_caseDef (data)
867 , m_decoder1 (new VideoBaseDecoder(context))
868 , m_decoder2 (new VideoBaseDecoder(context))
869 , m_videoCodecOperation (VK_VIDEO_CODEC_OPERATION_NONE_KHR)
870 , m_videoCodecOperation1 (VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR)
871 , m_videoCodecOperation2 (VK_VIDEO_CODEC_OPERATION_NONE_KHR)
872 , m_frameCountTrigger (10)
873 {
874 const bool randomOrSwapped = false;
875 const bool queryResultWithStatus = false;
876
877 m_decoder1->setDecodeParameters(randomOrSwapped, queryResultWithStatus, m_frameCountTrigger + 1);
878 m_decoder2->setDecodeParameters(randomOrSwapped, queryResultWithStatus, m_frameCountTrigger + 1);
879
880 m_videoCodecOperation2 = m_caseDef.testType == TEST_TYPE_H264_DECODE_INTERLEAVED ? VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR
881 : m_caseDef.testType == TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED ? VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT
882 : m_caseDef.testType == TEST_TYPE_H264_H265_DECODE_INTERLEAVED ? VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
883 : VK_VIDEO_CODEC_OPERATION_NONE_KHR;
884
885 DE_ASSERT(m_videoCodecOperation2 != VK_VIDEO_CODEC_OPERATION_NONE_KHR);
886
887 m_videoCodecOperation = static_cast<VkVideoCodecOperationFlagBitsKHR>(m_videoCodecOperation1 | m_videoCodecOperation2);
888
889 if (m_videoCodecOperation2 == VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT)
890 TCU_THROW(NotSupportedError, "NOT IMPLEMENTED: REQUIRES ENCODE QUEUE");
891 }
892
// Nothing to release explicitly: the decoders are owned by MovePtr members.
DualVideoDecodeTestInstance::~DualVideoDecodeTestInstance (void)
{
}
896
loadTestVideoData(bool primary)897 MovePtr<vector<deUint8>> DualVideoDecodeTestInstance::loadTestVideoData (bool primary)
898 {
899 switch (m_caseDef.testType)
900 {
901 case TEST_TYPE_H264_DECODE_INTERLEAVED: return primary ? loadVideoDataClipA() : loadVideoDataClipB();
902 case TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED: return loadVideoDataClipA();
903 case TEST_TYPE_H264_H265_DECODE_INTERLEAVED: return primary ? loadVideoDataClipA() : loadVideoDataClipD();
904 default: TCU_THROW(InternalError, "Unknown testType");
905 }
906 }
907
// Runs the interleaved dual-decode test:
//   1. Create one device supporting both codec operations plus transfer.
//   2. Demux/parse both clips until each decoder has cached m_frameCountTrigger
//      display frames (no submission yet).
//   3. Submit both decoders' cached pictures interleaved (DecodeCachedPictures).
//   4. Dequeue and verify every decoded frame from both decoders.
// Passes only if every dequeued frame verifies correctly.
tcu::TestStatus DualVideoDecodeTestInstance::iterate (void)
{
	const VideoDevice::VideoDeviceFlags videoDeviceFlags = VideoDevice::VIDEO_DEVICE_FLAG_REQUIRE_SYNC2_OR_NOT_SUPPORTED;
	// One device must serve both codec operations (m_videoCodecOperation is their union).
	const VkDevice device = getDeviceSupportingQueue(VK_QUEUE_VIDEO_DECODE_BIT_KHR | VK_QUEUE_TRANSFER_BIT, m_videoCodecOperation, videoDeviceFlags);
	const DeviceInterface& vkd = getDeviceDriver();
	const deUint32 queueFamilyIndexDecode = getQueueFamilyIndexDecode();
	const deUint32 queueFamilyIndexTransfer = getQueueFamilyIndexTransfer();
	Allocator& allocator = getAllocator();
	MovePtr<vector<deUint8>> videoData1 = loadTestVideoData(true);
	MovePtr<vector<deUint8>> videoData2 = loadTestVideoData(false);
	// Separate demuxer and parser per stream; each parser feeds its own decoder.
	MovePtr<IfcFfmpegDemuxer> ffmpegDemuxer1 (m_decoder1->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData1));
	MovePtr<IfcFfmpegDemuxer> ffmpegDemuxer2 (m_decoder2->GetIfcFfmpegFuncs()->createIfcFfmpegDemuxer(videoData2));
	VkExtensionProperties stdExtensionVersion1 = getExtensionVersion(m_videoCodecOperation1);
	VkExtensionProperties stdExtensionVersion2 = getExtensionVersion(m_videoCodecOperation2);

	MovePtr<IfcVulkanVideoDecodeParser> vulkanVideoDecodeParser1 (m_decoder1->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation1, &stdExtensionVersion1));
	MovePtr<IfcVulkanVideoDecodeParser> vulkanVideoDecodeParser2 (m_decoder2->GetNvFuncs()->createIfcVulkanVideoDecodeParser(m_videoCodecOperation2, &stdExtensionVersion2));
	int32_t frameNumber = 0;	// Total frames dequeued across both decoders.
	int32_t framesCorrect = 0;	// Frames that passed verification.
	vector<DecodedFrame> frames (m_frameCountTrigger, initDecodeFrame());

	m_decoder1->initialize(m_videoCodecOperation1, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);

	// The parser reports decode events back to the decoder via the NVIDIA parser client interface.
	if (!vulkanVideoDecodeParser1->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder1.get())))
	{
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
	}

	m_decoder2->initialize(m_videoCodecOperation2, vkd, device, queueFamilyIndexTransfer, queueFamilyIndexDecode, allocator);

	if (!vulkanVideoDecodeParser2->initialize(dynamic_cast<NvidiaVulkanParserVideoDecodeClient*>(m_decoder2.get())))
	{
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->initialize()");
	}

	// Feed stream 1 into decoder 1 until enough display frames are queued
	// (or the stream/parser gives up).
	{
		bool videoStreamHasEnded = false;
		int32_t framesInQueue = 0;

		while (framesInQueue < m_frameCountTrigger && !videoStreamHasEnded)
		{
			deUint8* pData = 0;
			deInt64 size = 0;
			const bool demuxerSuccess = ffmpegDemuxer1->demux(&pData, &size);
			const bool parserSuccess = vulkanVideoDecodeParser1->parseByteStream(pData, size);

			if (!demuxerSuccess || !parserSuccess)
				videoStreamHasEnded = true;

			framesInQueue = m_decoder1->GetVideoFrameBuffer()->GetDisplayFramesCount();
		}
	}

	// Feed stream 2 into decoder 2 in the same way.
	{
		bool videoStreamHasEnded = false;
		int32_t framesInQueue = 0;

		while (framesInQueue < m_frameCountTrigger && !videoStreamHasEnded)
		{
			deUint8* pData = 0;
			deInt64 size = 0;
			const bool demuxerSuccess = ffmpegDemuxer2->demux(&pData, &size);
			const bool parserSuccess = vulkanVideoDecodeParser2->parseByteStream(pData, size);

			if (!demuxerSuccess || !parserSuccess)
				videoStreamHasEnded = true;

			framesInQueue = m_decoder2->GetVideoFrameBuffer()->GetDisplayFramesCount();
		}
	}

	// Submit both decoders' cached pictures with their commands interleaved.
	m_decoder1->DecodeCachedPictures(m_decoder2.get());

	for (size_t decoderNdx = 0; decoderNdx < 2; ++decoderNdx)
	{
		const bool firstDecoder = (decoderNdx == 0);
		VideoBaseDecoder* decoder = firstDecoder ? m_decoder1.get() : m_decoder2.get();
		// Decoder 2 plays clip A only in the H.264/H.265 interleave case;
		// otherwise it plays the second clip's reference colors.
		const bool firstClip = firstDecoder ? true
						   : m_caseDef.testType == TEST_TYPE_H264_H265_DECODE_INTERLEAVED;

		for (int32_t frameNdx = 0; frameNdx < m_frameCountTrigger; ++frameNdx)
		{
			decoder->GetVideoFrameBuffer()->DequeueDecodedPicture(&frames[frameNdx]);

			DecodedFrame& frame = frames[frameNdx];
			const VkExtent2D imageExtent = frame.pDecodedImage->getExtent();
			const VkImage image = frame.pDecodedImage->getImage();
			const VkFormat format = frame.pDecodedImage->getFormat();
			const VkImageLayout layout = frame.decodedImageLayout;

			// Negative pictureIndex marks an empty slot; skip verification for those.
			if (frame.pictureIndex >= 0)
			{
				MovePtr<MultiPlaneImageData> resultImage = getDecodedImage(vkd, device, allocator, image, layout, format, imageExtent, queueFamilyIndexTransfer, queueFamilyIndexDecode);

				if (verifyImage(firstClip, frameNdx, *resultImage))
					framesCorrect++;

				decoder->ReleaseDisplayedFrame(&frame);
				frameNumber++;
			}
		}
	}

	// Tear down parsers in reverse order of creation.
	if (!vulkanVideoDecodeParser2->deinitialize())
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");

	if (!vulkanVideoDecodeParser1->deinitialize())
		TCU_THROW(InternalError, "vulkanVideoDecodeParser->deinitialize()");

	// At least one frame must have been decoded, and all of them correctly.
	if (framesCorrect > 0 && framesCorrect == frameNumber)
		return tcu::TestStatus::pass("pass");
	else
		return tcu::TestStatus::fail("Some frames has not been decoded correctly (" + de::toString(framesCorrect) + "/" + de::toString(frameNumber) + ")");
}
1022
// Verifies one frame produced by either decoder of the interleaving tests.
//
// Each clip renders a bar pattern: for frame N of a 10-frame cycle, a strip of
// barNum bars at the right edge is expected to hold the "blank" color, while
// everything left of edgeX holds a clip-specific reference color. The second
// clip uses double-width bars. Each plane is compared with a zero threshold.
bool DualVideoDecodeTestInstance::verifyImage (bool firstClip, int32_t frameNumber, const MultiPlaneImageData& multiPlaneImageData)
{
	const tcu::UVec2 imageSize = multiPlaneImageData.getSize();
	const uint32_t k = firstClip ? 1 : 2;	// Bar width multiplier for the second clip.
	const uint32_t barCount = 10;
	const uint32_t barWidth = 16 * k;
	const uint32_t barNum = uint32_t(frameNumber) % barCount;
	const uint32_t edgeX = imageSize.x() - barWidth * barNum;	// Left of edgeX: reference color; right: blank.
	// Index into the 3-entry color tables below.
	// NOTE(review): assumes frameNumber < 30; m_frameCountTrigger is 10 so in
	// practice only index 0 is used — confirm if the trigger count grows.
	const uint32_t colorNdx = uint32_t(frameNumber) / barCount;
	// Expected plane values: *1 tables for the first clip, *2 for the second.
	const int32_t refColorsV1[] = { 240, 34, 110 };
	const int32_t refColorsY1[] = { 81, 145, 41 };
	const int32_t refColorsU1[] = { 90, 0, 0 };
	const int32_t refColorsV2[] = { 16, 0, 0 };
	const int32_t refColorsY2[] = { 170, 0, 0 };
	const int32_t refColorsU2[] = { 166, 0, 0 };
	const tcu::UVec4 refColorV = tcu::UVec4(firstClip ? refColorsV1[colorNdx] : refColorsV2[colorNdx], 0, 0, 0);
	const tcu::UVec4 refColorY = tcu::UVec4(firstClip ? refColorsY1[colorNdx] : refColorsY2[colorNdx], 0, 0, 0);
	const tcu::UVec4 refColorU = tcu::UVec4(firstClip ? refColorsU1[colorNdx] : refColorsU2[colorNdx], 0, 0, 0);
	// Blank (background) values shared by both clips.
	const tcu::UVec4 refBlankV = tcu::UVec4(128, 0, 0, 0);
	const tcu::UVec4 refBlankY = tcu::UVec4( 16, 0, 0, 0);
	const tcu::UVec4 refBlankU = tcu::UVec4(128, 0, 0, 0);
	tcu::ConstPixelBufferAccess outPixelBufferAccessV = multiPlaneImageData.getChannelAccess(0);
	tcu::ConstPixelBufferAccess outPixelBufferAccessY = multiPlaneImageData.getChannelAccess(1);
	tcu::ConstPixelBufferAccess outPixelBufferAccessU = multiPlaneImageData.getChannelAccess(2);
	tcu::TextureLevel refPixelBufferV (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
	tcu::TextureLevel refPixelBufferY (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
	tcu::TextureLevel refPixelBufferU (mapVkFormat(VK_FORMAT_R8_UNORM), imageSize.x(), imageSize.y());
	tcu::PixelBufferAccess refPixelBufferAccessV = refPixelBufferV.getAccess();
	tcu::PixelBufferAccess refPixelBufferAccessY = refPixelBufferY.getAccess();
	tcu::PixelBufferAccess refPixelBufferAccessU = refPixelBufferU.getAccess();
	tcu::TestLog& log = m_context.getTestContext().getLog();
	const string titleV = "Rendered frame " + de::toString(frameNumber) + ". V Component";
	const string titleY = "Rendered frame " + de::toString(frameNumber) + ". Y Component";
	const string titleU = "Rendered frame " + de::toString(frameNumber) + ". U Component";
	// Exact match required on every pixel.
	const tcu::UVec4 threshold = tcu::UVec4(0, 0, 0, 0);

	// Build the reference planes column by column: each column is entirely
	// reference-colored or entirely blank depending on its side of edgeX.
	for (uint32_t x = 0; x < imageSize.x(); ++x)
	{
		const tcu::UVec4& colorV = (x < edgeX) ? refColorV : refBlankV;
		const tcu::UVec4& colorY = (x < edgeX) ? refColorY : refBlankY;
		const tcu::UVec4& colorU = (x < edgeX) ? refColorU : refBlankU;

		for (uint32_t y = 0; y < imageSize.y(); ++y)
		{
			refPixelBufferAccessV.setPixel(colorV, x, y);
			refPixelBufferAccessY.setPixel(colorY, x, y);
			refPixelBufferAccessU.setPixel(colorU, x, y);
		}
	}

	const bool resultV = tcu::intThresholdCompare(log, titleV.c_str(), "", refPixelBufferAccessV, outPixelBufferAccessV, threshold, tcu::COMPARE_LOG_ON_ERROR);
	const bool resultY = tcu::intThresholdCompare(log, titleY.c_str(), "", refPixelBufferAccessY, outPixelBufferAccessY, threshold, tcu::COMPARE_LOG_ON_ERROR);
	const bool resultU = tcu::intThresholdCompare(log, titleU.c_str(), "", refPixelBufferAccessU, outPixelBufferAccessU, threshold, tcu::COMPARE_LOG_ON_ERROR);

	return resultV && resultY && resultU;
}
1079 #endif // #ifdef DE_BUILD_VIDEO
// Test case wrapper: checks platform/extension support and instantiates the
// single- or dual-decoder test instance depending on the test type.
class VideoDecodeTestCase : public TestCase
{
public:
						VideoDecodeTestCase		(tcu::TestContext& context, const char* name, const char* desc, const CaseDef caseDef);
						~VideoDecodeTestCase	(void);

	virtual TestInstance*	createInstance		(Context& context) const;
	virtual void			checkSupport		(Context& context) const;

private:
	CaseDef				m_caseDef;
};
1092
// Stores the case definition; all per-case work happens in the test instance.
VideoDecodeTestCase::VideoDecodeTestCase (tcu::TestContext& context, const char* name, const char* desc, const CaseDef caseDef)
	: vkt::TestCase	(context, name, desc)
	, m_caseDef		(caseDef)
{
}
1098
// No resources owned beyond the base class.
VideoDecodeTestCase::~VideoDecodeTestCase (void)
{
}
1102
checkSupport(Context & context) const1103 void VideoDecodeTestCase::checkSupport (Context& context) const
1104 {
1105 #if (DE_PTR_SIZE != 8)
1106 // Issue #4253: https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/4253
1107 // These tests rely on external libraries to do the video parsing,
1108 // and those libraries are only available as 64-bit at this time.
1109 TCU_THROW(NotSupportedError, "CTS is not built 64-bit so cannot use the 64-bit video parser library");
1110 #endif
1111
1112 context.requireDeviceFunctionality("VK_KHR_video_queue");
1113 context.requireDeviceFunctionality("VK_KHR_synchronization2");
1114
1115 switch (m_caseDef.testType)
1116 {
1117 case TEST_TYPE_H264_DECODE_I:
1118 case TEST_TYPE_H264_DECODE_I_P:
1119 case TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER:
1120 case TEST_TYPE_H264_DECODE_I_P_B_13:
1121 case TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER:
1122 case TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS:
1123 case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE:
1124 case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB:
1125 case TEST_TYPE_H264_DECODE_INTERLEAVED:
1126 case TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED:
1127 {
1128 context.requireDeviceFunctionality("VK_KHR_video_decode_h264");
1129 break;
1130 }
1131 case TEST_TYPE_H265_DECODE_I:
1132 case TEST_TYPE_H265_DECODE_I_P:
1133 case TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER:
1134 case TEST_TYPE_H265_DECODE_I_P_B_13:
1135 case TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER:
1136 {
1137 context.requireDeviceFunctionality("VK_KHR_video_decode_h265");
1138 break;
1139 }
1140 case TEST_TYPE_H264_H265_DECODE_INTERLEAVED:
1141 {
1142 context.requireDeviceFunctionality("VK_KHR_video_decode_h264");
1143 context.requireDeviceFunctionality("VK_KHR_video_decode_h265");
1144 break;
1145 }
1146 default:
1147 TCU_THROW(InternalError, "Unknown TestType");
1148 }
1149 }
1150
// Creates the matching test instance for the case type.
//
// Note the deliberate preprocessor trick: when DE_BUILD_VIDEO is not defined
// the case bodies are empty, so every case label falls through to the next
// group and finally into "default:", which throws. With DE_BUILD_VIDEO defined
// every recognized case returns an instance before reaching "default:".
TestInstance* VideoDecodeTestCase::createInstance (Context& context) const
{
	// Vulkan video is unsupported for android platform
	switch (m_caseDef.testType)
	{
		// Single-decoder cases.
		case TEST_TYPE_H264_DECODE_I:
		case TEST_TYPE_H264_DECODE_I_P:
		case TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER:
		case TEST_TYPE_H264_DECODE_I_P_B_13:
		case TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER:
		case TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS:
		case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE:
		case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB:
		case TEST_TYPE_H265_DECODE_I:
		case TEST_TYPE_H265_DECODE_I_P:
		case TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER:
		case TEST_TYPE_H265_DECODE_I_P_B_13:
		case TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER:
		{
#ifdef DE_BUILD_VIDEO
			return new VideoDecodeTestInstance(context, m_caseDef);
#endif
		}
		// Dual-decoder (interleaved) cases.
		case TEST_TYPE_H264_DECODE_INTERLEAVED:
		case TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED:
		case TEST_TYPE_H264_H265_DECODE_INTERLEAVED:
		{
#ifdef DE_BUILD_VIDEO
			return new DualVideoDecodeTestInstance(context, m_caseDef);
#endif
		}
		default:
			TCU_THROW(InternalError, "Unknown TestType");
	}
	// Unreachable; only keeps 'context' referenced when the video build is disabled.
#ifndef DE_BUILD_VIDEO
	DE_UNREF(context);
#endif

}
1190
getTestName(const TestType testType)1191 const char* getTestName (const TestType testType)
1192 {
1193 switch (testType)
1194 {
1195 case TEST_TYPE_H264_DECODE_I: return "h264_i";
1196 case TEST_TYPE_H264_DECODE_I_P: return "h264_i_p";
1197 case TEST_TYPE_H264_DECODE_I_P_NOT_MATCHING_ORDER: return "h264_i_p_not_matching_order";
1198 case TEST_TYPE_H264_DECODE_I_P_B_13: return "h264_i_p_b_13";
1199 case TEST_TYPE_H264_DECODE_I_P_B_13_NOT_MATCHING_ORDER: return "h264_i_p_b_13_not_matching_order";
1200 case TEST_TYPE_H264_DECODE_QUERY_RESULT_WITH_STATUS: return "h264_query_with_status";
1201 case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE: return "h264_resolution_change";
1202 case TEST_TYPE_H264_DECODE_RESOLUTION_CHANGE_DPB: return "h264_resolution_change_dpb";
1203 case TEST_TYPE_H264_DECODE_INTERLEAVED: return "h264_interleaved";
1204 case TEST_TYPE_H264_BOTH_DECODE_ENCODE_INTERLEAVED: return "h264_decode_encode_interleaved";
1205 case TEST_TYPE_H264_H265_DECODE_INTERLEAVED: return "h264_h265_interleaved";
1206 case TEST_TYPE_H265_DECODE_I: return "h265_i";
1207 case TEST_TYPE_H265_DECODE_I_P: return "h265_i_p";
1208 case TEST_TYPE_H265_DECODE_I_P_NOT_MATCHING_ORDER: return "h265_i_p_not_matching_order";
1209 case TEST_TYPE_H265_DECODE_I_P_B_13: return "h265_i_p_b_13";
1210 case TEST_TYPE_H265_DECODE_I_P_B_13_NOT_MATCHING_ORDER: return "h265_i_p_b_13_not_matching_order";
1211 default: TCU_THROW(InternalError, "Unknown TestType");
1212 }
1213 }
1214 } // anonymous
1215
createVideoDecodeTests(tcu::TestContext & testCtx)1216 tcu::TestCaseGroup* createVideoDecodeTests (tcu::TestContext& testCtx)
1217 {
1218 MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "decode", "Video decoding session tests"));
1219
1220 for (int testTypeNdx = 0; testTypeNdx < TEST_TYPE_LAST; ++testTypeNdx)
1221 {
1222 const TestType testType = static_cast<TestType>(testTypeNdx);
1223 const CaseDef caseDef =
1224 {
1225 testType, // TestType testType;
1226 };
1227
1228 group->addChild(new VideoDecodeTestCase(testCtx, getTestName(testType), "", caseDef));
1229 }
1230
1231 return group.release();
1232 }
1233 } // video
1234 } // vkt
1235