• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2017 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 //#define LOG_NDEBUG 0
6 #define LOG_TAG "C2VDAComponent_test"
7 
8 #include <C2VDAAllocatorStore.h>
9 #include <C2VDAComponent.h>
10 
11 #include <C2Buffer.h>
12 #include <C2BufferPriv.h>
13 #include <C2Component.h>
14 #include <C2PlatformSupport.h>
15 #include <C2Work.h>
16 #include <SimpleC2Interface.h>
17 
18 #include <base/files/file.h>
19 #include <base/files/file_path.h>
20 #include <base/md5.h>
21 #include <base/strings/string_piece.h>
22 #include <base/strings/string_split.h>
23 
24 #include <gtest/gtest.h>
25 #include <media/DataSource.h>
26 #include <media/IMediaHTTPService.h>
27 #include <media/MediaSource.h>
28 #include <media/stagefright/DataSourceFactory.h>
29 #include <media/stagefright/MediaDefs.h>
30 #include <media/stagefright/MediaErrors.h>
31 #include <media/stagefright/MediaExtractor.h>
32 #include <media/stagefright/MediaExtractorFactory.h>
33 #include <media/stagefright/MetaData.h>
34 #include <media/stagefright/Utils.h>
35 #include <media/stagefright/foundation/ABuffer.h>
36 #include <media/stagefright/foundation/ALooper.h>
37 #include <media/stagefright/foundation/AMessage.h>
38 #include <media/stagefright/foundation/AUtils.h>
39 #include <utils/Log.h>
40 
41 #include <fcntl.h>
42 #include <inttypes.h>
43 #include <stdio.h>
44 #include <stdlib.h>
45 #include <string.h>
46 #include <sys/stat.h>
47 #include <sys/time.h>
48 #include <sys/types.h>
49 #include <algorithm>
50 #include <chrono>
51 #include <thread>
52 
53 using namespace std::chrono_literals;
54 
55 namespace {
56 
57 const int kMD5StringLength = 32;
58 
// Read in golden MD5s for the sanity play-through check of this video file.
readGoldenMD5s(const std::string & videoFile,std::vector<std::string> * md5Strings)60 void readGoldenMD5s(const std::string& videoFile, std::vector<std::string>* md5Strings) {
61     base::FilePath filepath(videoFile + ".md5");
62     std::string allMD5s;
63     base::ReadFileToString(filepath, &allMD5s);
64     *md5Strings = base::SplitString(allMD5s, "\n", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
65     // Check these are legitimate MD5s.
66     for (const std::string& md5String : *md5Strings) {
67         // Ignore the empty string added by SplitString. Ignore comments.
68         if (!md5String.length() || md5String.at(0) == '#') {
69             continue;
70         }
71         if (static_cast<int>(md5String.length()) != kMD5StringLength) {
72             fprintf(stderr, "MD5 length error: %s\n", md5String.c_str());
73         }
74         if (std::count_if(md5String.begin(), md5String.end(), isxdigit) != kMD5StringLength) {
75             fprintf(stderr, "MD5 includes non-hex char: %s\n", md5String.c_str());
76         }
77     }
78     if (md5Strings->empty()) {
79         fprintf(stderr, "MD5 checksum file (%s) missing or empty.\n",
80                 filepath.MaybeAsASCII().c_str());
81     }
82 }
83 
84 // Get file path name of recording raw YUV
getRecordOutputPath(const std::string & videoFile,int width,int height)85 base::FilePath getRecordOutputPath(const std::string& videoFile, int width, int height) {
86     base::FilePath filepath(videoFile);
87     filepath = filepath.RemoveExtension();
88     std::string suffix = "_output_" + std::to_string(width) + "x" + std::to_string(height) + ".yuv";
89     return base::FilePath(filepath.value() + suffix);
90 }
91 }  // namespace
92 
93 namespace android {
94 
95 // Input video data parameters. This could be overwritten by user argument [-i].
96 // The syntax of each column is:
97 //  filename:componentName:width:height:numFrames:numFragments
98 // - |filename| is the file path to mp4 (h264) or webm (VP8/9) video.
99 // - |componentName| specifies the name of decoder component.
100 // - |width| and |height| are for video size (in pixels).
101 // - |numFrames| is the number of picture frames.
102 // - |numFragments| is the NALU (h264) or frame (VP8/9) count by MediaExtractor.
103 const char* gTestVideoData = "bear.mp4:c2.vda.avc.decoder:640:360:82:84";
104 //const char* gTestVideoData = "bear-vp8.webm:c2.vda.vp8.decoder:640:360:82:82";
105 //const char* gTestVideoData = "bear-vp9.webm:c2.vda.vp9.decoder:320:240:82:82";
106 
107 // Record decoded output frames as raw YUV format.
108 // The recorded file will be named as "<video_name>_output_<width>x<height>.yuv" under the same
109 // folder of input video file.
110 bool gRecordOutputYUV = false;
111 
112 const std::string kH264DecoderName = "c2.vda.avc.decoder";
113 const std::string kVP8DecoderName = "c2.vda.vp8.decoder";
114 const std::string kVP9DecoderName = "c2.vda.vp9.decoder";
115 
116 // Magic constants for indicating the timing of flush being called.
117 enum FlushPoint : int { END_OF_STREAM_FLUSH = -3, MID_STREAM_FLUSH = -2, NO_FLUSH = -1 };
118 
// Descriptor for one input test video, populated by parseTestVideoData() from
// the gTestVideoData string and later holding the opened media track source.
struct TestVideoFile {
    enum class CodecType { UNKNOWN, H264, VP8, VP9 };

    std::string mFilename;       // file path of the input video
    std::string mComponentName;  // name of the decoder component under test
    CodecType mCodec = CodecType::UNKNOWN;  // codec derived from |mComponentName|
    int mWidth = -1;         // video width in pixels (-1 = not parsed yet)
    int mHeight = -1;        // video height in pixels (-1 = not parsed yet)
    int mNumFrames = -1;     // expected number of decoded picture frames
    int mNumFragments = -1;  // expected NALU (H264) or frame (VP8/9) count
    sp<IMediaSource> mData;  // media track source from getMediaSourceFromFile()
};
131 
// Thin C2Buffer wrapper over a C2LinearBlock: shares the block's current
// offset/size range (with an empty fence) so input bitstream data can be
// attached to a C2Work.
class C2VDALinearBuffer : public C2Buffer {
public:
    explicit C2VDALinearBuffer(const std::shared_ptr<C2LinearBlock>& block)
          : C2Buffer({block->share(block->offset(), block->size(), C2Fence())}) {}
};
137 
138 class Listener;
139 
// Base test fixture: owns the work/processed queues shared with the listener
// thread and receives the component's listener callbacks (via Listener below).
class C2VDAComponentTest : public ::testing::Test {
public:
    // Callback forwarded from Listener: finished works arrive here.
    void onWorkDone(std::weak_ptr<C2Component> component,
                    std::list<std::unique_ptr<C2Work>> workItems);
    // Callback forwarded from Listener: config tripped events (ignored).
    void onTripped(std::weak_ptr<C2Component> component,
                   std::vector<std::shared_ptr<C2SettingResult>> settingResult);
    // Callback forwarded from Listener: component errors (fails the test).
    void onError(std::weak_ptr<C2Component> component, uint32_t errorCode);

protected:
    C2VDAComponentTest();
    void SetUp() override;

    // Parses the "file:component:width:height:numFrames:numFragments" spec
    // into |mTestVideoFile|.
    void parseTestVideoData(const char* testVideoData);

protected:
    using ULock = std::unique_lock<std::mutex>;

    enum {
        kWorkCount = 16,  // size of the reusable C2Work pool in |mWorkQueue|
    };

    std::shared_ptr<Listener> mListener;

    // The array of output video frame counters which will be counted in listenerThread. The array
    // length equals to iteration time of stream play.
    std::vector<int> mOutputFrameCounts;
    // The array of work counters returned from component which will be counted in listenerThread.
    // The array length equals to iteration time of stream play.
    std::vector<int> mFinishedWorkCounts;
    // The array of output frame MD5Sum which will be computed in listenerThread. The array length
    // equals to iteration time of stream play.
    std::vector<std::string> mMD5Strings;

    // Mutex for |mWorkQueue| among main and listenerThread.
    std::mutex mQueueLock;
    std::condition_variable mQueueCondition;
    std::list<std::unique_ptr<C2Work>> mWorkQueue;

    // Mutex for |mProcessedWork| among main and listenerThread.
    std::mutex mProcessedLock;
    std::condition_variable mProcessedCondition;
    std::list<std::unique_ptr<C2Work>> mProcessedWork;

    // Mutex for |mFlushDone| among main and listenerThread.
    std::mutex mFlushDoneLock;
    std::condition_variable mFlushDoneCondition;
    bool mFlushDone;  // set by listenerThread when all flushed works returned

    std::unique_ptr<TestVideoFile> mTestVideoFile;
};
190 
191 class Listener : public C2Component::Listener {
192 public:
Listener(C2VDAComponentTest * thiz)193     explicit Listener(C2VDAComponentTest* thiz) : mThis(thiz) {}
194     virtual ~Listener() = default;
195 
onWorkDone_nb(std::weak_ptr<C2Component> component,std::list<std::unique_ptr<C2Work>> workItems)196     virtual void onWorkDone_nb(std::weak_ptr<C2Component> component,
197                                std::list<std::unique_ptr<C2Work>> workItems) override {
198         mThis->onWorkDone(component, std::move(workItems));
199     }
200 
onTripped_nb(std::weak_ptr<C2Component> component,std::vector<std::shared_ptr<C2SettingResult>> settingResult)201     virtual void onTripped_nb(
202             std::weak_ptr<C2Component> component,
203             std::vector<std::shared_ptr<C2SettingResult>> settingResult) override {
204         mThis->onTripped(component, settingResult);
205     }
206 
onError_nb(std::weak_ptr<C2Component> component,uint32_t errorCode)207     virtual void onError_nb(std::weak_ptr<C2Component> component, uint32_t errorCode) override {
208         mThis->onError(component, errorCode);
209     }
210 
211 private:
212     C2VDAComponentTest* const mThis;
213 };
214 
C2VDAComponentTest()215 C2VDAComponentTest::C2VDAComponentTest() : mListener(new Listener(this)) {}
216 
onWorkDone(std::weak_ptr<C2Component> component,std::list<std::unique_ptr<C2Work>> workItems)217 void C2VDAComponentTest::onWorkDone(std::weak_ptr<C2Component> component,
218                                     std::list<std::unique_ptr<C2Work>> workItems) {
219     (void)component;
220     ULock l(mProcessedLock);
221     for (auto& item : workItems) {
222         mProcessedWork.emplace_back(std::move(item));
223     }
224     mProcessedCondition.notify_all();
225 }
226 
onTripped(std::weak_ptr<C2Component> component,std::vector<std::shared_ptr<C2SettingResult>> settingResult)227 void C2VDAComponentTest::onTripped(std::weak_ptr<C2Component> component,
228                                    std::vector<std::shared_ptr<C2SettingResult>> settingResult) {
229     (void)component;
230     (void)settingResult;
231     // no-ops
232 }
233 
onError(std::weak_ptr<C2Component> component,uint32_t errorCode)234 void C2VDAComponentTest::onError(std::weak_ptr<C2Component> component, uint32_t errorCode) {
235     (void)component;
236     // fail the test
237     FAIL() << "Get error code from component: " << errorCode;
238 }
239 
SetUp()240 void C2VDAComponentTest::SetUp() {
241     parseTestVideoData(gTestVideoData);
242 
243     mWorkQueue.clear();
244     for (int i = 0; i < kWorkCount; ++i) {
245         mWorkQueue.emplace_back(new C2Work);
246     }
247     mProcessedWork.clear();
248     mFlushDone = false;
249 }
250 
getMediaSourceFromFile(const std::string & filename,const TestVideoFile::CodecType codec,sp<IMediaSource> * source)251 static bool getMediaSourceFromFile(const std::string& filename,
252                                    const TestVideoFile::CodecType codec, sp<IMediaSource>* source) {
253     source->clear();
254 
255     sp<DataSource> dataSource =
256             DataSourceFactory::CreateFromURI(nullptr /* httpService */, filename.c_str());
257 
258     if (dataSource == nullptr) {
259         fprintf(stderr, "Unable to create data source.\n");
260         return false;
261     }
262 
263     sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);
264     if (extractor == nullptr) {
265         fprintf(stderr, "could not create extractor.\n");
266         return false;
267     }
268 
269     std::string expectedMime;
270     if (codec == TestVideoFile::CodecType::H264) {
271         expectedMime = "video/avc";
272     } else if (codec == TestVideoFile::CodecType::VP8) {
273         expectedMime = "video/x-vnd.on2.vp8";
274     } else if (codec == TestVideoFile::CodecType::VP9) {
275         expectedMime = "video/x-vnd.on2.vp9";
276     } else {
277         fprintf(stderr, "unsupported codec type.\n");
278         return false;
279     }
280 
281     for (size_t i = 0, numTracks = extractor->countTracks(); i < numTracks; ++i) {
282         sp<MetaData> meta =
283                 extractor->getTrackMetaData(i, MediaExtractor::kIncludeExtensiveMetaData);
284         if (meta == nullptr) {
285             continue;
286         }
287         const char* mime;
288         meta->findCString(kKeyMIMEType, &mime);
289         if (!strcasecmp(mime, expectedMime.c_str())) {
290             *source = extractor->getTrack(i);
291             if (*source == nullptr) {
292                 fprintf(stderr, "It's NULL track for track %zu.\n", i);
293                 return false;
294             }
295             return true;
296         }
297     }
298     fprintf(stderr, "No track found.\n");
299     return false;
300 }
301 
parseTestVideoData(const char * testVideoData)302 void C2VDAComponentTest::parseTestVideoData(const char* testVideoData) {
303     ALOGV("videoDataStr: %s", testVideoData);
304     mTestVideoFile = std::make_unique<TestVideoFile>();
305 
306     auto splitString = [](const std::string& input, const char delim) {
307         std::vector<std::string> splits;
308         auto beg = input.begin();
309         while (beg != input.end()) {
310             auto pos = std::find(beg, input.end(), delim);
311             splits.emplace_back(beg, pos);
312             beg = pos != input.end() ? pos + 1 : pos;
313         }
314         return splits;
315     };
316     auto tokens = splitString(testVideoData, ':');
317     ASSERT_EQ(tokens.size(), 6u);
318     mTestVideoFile->mFilename = tokens[0];
319     ASSERT_GT(mTestVideoFile->mFilename.length(), 0u);
320 
321     mTestVideoFile->mComponentName = tokens[1];
322     if (mTestVideoFile->mComponentName == kH264DecoderName) {
323         mTestVideoFile->mCodec = TestVideoFile::CodecType::H264;
324     } else if (mTestVideoFile->mComponentName == kVP8DecoderName) {
325         mTestVideoFile->mCodec = TestVideoFile::CodecType::VP8;
326     } else if (mTestVideoFile->mComponentName == kVP9DecoderName) {
327         mTestVideoFile->mCodec = TestVideoFile::CodecType::VP9;
328     }
329     ASSERT_NE(mTestVideoFile->mCodec, TestVideoFile::CodecType::UNKNOWN);
330 
331     mTestVideoFile->mWidth = std::stoi(tokens[2]);
332     mTestVideoFile->mHeight = std::stoi(tokens[3]);
333     mTestVideoFile->mNumFrames = std::stoi(tokens[4]);
334     mTestVideoFile->mNumFragments = std::stoi(tokens[5]);
335 
336     ALOGV("mTestVideoFile: %s, %s, %d, %d, %d, %d", mTestVideoFile->mFilename.c_str(),
337           mTestVideoFile->mComponentName.c_str(), mTestVideoFile->mWidth, mTestVideoFile->mHeight,
338           mTestVideoFile->mNumFrames, mTestVideoFile->mNumFragments);
339 }
340 
// Collects the Y/U/V plane bytes of |constGraphicView| as StringPieces into
// |framePieces| for MD5 computation and YUV recording.
// NOTE(review): piece lengths are computed from width * height, i.e. this
// assumes row stride == width for every plane — confirm for the allocator used.
static void getFrameStringPieces(const C2GraphicView& constGraphicView,
                                 std::vector<::base::StringPiece>* framePieces) {
    const uint8_t* const* constData = constGraphicView.data();
    ASSERT_NE(constData, nullptr);
    const C2PlanarLayout& layout = constGraphicView.layout();
    ASSERT_EQ(layout.type, C2PlanarLayout::TYPE_YUV) << "Only support YUV plane format";

    framePieces->clear();
    // Y plane: width * height bytes.
    framePieces->push_back(
            ::base::StringPiece(reinterpret_cast<const char*>(constData[C2PlanarLayout::PLANE_Y]),
                                constGraphicView.width() * constGraphicView.height()));
    if (layout.planes[C2PlanarLayout::PLANE_U].colInc == 2) {  // semi-planar mode
        // Interleaved chroma (NV12/NV21): one piece of width * height / 2 bytes
        // starting at whichever of the U/V pointers comes first in memory.
        framePieces->push_back(::base::StringPiece(
                reinterpret_cast<const char*>(std::min(constData[C2PlanarLayout::PLANE_U],
                                                       constData[C2PlanarLayout::PLANE_V])),
                constGraphicView.width() * constGraphicView.height() / 2));
    } else {
        // Planar chroma: separate U and V pieces, width * height / 4 bytes each.
        framePieces->push_back(::base::StringPiece(
                reinterpret_cast<const char*>(constData[C2PlanarLayout::PLANE_U]),
                constGraphicView.width() * constGraphicView.height() / 4));
        framePieces->push_back(::base::StringPiece(
                reinterpret_cast<const char*>(constData[C2PlanarLayout::PLANE_V]),
                constGraphicView.width() * constGraphicView.height() / 4));
    }
}
366 
367 // Test parameters:
368 // - Flush after work index. If this value is not negative, test will signal flush to component
369 //   after queueing the work frame index equals to this value in the first iteration. Negative
370 //   values may be magic constants, please refer to FlushPoint enum.
371 // - Number of play through. This value specifies the iteration time for playing entire video. If
372 //   |mFlushAfterWorkIndex| is not negative, the first iteration will perform flush, then repeat
373 //   times as this value for playing entire video.
374 // - Sanity check. If this is true, decoded content sanity check is enabled. Test will compute the
375 //   MD5Sum for output frame data for a play-though iteration (not flushed), and compare to golden
376 //   MD5Sums which should be stored in the file |video_filename|.md5
377 // - Use dummy EOS work. If this is true, test will queue a dummy work with end-of-stream flag in
378 //   the end of all input works. On the contrary, test will call drain_nb() to component.
// Parameterized fixture; tuple fields map to the members below in order.
class C2VDAComponentParamTest
      : public C2VDAComponentTest,
        public ::testing::WithParamInterface<std::tuple<int, uint32_t, bool, bool>> {
protected:
    int mFlushAfterWorkIndex;       // work index after which flush is signaled (see FlushPoint)
    uint32_t mNumberOfPlaythrough;  // number of full play-through iterations
    bool mSanityCheck;              // true = verify output MD5 against golden checksums
    bool mUseDummyEOSWork;          // true = queue dummy EOS work instead of calling drain_nb()
};
388 
TEST_P(C2VDAComponentParamTest,SimpleDecodeTest)389 TEST_P(C2VDAComponentParamTest, SimpleDecodeTest) {
390     mFlushAfterWorkIndex = std::get<0>(GetParam());
391     if (mFlushAfterWorkIndex == FlushPoint::MID_STREAM_FLUSH) {
392         mFlushAfterWorkIndex = mTestVideoFile->mNumFragments / 2;
393     } else if (mFlushAfterWorkIndex == FlushPoint::END_OF_STREAM_FLUSH) {
394         mFlushAfterWorkIndex = mTestVideoFile->mNumFragments - 1;
395     }
396     ASSERT_LT(mFlushAfterWorkIndex, mTestVideoFile->mNumFragments);
397     mNumberOfPlaythrough = std::get<1>(GetParam());
398 
399     if (mFlushAfterWorkIndex >= 0) {
400         mNumberOfPlaythrough++;  // add the first iteration for perform mid-stream flushing.
401     }
402 
403     mSanityCheck = std::get<2>(GetParam());
404     mUseDummyEOSWork = std::get<3>(GetParam());
405 
406     // Reset counters and determine the expected answers for all iterations.
407     mOutputFrameCounts.resize(mNumberOfPlaythrough, 0);
408     mFinishedWorkCounts.resize(mNumberOfPlaythrough, 0);
409     mMD5Strings.resize(mNumberOfPlaythrough);
410     std::vector<int> expectedOutputFrameCounts(mNumberOfPlaythrough, mTestVideoFile->mNumFrames);
411     auto expectedWorkCount = mTestVideoFile->mNumFragments;
412     if (mUseDummyEOSWork) {
413         expectedWorkCount += 1;  // plus one dummy EOS work
414     }
415     std::vector<int> expectedFinishedWorkCounts(mNumberOfPlaythrough, expectedWorkCount);
416     if (mFlushAfterWorkIndex >= 0) {
417         // First iteration performs the mid-stream flushing.
418         expectedOutputFrameCounts[0] = mFlushAfterWorkIndex + 1;
419         expectedFinishedWorkCounts[0] = mFlushAfterWorkIndex + 1;
420     }
421 
422     std::shared_ptr<C2Component> component(std::make_shared<C2VDAComponent>(
423             mTestVideoFile->mComponentName, 0, std::make_shared<C2ReflectorHelper>()));
424 
425     // Get input allocator & block pool.
426     std::shared_ptr<C2AllocatorStore> store = GetCodec2PlatformAllocatorStore();
427     std::shared_ptr<C2Allocator> inputAllocator;
428     std::shared_ptr<C2BlockPool> inputBlockPool;
429 
430     CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &inputAllocator), C2_OK);
431     inputBlockPool = std::make_shared<C2BasicLinearBlockPool>(inputAllocator);
432 
433     // Setup output block pool (bufferpool-backed).
434     std::vector<std::unique_ptr<C2Param>> params;
435     ASSERT_EQ(component->intf()->query_vb({}, {C2PortAllocatorsTuning::output::PARAM_TYPE},
436                                           C2_DONT_BLOCK, &params),
437               C2_OK);
438     ASSERT_EQ(params.size(), 1u);
439     C2PortAllocatorsTuning::output* outputAllocators =
440             C2PortAllocatorsTuning::output::From(params[0].get());
441     C2Allocator::id_t outputAllocatorId = outputAllocators->m.values[0];
442     ALOGV("output allocator ID = %u", outputAllocatorId);
443 
444     // Check bufferpool-backed block pool is used.
445     ASSERT_EQ(outputAllocatorId, C2VDAAllocatorStore::V4L2_BUFFERPOOL);
446 
447     std::shared_ptr<C2BlockPool> outputBlockPool;
448     ASSERT_EQ(CreateCodec2BlockPool(outputAllocatorId, component, &outputBlockPool), C2_OK);
449     C2BlockPool::local_id_t outputPoolId = outputBlockPool->getLocalId();
450     ALOGV("output block pool ID = %" PRIu64 "", outputPoolId);
451 
452     std::unique_ptr<C2PortBlockPoolsTuning::output> poolIdsTuning =
453             C2PortBlockPoolsTuning::output::AllocUnique({outputPoolId});
454 
455     std::vector<std::unique_ptr<C2SettingResult>> failures;
456     ASSERT_EQ(component->intf()->config_vb({poolIdsTuning.get()}, C2_MAY_BLOCK, &failures), C2_OK);
457 
458     // Set listener and start.
459     ASSERT_EQ(component->setListener_vb(mListener, C2_DONT_BLOCK), C2_OK);
460     ASSERT_EQ(component->start(), C2_OK);
461 
462     std::atomic_bool running(true);
463     std::thread listenerThread([this, &running]() {
464         uint32_t iteration = 0;
465         ::base::MD5Context md5Ctx;
466         ::base::MD5Init(&md5Ctx);
467         ::base::File recordFile;
468         if (gRecordOutputYUV) {
469             auto recordFilePath = getRecordOutputPath(
470                     mTestVideoFile->mFilename, mTestVideoFile->mWidth, mTestVideoFile->mHeight);
471             fprintf(stdout, "record output file: %s\n", recordFilePath.value().c_str());
472             recordFile = ::base::File(recordFilePath,
473                                       ::base::File::FLAG_OPEN_ALWAYS | ::base::File::FLAG_WRITE);
474             ASSERT_TRUE(recordFile.IsValid());
475         }
476         while (running) {
477             std::unique_ptr<C2Work> work;
478             {
479                 ULock l(mProcessedLock);
480                 if (mProcessedWork.empty()) {
481                     mProcessedCondition.wait_for(l, 100ms);
482                     if (mProcessedWork.empty()) {
483                         continue;
484                     }
485                 }
486                 work = std::move(mProcessedWork.front());
487                 mProcessedWork.pop_front();
488             }
489             mFinishedWorkCounts[iteration]++;
490             ALOGV("Output: frame index: %llu result: %d flags: 0x%x buffers: %zu",
491                   work->input.ordinal.frameIndex.peekull(), work->result,
492                   work->worklets.front()->output.flags,
493                   work->worklets.front()->output.buffers.size());
494 
495             // Don't check output buffer and flags for flushed works.
496             bool flushed = work->result == C2_NOT_FOUND;
497 
498             ASSERT_EQ(work->worklets.size(), 1u);
499             if (!flushed && work->worklets.front()->output.buffers.size() == 1u) {
500                 std::shared_ptr<C2Buffer> output = work->worklets.front()->output.buffers[0];
501                 C2ConstGraphicBlock graphicBlock = output->data().graphicBlocks().front();
502 
503                 // check graphic buffer size (coded size) is not less than given video size.
504                 ASSERT_LE(mTestVideoFile->mWidth, static_cast<int>(graphicBlock.width()));
505                 ASSERT_LE(mTestVideoFile->mHeight, static_cast<int>(graphicBlock.height()));
506 
507                 // check visible rect equals to given video size.
508                 ASSERT_EQ(mTestVideoFile->mWidth, static_cast<int>(graphicBlock.crop().width));
509                 ASSERT_EQ(mTestVideoFile->mHeight, static_cast<int>(graphicBlock.crop().height));
510                 ASSERT_EQ(0u, graphicBlock.crop().left);
511                 ASSERT_EQ(0u, graphicBlock.crop().top);
512 
513                 // Intended behavior for Intel libva driver (crbug.com/148546):
                // The 5ms latency is laid here to make sure surface content is fully
                // processed by libva.
516                 std::this_thread::sleep_for(std::chrono::milliseconds(5));
517 
518                 const C2GraphicView& constGraphicView = graphicBlock.map().get();
519                 ASSERT_EQ(C2_OK, constGraphicView.error());
520                 std::vector<::base::StringPiece> framePieces;
521                 getFrameStringPieces(constGraphicView, &framePieces);
522                 ASSERT_FALSE(framePieces.empty());
523                 if (mSanityCheck) {
524                     for (const auto& piece : framePieces) {
525                         ::base::MD5Update(&md5Ctx, piece);
526                     }
527                 }
528                 if (gRecordOutputYUV) {
529                     for (const auto& piece : framePieces) {
530                         ASSERT_EQ(static_cast<int>(piece.length()),
531                                   recordFile.WriteAtCurrentPos(piece.data(), piece.length()))
532                                 << "Failed to write file for yuv recording...";
533                     }
534                 }
535 
536                 work->worklets.front()->output.buffers.clear();
537                 mOutputFrameCounts[iteration]++;
538             }
539 
540             bool iteration_end = !flushed && (work->worklets.front()->output.flags &
541                                               C2FrameData::FLAG_END_OF_STREAM);
542 
543             // input buffer should be reset in component side.
544             ASSERT_EQ(work->input.buffers.size(), 1u);
545             ASSERT_TRUE(work->input.buffers.front() == nullptr);
546             work->worklets.clear();
547             work->workletsProcessed = 0;
548 
549             if (iteration == 0 && work->input.ordinal.frameIndex.peeku() ==
550                                           static_cast<uint64_t>(mFlushAfterWorkIndex)) {
551                 ULock l(mFlushDoneLock);
552                 mFlushDone = true;
553                 mFlushDoneCondition.notify_all();
554                 iteration_end = true;
555             }
556 
557             ULock l(mQueueLock);
558             mWorkQueue.emplace_back(std::move(work));
559             mQueueCondition.notify_all();
560 
561             if (iteration_end) {
562                 // record md5sum
563                 ::base::MD5Digest digest;
564                 ::base::MD5Final(&digest, &md5Ctx);
565                 mMD5Strings[iteration] = ::base::MD5DigestToBase16(digest);
566                 ::base::MD5Init(&md5Ctx);
567 
568                 iteration++;
569                 if (iteration == mNumberOfPlaythrough) {
570                     running.store(false);  // stop the thread
571                 }
572             }
573         }
574     });
575 
576     for (uint32_t iteration = 0; iteration < mNumberOfPlaythrough; ++iteration) {
577         ASSERT_TRUE(getMediaSourceFromFile(mTestVideoFile->mFilename, mTestVideoFile->mCodec,
578                                            &mTestVideoFile->mData));
579 
580         std::deque<sp<ABuffer>> csds;
581         if (mTestVideoFile->mCodec == TestVideoFile::CodecType::H264) {
582             // Get csd buffers for h264.
583             sp<AMessage> format;
584             (void)convertMetaDataToMessage(mTestVideoFile->mData->getFormat(), &format);
585             csds.resize(2);
586             format->findBuffer("csd-0", &csds[0]);
587             format->findBuffer("csd-1", &csds[1]);
588             ASSERT_TRUE(csds[0] != nullptr && csds[1] != nullptr);
589         }
590 
591         ASSERT_EQ(mTestVideoFile->mData->start(), OK);
592 
593         int numWorks = 0;
594         while (true) {
595             size_t size = 0u;
596             void* data = nullptr;
597             int64_t timestamp = 0u;
598             MediaBufferBase* buffer = nullptr;
599             sp<ABuffer> csd;
600             C2FrameData::flags_t inputFlag = static_cast<C2FrameData::flags_t>(0);
601             bool queueDummyEOSWork = false;
602             if (!csds.empty()) {
603                 csd = std::move(csds.front());
604                 csds.pop_front();
605                 size = csd->size();
606                 data = csd->data();
607                 inputFlag = C2FrameData::FLAG_CODEC_CONFIG;
608             } else {
609                 if (mTestVideoFile->mData->read(&buffer) != OK) {
610                     ASSERT_TRUE(buffer == nullptr);
611                     if (mUseDummyEOSWork) {
612                         ALOGV("Meet end of stream. Put a dummy EOS work.");
613                         queueDummyEOSWork = true;
614                     } else {
615                         ALOGV("Meet end of stream. Now drain the component.");
616                         ASSERT_EQ(component->drain_nb(C2Component::DRAIN_COMPONENT_WITH_EOS),
617                                   C2_OK);
618                         break;
619                     }
620                     // TODO(johnylin): add test with drain with DRAIN_COMPONENT_NO_EOS when we know
621                     //                 the actual use case of it.
622                 } else {
623                     MetaDataBase& meta = buffer->meta_data();
624                     ASSERT_TRUE(meta.findInt64(kKeyTime, &timestamp));
625                     size = buffer->size();
626                     data = buffer->data();
627                 }
628             }
629 
630             std::unique_ptr<C2Work> work;
631             while (!work) {
632                 ULock l(mQueueLock);
633                 if (!mWorkQueue.empty()) {
634                     work = std::move(mWorkQueue.front());
635                     mWorkQueue.pop_front();
636                 } else {
637                     mQueueCondition.wait_for(l, 100ms);
638                 }
639             }
640 
641             work->input.flags = inputFlag;
642             work->input.ordinal.frameIndex = static_cast<uint64_t>(numWorks);
643             work->input.buffers.clear();
644 
645             std::shared_ptr<C2LinearBlock> block;
646             if (queueDummyEOSWork) {
647                 // Create the dummy EOS work with no input buffer inside.
648                 work->input.flags = static_cast<C2FrameData::flags_t>(
649                         work->input.flags | C2FrameData::FLAG_END_OF_STREAM);
650                 work->input.ordinal.timestamp = 0;  // timestamp is invalid for dummy EOS work
651                 ALOGV("Input: (Dummy EOS) id: %llu", work->input.ordinal.frameIndex.peekull());
652             } else {
653                 work->input.ordinal.timestamp = static_cast<uint64_t>(timestamp);
654 
655                 // Allocate an input buffer with data size.
656                 inputBlockPool->fetchLinearBlock(
657                         size, {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
658                 C2WriteView view = block->map().get();
659                 ASSERT_EQ(view.error(), C2_OK);
660                 memcpy(view.base(), data, size);
661                 work->input.buffers.emplace_back(new C2VDALinearBuffer(std::move(block)));
662                 ALOGV("Input: bitstream id: %llu timestamp: %llu size: %zu",
663                       work->input.ordinal.frameIndex.peekull(),
664                       work->input.ordinal.timestamp.peekull(), size);
665             }
666 
667             work->worklets.clear();
668             work->worklets.emplace_back(new C2Worklet);
669 
670             std::list<std::unique_ptr<C2Work>> items;
671             items.push_back(std::move(work));
672 
673             // Queue the work.
674             ASSERT_EQ(component->queue_nb(&items), C2_OK);
675             numWorks++;
676 
677             if (buffer) {
678                 buffer->release();
679             }
680 
681             if (iteration == 0 && numWorks == mFlushAfterWorkIndex + 1) {
682                 // Perform flush.
683                 // Note: C2VDAComponent does not return work via |flushedWork|.
684                 ASSERT_EQ(component->flush_sm(C2Component::FLUSH_COMPONENT,
685                                               nullptr /* flushedWork */),
686                           C2_OK);
687                 break;
688             }
689 
690             if (queueDummyEOSWork) {
691                 break;
692             }
693         }
694 
695         if (iteration == 0 && mFlushAfterWorkIndex >= 0) {
696             // Wait here until client get all flushed works.
697             while (true) {
698                 ULock l(mFlushDoneLock);
699                 if (mFlushDone) {
700                     break;
701                 }
702                 mFlushDoneCondition.wait_for(l, 100ms);
703             }
704             ALOGV("Got flush done signal");
705             EXPECT_EQ(numWorks, mFlushAfterWorkIndex + 1);
706         } else {
707             EXPECT_EQ(numWorks, expectedWorkCount);
708         }
709         ASSERT_EQ(mTestVideoFile->mData->stop(), OK);
710     }
711 
712     listenerThread.join();
713     ASSERT_EQ(running, false);
714     ASSERT_EQ(component->stop(), C2_OK);
715 
    // Finally, check that the decoding went as expected.
717     for (uint32_t i = 0; i < mNumberOfPlaythrough; ++i) {
718         if (mFlushAfterWorkIndex >= 0 && i == 0) {
719             EXPECT_LE(mOutputFrameCounts[i], expectedOutputFrameCounts[i]) << "At iteration: " << i;
720         } else {
721             EXPECT_EQ(mOutputFrameCounts[i], expectedOutputFrameCounts[i]) << "At iteration: " << i;
722         }
723         EXPECT_EQ(mFinishedWorkCounts[i], expectedFinishedWorkCounts[i]) << "At iteration: " << i;
724     }
725 
726     if (mSanityCheck) {
727         std::vector<std::string> goldenMD5s;
728         readGoldenMD5s(mTestVideoFile->mFilename, &goldenMD5s);
729         for (uint32_t i = 0; i < mNumberOfPlaythrough; ++i) {
730             if (mFlushAfterWorkIndex >= 0 && i == 0) {
731                 continue;  // do not compare the iteration with flushing
732             }
733             bool matched = std::find(goldenMD5s.begin(), goldenMD5s.end(), mMD5Strings[i]) !=
734                            goldenMD5s.end();
735             EXPECT_TRUE(matched) << "Unknown MD5: " << mMD5Strings[i] << " at iter: " << i;
736         }
737     }
738 }
739 
// Instantiations of C2VDAComponentParamTest.
// Tuple parameters (inferred from the instantiations below — first value is the
// flush point / work index to flush after, second is the number of playthroughs,
// third enables the MD5 sanity check, fourth ends the stream with a dummy EOS
// work instead of draining; confirm against the fixture's GetParam() usage):
//   <int flushAfterWorkIndex, uint32_t numberOfPlaythrough, bool sanityCheck,
//    bool useDummyEOSWork>

// Play input video once, end by draining.
INSTANTIATE_TEST_CASE_P(SinglePlaythroughTest, C2VDAComponentParamTest,
                        ::testing::Values(std::make_tuple(static_cast<int>(FlushPoint::NO_FLUSH),
                                                          1u, false, false)));
// Play input video once, end by queueing a dummy EOS work (no input buffer).
INSTANTIATE_TEST_CASE_P(DummyEOSWorkTest, C2VDAComponentParamTest,
                        ::testing::Values(std::make_tuple(static_cast<int>(FlushPoint::NO_FLUSH),
                                                          1u, false, true)));

// Play 5 times of input video, and check sanity by MD5Sum.
INSTANTIATE_TEST_CASE_P(MultiplePlaythroughSanityTest, C2VDAComponentParamTest,
                        ::testing::Values(std::make_tuple(static_cast<int>(FlushPoint::NO_FLUSH),
                                                          5u, true, false)));

// Test mid-stream flush (after the 40th work) then play once entirely.
INSTANTIATE_TEST_CASE_P(FlushPlaythroughTest, C2VDAComponentParamTest,
                        ::testing::Values(std::make_tuple(40, 1u, true, false)));

// Test mid-stream flush then stop (no further playthrough).
INSTANTIATE_TEST_CASE_P(FlushStopTest, C2VDAComponentParamTest,
                        ::testing::Values(std::make_tuple(
                                static_cast<int>(FlushPoint::MID_STREAM_FLUSH), 0u, false, false)));

// Test early flush (after work index 0..3) then stop.
INSTANTIATE_TEST_CASE_P(EarlyFlushStopTest, C2VDAComponentParamTest,
                        ::testing::Values(std::make_tuple(0, 0u, false, false),
                                          std::make_tuple(1, 0u, false, false),
                                          std::make_tuple(2, 0u, false, false),
                                          std::make_tuple(3, 0u, false, false)));

// Test end-of-stream flush then stop.
INSTANTIATE_TEST_CASE_P(
        EndOfStreamFlushStopTest, C2VDAComponentParamTest,
        ::testing::Values(std::make_tuple(static_cast<int>(FlushPoint::END_OF_STREAM_FLUSH), 0u,
                                          false, false)));
775 
776 }  // namespace android
777 
// Prints the command-line usage banner to stderr. |programName| is argv[0].
static void usage(const char* programName) {
    fprintf(stderr, "usage: %s [-i test_video_data] [-r(ecord YUV)] [gtest options]\n",
            programName);
}
781 
main(int argc,char ** argv)782 int main(int argc, char** argv) {
783     ::testing::InitGoogleTest(&argc, argv);
784 
785     int res;
786     while ((res = getopt(argc, argv, "i:r")) >= 0) {
787         switch (res) {
788         case 'i': {
789             android::gTestVideoData = optarg;
790             break;
791         }
792         case 'r': {
793             android::gRecordOutputYUV = true;
794             break;
795         }
796         default: {
797             usage(argv[0]);
798             exit(1);
799             break;
800         }
801         }
802     }
803 
804     return RUN_ALL_TESTS();
805 }
806