/*
 * Copyright (C) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "avmeta_meta_collector.h"
#include <cstring>
#include <string>
#include "avmetadatahelper.h"
#include "media_errors.h"
#include "media_log.h"
#include "gst_meta_parser.h"
#include "gst_utils.h"

namespace {
constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN, "AVMetaCollector"};
}

namespace OHOS {
namespace Media {
enum GstElemType : uint8_t {
    TYPEFIND,
    DEMUXER,
    PARSER,
    DECODER,
    DECODEBIN,
    UNKNOWN,
};

struct GstElemMetaMatchDesc {
    std::string_view metaKey;
    std::vector<std::string_view> expectedFields;
};

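/**
 * @brief Classification table used by ProbeElemType. Each entry maps an
 * element type to the GStreamer element metadata key to query and the
 * field values expected to appear under that key.
 */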
static const std::unordered_map<GstElemType, GstElemMetaMatchDesc> GST_ELEM_META_MATCH_DESC = {
    { GstElemType::TYPEFIND, { GST_ELEMENT_METADATA_LONGNAME, { "TypeFind" } } },
    { GstElemType::DEMUXER, { GST_ELEMENT_METADATA_KLASS, { "Codec", "Demuxer" } } },
    { GstElemType::PARSER, { GST_ELEMENT_METADATA_KLASS, { "Codec", "Parser" } } },
    { GstElemType::DECODER, { GST_ELEMENT_METADATA_KLASS, { "Codec", "Decoder" } } },
    { GstElemType::DECODEBIN, { GST_ELEMENT_METADATA_LONGNAME, { "Decoder Bin" } } },
};

/**
 * @brief Narrows the multiqueue's cache limits to avoid wasting memory.
 * For the metadata and thumbnail scenarios there is no need to cache
 * many buffers in the queue.
 */
class AVMetaMetaCollector::MultiQueueCutOut {
public:
    explicit MultiQueueCutOut(GstElement &mq) : mq_(GST_ELEMENT_CAST(gst_object_ref(&mq)))
    {
        g_object_get(mq_, "max-size-bytes", &maxBytes_, "max-size-buffers",
                     &maxBuffers_, "max-size-time", &maxTimes_, nullptr);
        MEDIA_LOGI("mq curr maxBytes: %{public}u, maxBuffers: %{public}u, maxTimes: %{public}" PRIu64,
                   maxBytes_, maxBuffers_, maxTimes_);

        static constexpr uint32_t maxBytes = 2 * 1024 * 1024;
        static constexpr uint32_t maxBuffers = 5;
        static constexpr uint64_t maxTimes = 2 * GST_SECOND;
        g_object_set(mq_, "max-size-bytes", maxBytes, "max-size-buffers",
                     maxBuffers, "max-size-time", maxTimes, nullptr);
    }

    ~MultiQueueCutOut()
    {
        if (isHidden_) {
            gst_object_unref(mq_);
            return;
        }

        g_object_set(mq_, "max-size-bytes", maxBytes_, "max-size-buffers",
                     maxBuffers_, "max-size-time", maxTimes_, nullptr);
        gst_object_unref(mq_);
    }

    void Hide()
    {
        isHidden_ = true;
    }

private:
    GstElement *mq_;
    uint32_t maxBuffers_ = 0;
    uint32_t maxBytes_ = 0;
    uint64_t maxTimes_ = 0;
    bool isHidden_ = false;
};

AVMetaMetaCollector::AVMetaMetaCollector()
{
    MEDIA_LOGD("enter ctor, instance: 0x%{public}06" PRIXPTR "", FAKE_POINTER(this));
}

AVMetaMetaCollector::~AVMetaMetaCollector()
{
    MEDIA_LOGD("enter dtor, instance: 0x%{public}06" PRIXPTR "", FAKE_POINTER(this));
    std::unique_lock<std::mutex> lock(mutex_);
    {
        decltype(elemCollectors_) temp;
        temp.swap(elemCollectors_);
    }
    {
        decltype(blockers_) temp;
        temp.swap(blockers_);
    }
}

void AVMetaMetaCollector::Start()
{
    MEDIA_LOGD("start collecting...");

    std::unique_lock<std::mutex> lock(mutex_);
    if (stopCollecting_ || !allMeta_.tbl_.empty()) {
        return;
    }
}

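/**
 * @brief Registers a newly added pipeline element as a metadata source.
 * The element is classified by its GStreamer metadata, and a matching
 * element collector and buffer blocker are attached to it.
 */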
void AVMetaMetaCollector::AddMetaSource(GstElement &source)
{
    MEDIA_LOGD("enter");

    std::unique_lock<std::mutex> lock(mutex_);
    if (stopCollecting_) {
        return;
    }

    uint8_t srcType = ProbeElemType(source);
    AddElemCollector(source, srcType);
    AddElemBlocker(source, srcType);
}

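/**
 * @brief Detaches the buffer blocker that was attached to the given element
 * when it was added as a metadata source.
 */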
void AVMetaMetaCollector::RemoveMetaSource(GstElement &source)
{
    MEDIA_LOGD("enter");

    std::unique_lock<std::mutex> lock(mutex_);
    if (stopCollecting_) {
        return;
    }

    uint8_t srcType = ProbeElemType(source);
    RemoveElemBlocker(source, srcType);
}

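/**
 * @brief Stops metadata collection: wakes up any waiting getters, stops the
 * buffer blockers according to the unlock flag (see StopBlocker), and stops
 * every element collector.
 */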
void AVMetaMetaCollector::Stop(bool unlock) /* false */
{
    MEDIA_LOGD("stop collecting...");

    std::unique_lock<std::mutex> lock(mutex_);
    stopCollecting_ = true;
    cond_.notify_all();

    StopBlocker(unlock);

    for (auto &elemCollector : elemCollectors_) {
        elemCollector->Stop();
    }
}

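/**
 * @brief Returns all collected metadata, waiting up to two seconds for the
 * collection to complete or to be stopped before returning the table.
 */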
std::unordered_map<int32_t, std::string> AVMetaMetaCollector::GetMetadata()
{
    static constexpr int32_t timeout = 2;
    std::unique_lock<std::mutex> lock(mutex_);
    cond_.wait_for(lock, std::chrono::seconds(timeout), [this]() {
        return CheckCollectCompleted() || stopCollecting_;
    });

    AdjustMimeType();
    PopulateMeta(allMeta_);

    return allMeta_.tbl_;
}

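/**
 * @brief Returns the metadata value for one key, waiting up to two seconds
 * until the key is available, the collection completes, or collecting stops.
 * An empty string is returned if the key was not collected.
 */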
std::string AVMetaMetaCollector::GetMetadata(int32_t key)
{
    static constexpr int32_t timeout = 2;
    std::unique_lock<std::mutex> lock(mutex_);
    cond_.wait_for(lock, std::chrono::seconds(timeout), [this, key]() {
        return stopCollecting_ || allMeta_.HasMeta(key) || CheckCollectCompleted();
    });

    AdjustMimeType();

    std::string result;
    (void)allMeta_.TryGetMeta(key, result);
    return result;
}

bool AVMetaMetaCollector::IsCollecteCompleted()
{
    std::unique_lock<std::mutex> lock(mutex_);
    return collectCompleted_;
}

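/**
 * @brief Returns the embedded art picture reported by the first element
 * collector that has one, or nullptr if none is available.
 */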
std::shared_ptr<AVSharedMemory> AVMetaMetaCollector::FetchArtPicture()
{
    std::unique_lock<std::mutex> lock(mutex_);
    std::shared_ptr<AVSharedMemory> result = nullptr;
    for (auto &elemCollector : elemCollectors_) {
        if (elemCollector != nullptr) {
            result = elemCollector->FetchArtPicture();
        }
        if (result != nullptr) {
            break;
        }
    }

    return result;
}

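/**
 * @brief Checks whether every element collector has gathered its metadata and
 * every active blocker has detected a buffer. Sets collectCompleted_ once the
 * collection is finished.
 */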
bool AVMetaMetaCollector::CheckCollectCompleted()
{
    if (elemCollectors_.size() == 0 || blockers_.size() == 0) {
        return false;
    }

    for (auto &collector : elemCollectors_) {
        if (collector == nullptr) {
            continue;
        }

        if (!collector->IsMetaCollected()) {
            return false;
        }
    }

    for (auto &[type, blockerVec] : blockers_) {
        for (auto &blocker : blockerVec) {
            if (blocker == nullptr) {
                continue;
            }
            if (!blocker->IsRemoved() && !blocker->IsBufferDetected()) {
                return false;
            }
        }
    }

    collectCompleted_ = true;
    MEDIA_LOGI("collect metadata finished !");
    return true;
}

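/**
 * @brief Determines the element's type by matching its GStreamer element
 * metadata against GST_ELEM_META_MATCH_DESC, and counts how many elements of
 * each type have been seen. Returns GstElemType::UNKNOWN if nothing matches.
 */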
uint8_t AVMetaMetaCollector::ProbeElemType(GstElement &source)
{
    for (const auto &[srcType, matchDesc] : GST_ELEM_META_MATCH_DESC) {
        bool matchResult = MatchElementByMeta(source, matchDesc.metaKey, matchDesc.expectedFields);
        if (!matchResult) {
            continue;
        }

        std::string detailLog = "metaKey: ";
        detailLog += matchDesc.metaKey;
        detailLog += ", expected field: ";
        for (const auto &field : matchDesc.expectedFields) {
            detailLog += field;
            detailLog += " ";
        }
        MEDIA_LOGD("find %{public}s, %{public}s", ELEM_NAME(&source), detailLog.c_str());

        if (hasSrcType_.count(srcType) == 0) {
            (void)hasSrcType_.emplace(srcType, 0);
        }
        hasSrcType_[srcType] += 1;
        return srcType;
    }

    return GstElemType::UNKNOWN;
}

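/**
 * @brief Refines the reported mime type: an MP4 container that has an audio
 * track but no video track is reported as FILE_MIMETYPE_AUDIO_MP4 instead of
 * FILE_MIMETYPE_VIDEO_MP4.
 */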
void AVMetaMetaCollector::AdjustMimeType()
{
    std::string mimeType = allMeta_.GetMeta(AV_KEY_MIME_TYPE);
    if (mimeType.empty()) {
        return;
    }

    if (mimeType.compare(FILE_MIMETYPE_VIDEO_MP4) == 0) {
        std::string hasVideo = allMeta_.GetMeta(AV_KEY_HAS_VIDEO);
        if (hasVideo.compare("yes") == 0) {
            return;
        }
        std::string hasAudio = allMeta_.GetMeta(AV_KEY_HAS_AUDIO);
        if (hasAudio.compare("yes") == 0) {
            allMeta_.SetMeta(AV_KEY_MIME_TYPE, std::string(FILE_MIMETYPE_AUDIO_MP4));
            return;
        }
    }
}

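/**
 * @brief Callback invoked by the element collectors: merges the reported
 * metadata into allMeta_ and wakes up any waiters.
 */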
void AVMetaMetaCollector::UpdataMeta(const Metadata &metadata)
{
    std::unique_lock<std::mutex> lock(mutex_);
    if (stopCollecting_) {
        return;
    }

    for (auto &[key, value] : metadata.tbl_) {
        allMeta_.SetMeta(key, value);
    }

    cond_.notify_all();
}

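/**
 * @brief Creates or reuses an element collector for the given element. For a
 * DECODEBIN element only the multiqueue cache cut-out is installed; collectors
 * are created for typefind, demuxer and parser elements, unless a demuxer is
 * already present and the new element is a parser.
 */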
void AVMetaMetaCollector::AddElemCollector(GstElement &source, uint8_t type)
{
    if (type == GstElemType::DECODEBIN) {
        mqCutOut_ = std::make_unique<MultiQueueCutOut>(source);
        return;
    }

    if (type != GstElemType::TYPEFIND && type != GstElemType::DEMUXER && type != GstElemType::PARSER) {
        return;
    }

    for (auto &collector : elemCollectors_) {
        if (collector->GetType() == static_cast<AVMetaSourceType>(type)) {
            collector->AddMetaSource(source);
            return;
        }
    }

    // a demuxer already exists, so refuse to create a collector for the parser
    if ((hasSrcType_.count(GstElemType::DEMUXER) != 0) &&
        (type != GstElemType::DEMUXER) &&
        (type != GstElemType::TYPEFIND)) {
        return;
    }

    auto metaUpdateCb = std::bind(&AVMetaMetaCollector::UpdataMeta, this, std::placeholders::_1);
    auto result = AVMetaElemMetaCollector::Create(static_cast<AVMetaSourceType>(type), metaUpdateCb);
    CHECK_AND_RETURN(result != nullptr);
    result->AddMetaSource(source);
    elemCollectors_.push_back(std::move(result));
}

void AVMetaMetaCollector::AddElemBlocker(GstElement &source, uint8_t type)
{
    /**
     * After the demuxer or parser plugin of GStreamer completes the metadata resolving
     * work, it sends one frame of buffer downstream. If there is a decoder downstream,
     * decoding will happen, which is unnecessary and wasteful for metadata resolving.
     * We can block the downstream pads of the demuxer to prevent the decoding from
     * happening.
     *
     * One possible sequence of elements set up in the pipeline is:
     * Demuxer1 --> Demuxer2 ---> Parser1 --> Decoder1
     *                        |\
     *                        | `-> Parser2 --> Parser3 --> Decoder2
     *                         \
     *                          `--> Decoder3
     * Or:
     * Parser1 --> Decoder1
     * Or:
     * Parser1 -->
     *
     * Therefore, we process the blocking by referring to these orders.
     */

#define PUSH_NEW_BLOCK(type, blocker)                                 \
    do {                                                              \
        auto typeBlockersIter = blockers_.find(type);                 \
        if (typeBlockersIter == blockers_.end()) {                    \
            auto ret = blockers_.emplace(type, BufferBlockerVec {});  \
            typeBlockersIter = ret.first;                             \
        }                                                             \
        (blocker)->Init();                                            \
        (void)typeBlockersIter->second.emplace_back(blocker);         \
    } while (0)

    if (type == GstElemType::TYPEFIND || type == GstElemType::DECODEBIN || type == GstElemType::UNKNOWN) {
        return;
    }

    auto notifier = [this]() {
        // take the lock to ensure the notification takes effect
        std::unique_lock<std::mutex> lock(mutex_);
        cond_.notify_all();
    };

    if (type == GstElemType::DEMUXER) {
        auto blocker = std::make_shared<AVMetaBufferBlocker>(source, true, notifier);
        PUSH_NEW_BLOCK(type, blocker);
        return;
    }

    if (type == GstElemType::DECODER) {
        auto blocker = std::make_shared<AVMetaBufferBlocker>(source, false, notifier);
        PUSH_NEW_BLOCK(type, blocker);
        return;
    }

    if (type == GstElemType::PARSER) {
        /**
         * If there is a demuxer, we cannot add the blocker at the parser's srcpad: the
         * parser may need to wait for several packets of buffer to autoplug the decoder,
         * so no buffer could arrive at the parser's srcpad due to the MultiQueueCutOut.
         * Instead, we add the blocker at the parser's sinkpad to fix this issue.
         */
        if (hasSrcType_.count(GstElemType::DEMUXER) != 0) {
            auto blocker = std::make_shared<AVMetaBufferBlocker>(source, false, notifier);
            PUSH_NEW_BLOCK(type, blocker);
        } else {
            auto blocker = std::make_shared<AVMetaBufferBlocker>(source, true, notifier);
            PUSH_NEW_BLOCK(type, blocker);
        }
        return;
    }
}

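/**
 * @brief Removes the buffer blocker whose element name matches the given
 * element, if any.
 */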
void AVMetaMetaCollector::RemoveElemBlocker(GstElement &source, uint8_t type)
{
    if (type == GstElemType::TYPEFIND || type == GstElemType::DECODEBIN || type == GstElemType::UNKNOWN) {
        return;
    }
    for (auto &[elemType, blockerVec] : blockers_) {
        for (auto iter = blockerVec.begin(); iter != blockerVec.end(); ++iter) {
            if (*iter == nullptr) {
                continue;
            }
            if (strcmp((*iter)->GetElemName().c_str(), ELEM_NAME(&source)) == 0) {
                MEDIA_LOGD("Remove %{public}s", (*iter)->GetElemName().c_str());
                blockerVec.erase(iter);
                return;
            }
        }
    }
}

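/**
 * @brief Removes every buffer blocker when unlock is true, otherwise only
 * hides them. The multiqueue cut-out is released afterwards, which restores
 * the original cache limits only in the unlock case.
 */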
void AVMetaMetaCollector::StopBlocker(bool unlock)
{
    for (auto &[type, blockerVec] : blockers_) {
        for (auto &blocker : blockerVec) {
            if (blocker == nullptr) {
                continue;
            }
            // keep the if-else inside the for-loop to reduce cyclomatic complexity
            if (unlock) {
                blocker->Remove();
            } else {
                blocker->Hide();
            }
        }
    }

    if (mqCutOut_ != nullptr && !unlock) {
        mqCutOut_->Hide();
    }
    mqCutOut_ = nullptr; // restore the mq's cache limits
}
} // namespace Media
} // namespace OHOS