1 /*
2 * Copyright (C) 2021 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "avmeta_meta_collector.h"
17 #include "avmetadatahelper.h"
18 #include "media_errors.h"
19 #include "media_log.h"
20 #include "gst_meta_parser.h"
21 #include "gst_utils.h"
22
namespace {
// HiLog label used by the MEDIA_LOG* macros in this translation unit.
constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN, "AVMetaCollector"};
}
26
27 namespace OHOS {
28 namespace Media {
// Categories of gstreamer elements that this collector distinguishes.
// An element is classified by matching its metadata against the
// GST_ELEM_META_MATCH_DESC table below.
enum GstElemType : uint8_t {
    TYPEFIND,
    DEMUXER,
    PARSER,
    DECODER,
    DECODEBIN,
    UNKNOWN,
};
37
// Describes how to recognize one element category: which element metadata
// key to query, and the field values expected in it (matching is performed
// by MatchElementByMeta).
struct GstElemMetaMatchDesc {
    std::string_view metaKey;                     // e.g. GST_ELEMENT_METADATA_KLASS
    std::vector<std::string_view> expectedFields; // values looked for in that metadata
};
42
// Recognition table mapping each element category to the metadata key and
// expected fields used to identify elements of that category.
static const std::unordered_map<GstElemType, GstElemMetaMatchDesc> GST_ELEM_META_MATCH_DESC = {
    { GstElemType::TYPEFIND, { GST_ELEMENT_METADATA_LONGNAME, { "TypeFind" } } },
    { GstElemType::DEMUXER, { GST_ELEMENT_METADATA_KLASS, { "Codec", "Demuxer" } } },
    { GstElemType::PARSER, { GST_ELEMENT_METADATA_KLASS, { "Codec", "Parser" } } },
    { GstElemType::DECODER, { GST_ELEMENT_METADATA_KLASS, { "Codec", "Decoder" } } },
    { GstElemType::DECODEBIN, { GST_ELEMENT_METADATA_LONGNAME, { "Decoder Bin" } } },
};
50
51 /**
52 * @brief limit the multiqueue's cache limit to avoid the waste of memory.
53 * For metadata and thubnail scene, there is no need to cache too much
54 * buffer in the queue.
55 */
56 class AVMetaMetaCollector::MultiQueueCutOut {
57 public:
MultiQueueCutOut(GstElement & mq)58 explicit MultiQueueCutOut(GstElement &mq) : mq_(GST_ELEMENT_CAST(gst_object_ref(&mq)))
59 {
60 g_object_get(mq_, "max-size-bytes", &maxBytes_, "max-size-buffers",
61 &maxBuffers_, "max-size-time", &maxTimes_, nullptr);
62 MEDIA_LOGI("mq curr maxBytes: %{public}u, maxBuffers: %{public}u, maxTimes: %{public}" PRIu64,
63 maxBytes_, maxBytes_, maxTimes_);
64
65 static constexpr uint32_t maxBytes = 2 * 1024 * 1024;
66 static constexpr uint32_t maxBuffers = 5;
67 static constexpr uint64_t maxTimes = 2 * GST_SECOND;
68 g_object_set(mq_, "max-size-bytes", maxBytes, "max-size-buffers",
69 maxBuffers, "max-size-time", maxTimes, nullptr);
70 }
71
~MultiQueueCutOut()72 ~MultiQueueCutOut()
73 {
74 if (isHiden_) {
75 gst_object_unref(mq_);
76 return;
77 }
78
79 g_object_set(mq_, "max-size-bytes", maxBytes_, "max-size-buffers",
80 maxBuffers_, "max-size-time", maxTimes_, nullptr);
81 gst_object_unref(mq_);
82 }
83
Hide()84 void Hide()
85 {
86 isHiden_ = true;
87 }
88
89 private:
90 GstElement *mq_;
91 uint32_t maxBuffers_ = 0;
92 uint32_t maxBytes_ = 0;
93 uint64_t maxTimes_ = 0;
94 bool isHiden_ = false;
95 };
96
// Nothing to initialize beyond member defaults; just trace the instance.
AVMetaMetaCollector::AVMetaMetaCollector()
{
    MEDIA_LOGD("enter ctor, instance: 0x%{public}06" PRIXPTR "", FAKE_POINTER(this));
}
101
// Tear down the collectors and blockers while holding the lock. Swapping
// each container into a scope-local temporary releases its elements at the
// end of the inner scope, i.e. still under mutex_ protection.
AVMetaMetaCollector::~AVMetaMetaCollector()
{
    MEDIA_LOGD("enter dtor, instance: 0x%{public}06" PRIXPTR "", FAKE_POINTER(this));
    std::unique_lock<std::mutex> lock(mutex_);
    {
        decltype(elemCollectors_) temp;
        temp.swap(elemCollectors_);
    }
    {
        decltype(blockers_) temp;
        temp.swap(blockers_);
    }
}
115
/**
 * @brief Begin a collecting pass. Returns immediately when collecting was
 * already stopped or some metadata has already been gathered.
 *
 * NOTE(review): the visible body performs no work beyond this guard —
 * presumably further start-up logic lives elsewhere or was stripped;
 * confirm against the class declaration.
 */
void AVMetaMetaCollector::Start()
{
    MEDIA_LOGD("start collecting...");

    std::unique_lock<std::mutex> lock(mutex_);
    if (stopCollecting_ || !allMeta_.tbl_.empty()) {
        return;
    }
}
125
AddMetaSource(GstElement & source)126 void AVMetaMetaCollector::AddMetaSource(GstElement &source)
127 {
128 MEDIA_LOGD("enter");
129
130 std::unique_lock<std::mutex> lock(mutex_);
131 if (stopCollecting_) {
132 return;
133 }
134
135 uint8_t srcType = ProbeElemType(source);
136 AddElemCollector(source, srcType);
137 AddElemBlocker(source, srcType);
138 }
139
Stop(bool unlock)140 void AVMetaMetaCollector::Stop(bool unlock) /* false */
141 {
142 MEDIA_LOGD("stop collecting...");
143
144 std::unique_lock<std::mutex> lock(mutex_);
145 stopCollecting_ = true;
146 cond_.notify_all();
147
148 StopBlocker(unlock);
149
150 for (auto &elemCollector : elemCollectors_) {
151 elemCollector->Stop();
152 }
153 }
154
GetMetadata()155 std::unordered_map<int32_t, std::string> AVMetaMetaCollector::GetMetadata()
156 {
157 static constexpr int32_t timeout = 2;
158 std::unique_lock<std::mutex> lock(mutex_);
159 cond_.wait_for(lock, std::chrono::seconds(timeout), [this]() {
160 return CheckCollectCompleted() || stopCollecting_;
161 });
162
163 AdjustMimeType();
164 PopulateMeta(allMeta_);
165
166 return allMeta_.tbl_;
167 }
168
GetMetadata(int32_t key)169 std::string AVMetaMetaCollector::GetMetadata(int32_t key)
170 {
171 static constexpr int32_t timeout = 2;
172 std::unique_lock<std::mutex> lock(mutex_);
173 cond_.wait_for(lock, std::chrono::seconds(timeout), [this, key]() {
174 return stopCollecting_ || allMeta_.HasMeta(key) || CheckCollectCompleted();
175 });
176
177 AdjustMimeType();
178
179 std::string result;
180 (void)allMeta_.TryGetMeta(key, result);
181 return result;
182 }
183
IsCollecteCompleted()184 bool AVMetaMetaCollector::IsCollecteCompleted()
185 {
186 std::unique_lock<std::mutex> lock(mutex_);
187 return collectCompleted_;
188 }
189
FetchArtPicture()190 std::shared_ptr<AVSharedMemory> AVMetaMetaCollector::FetchArtPicture()
191 {
192 std::unique_lock<std::mutex> lock(mutex_);
193 cond_.wait(lock, [this]() { return CheckCollectCompleted() || stopCollecting_; });
194
195 std::shared_ptr<AVSharedMemory> result = nullptr;
196 for (auto &elemCollector : elemCollectors_) {
197 if (elemCollector != nullptr) {
198 result = elemCollector->FetchArtPicture();
199 }
200 if (result != nullptr) {
201 break;
202 }
203 }
204
205 return result;
206 }
207
CheckCollectCompleted()208 bool AVMetaMetaCollector::CheckCollectCompleted()
209 {
210 if (elemCollectors_.size() == 0 || blockers_.size() == 0) {
211 return false;
212 }
213
214 for (auto &collector : elemCollectors_) {
215 if (collector == nullptr) {
216 continue;
217 }
218
219 if (!collector->IsMetaCollected()) {
220 return false;
221 }
222 }
223
224 for (auto &[type, blockerVec] : blockers_) {
225 for (auto &blocker : blockerVec) {
226 if (blocker == nullptr) {
227 continue;
228 }
229 if (!blocker->IsRemoved() && !blocker->IsBufferDetected()) {
230 return false;
231 }
232 }
233 }
234
235 collectCompleted_ = true;
236 MEDIA_LOGI("collect metadata finished !");
237 return true;
238 }
239
ProbeElemType(GstElement & source)240 uint8_t AVMetaMetaCollector::ProbeElemType(GstElement &source)
241 {
242 for (const auto &[srcType, matchDesc] : GST_ELEM_META_MATCH_DESC) {
243 bool matchResult = MatchElementByMeta(source, matchDesc.metaKey, matchDesc.expectedFields);
244 if (!matchResult) {
245 continue;
246 }
247
248 std::string detailLog = "metaKey: ";
249 detailLog += matchDesc.metaKey;
250 detailLog += ", expected field: ";
251 for (auto &fields : matchDesc.expectedFields) {
252 detailLog += fields;
253 detailLog += " ";
254 }
255 MEDIA_LOGD("find %{public}s, %{public}s", ELEM_NAME(&source), detailLog.c_str());
256
257 if (hasSrcType_.count(srcType) == 0) {
258 (void)hasSrcType_.emplace(srcType, 0);
259 }
260 hasSrcType_[srcType] += 1;
261 return srcType;
262 }
263
264 return GstElemType::UNKNOWN;
265 }
266
AdjustMimeType()267 void AVMetaMetaCollector::AdjustMimeType()
268 {
269 std::string mimeType = allMeta_.GetMeta(AV_KEY_MIME_TYPE);
270 if (mimeType.empty()) {
271 return;
272 }
273
274 if (mimeType.compare(FILE_MIMETYPE_VIDEO_MP4) == 0) {
275 std::string hasVideo = allMeta_.GetMeta(AV_KEY_HAS_VIDEO);
276 if (hasVideo.compare("yes") == 0) {
277 return;
278 }
279 std::string hasAudio = allMeta_.GetMeta(AV_KEY_HAS_AUDIO);
280 if (hasAudio.compare("yes") == 0) {
281 allMeta_.SetMeta(AV_KEY_MIME_TYPE, std::string(FILE_MIMETYPE_AUDIO_MP4));
282 return;
283 }
284 }
285 }
286
UpdataMeta(const Metadata & metadata)287 void AVMetaMetaCollector::UpdataMeta(const Metadata &metadata)
288 {
289 std::unique_lock<std::mutex> lock(mutex_);
290 if (stopCollecting_) {
291 return;
292 }
293
294 for (auto &[key, value] : metadata.tbl_) {
295 allMeta_.SetMeta(key, value);
296 }
297
298 cond_.notify_all();
299 }
300
AddElemCollector(GstElement & source,uint8_t type)301 void AVMetaMetaCollector::AddElemCollector(GstElement &source, uint8_t type)
302 {
303 if (type == GstElemType::DECODEBIN) {
304 mqCutOut_ = std::make_unique<MultiQueueCutOut>(source);
305 return;
306 }
307
308 if (type != GstElemType::TYPEFIND && type != GstElemType::DEMUXER && type != GstElemType::PARSER) {
309 return;
310 }
311
312 for (auto &collector : elemCollectors_) {
313 if (collector->GetType() == static_cast<AVMetaSourceType>(type)) {
314 collector->AddMetaSource(source);
315 return;
316 }
317 }
318
319 // already has demuxer, reject to create parser's collector
320 if ((hasSrcType_.count(GstElemType::DEMUXER) != 0) &&
321 (type != GstElemType::DEMUXER) &&
322 (type != GstElemType::TYPEFIND)) {
323 return;
324 }
325
326 auto metaUpdateCb = std::bind(&AVMetaMetaCollector::UpdataMeta, this, std::placeholders::_1);
327 auto result = AVMetaElemMetaCollector::Create(static_cast<AVMetaSourceType>(type), metaUpdateCb);
328 CHECK_AND_RETURN(result != nullptr);
329 result->AddMetaSource(source);
330 elemCollectors_.push_back(std::move(result));
331 }
332
AddElemBlocker(GstElement & source,uint8_t type)333 void AVMetaMetaCollector::AddElemBlocker(GstElement &source, uint8_t type)
334 {
335 /**
336 * After the demuxer or parser plugin of gstreamer complete the metadata resolve work,
337 * them will send one frame buffer to downstream. If there is decoder at the downstream,
338 * the decode will happened, which is unnecessary and wastefully for metadata resolving.
339 * We can block the downstream pads of demuxer to prevent the decode process happened.
340 *
341 * One kind of possible sequence of element setuped to the pipeline is :
342 * Demuxer1 --> Demuxer2 ---> Parser1 --> Decoder1
343 * |\
344 * | `-> Parser2 --> Parser3 --> Decoder2
345 * \
346 * `--> Decoder3
347 * Or:
348 * Parser1 --> Decoder1
349 * Or:
350 * Parser1 -->
351 *
352 * Therefore, we will process the block by referring to these order.
353 */
354
355 #define PUSH_NEW_BLOCK(type, blocker) \
356 do { \
357 auto typeBlockersIter = blockers_.find(type); \
358 if (typeBlockersIter == blockers_.end()) { \
359 auto ret = blockers_.emplace(type, BufferBlockerVec {}); \
360 typeBlockersIter = ret.first; \
361 } \
362 (blocker)->Init(); \
363 (void)typeBlockersIter->second.emplace_back(blocker); \
364 } while (0)
365
366 if (type == GstElemType::TYPEFIND || type == GstElemType::DECODEBIN || type == GstElemType::UNKNOWN) {
367 return;
368 }
369
370 auto notifier = [this]() {
371 // get lock to ensure the notification will take effect.
372 std::unique_lock<std::mutex> lock(mutex_);
373 cond_.notify_all();
374 };
375
376 if (type == GstElemType::DEMUXER) {
377 auto blocker = std::make_shared<AVMetaBufferBlocker>(source, true, notifier);
378 PUSH_NEW_BLOCK(type, blocker);
379 return;
380 }
381
382 if (type == GstElemType::DECODER) {
383 auto blocker = std::make_shared<AVMetaBufferBlocker>(source, false, notifier);
384 PUSH_NEW_BLOCK(type, blocker);
385 return;
386 }
387
388 if (type == GstElemType::PARSER) {
389 /**
390 * If there is a demuxer, we can not add blocker at the parser's srcpad, the parser
391 * maybe need to wait several packets of buffer to autoplug the decoder, which will
392 * leads to no buffer can arrived at the srcpad of parser due to the MultiQueueCutOut.
393 * Insteadly, we add the blocker at the parser's sinkpad to fix this issue.
394 *
395 */
396 if (hasSrcType_.count(GstElemType::DEMUXER) != 0) {
397 auto blocker = std::make_shared<AVMetaBufferBlocker>(source, false, notifier);
398 PUSH_NEW_BLOCK(type, blocker);
399 } else {
400 auto blocker = std::make_shared<AVMetaBufferBlocker>(source, true, notifier);
401 PUSH_NEW_BLOCK(type, blocker);
402 }
403 return;
404 }
405 }
406
StopBlocker(bool unlock)407 void AVMetaMetaCollector::StopBlocker(bool unlock)
408 {
409 for (auto &[type, blockerVec] : blockers_) {
410 for (auto &blocker : blockerVec) {
411 if (blocker == nullptr) {
412 continue;
413 }
414 // place the if-else at the for-loop for cyclomatic complexity
415 if (unlock) {
416 blocker->Remove();
417 } else {
418 blocker->Hide();
419 }
420 }
421 }
422
423 if (mqCutOut_ != nullptr && !unlock) {
424 mqCutOut_->Hide();
425 }
426 mqCutOut_ = nullptr; // restore the mq's cache limit
427 }
428 } // namespace Media
429 } // namespace OHOS
430