/*
 * Copyright (c) 2021-2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifdef VIDEO_SUPPORT

#define HST_LOG_TAG "FfmpegVideoDecoderPlugin"

#include "video_ffmpeg_decoder_plugin.h"
#include <cstring>
#include <map>
#include <set>
#include "plugin/common/plugin_caps_builder.h"
#include "plugin/common/surface_memory.h"
#include "plugins/ffmpeg_adapter/utils/ffmpeg_utils.h"

namespace {
// register plugins
using namespace OHOS::Media::Plugin;
using namespace Ffmpeg;
void UpdatePluginDefinition(const AVCodec* codec, CodecPluginDef& definition);

std::map<std::string, std::shared_ptr<const AVCodec>> codecMap;

constexpr size_t BUFFER_QUEUE_SIZE = 8;
constexpr int32_t STRIDE_ALIGN = 16;

std::set<AVCodecID> supportedCodec = {AV_CODEC_ID_H264};

std::shared_ptr<CodecPlugin> VideoFfmpegDecoderCreator(const std::string& name)
{
    return std::make_shared<VideoFfmpegDecoderPlugin>(name);
}

Status RegisterVideoDecoderPlugins(const std::shared_ptr<Register>& reg)
{
    const AVCodec* codec = nullptr;
    void* iter = nullptr;
    MEDIA_LOG_I("registering video decoders");
    while ((codec = av_codec_iterate(&iter))) {
        if (!av_codec_is_decoder(codec) || codec->type != AVMEDIA_TYPE_VIDEO) {
            continue;
        }
        if (supportedCodec.find(codec->id) == supportedCodec.end()) {
            MEDIA_LOG_DD("codec " PUBLIC_LOG_S "(" PUBLIC_LOG_S ") is not supported right now",
                         codec->name, codec->long_name);
            continue;
        }
        CodecPluginDef definition;
        definition.name = "videodecoder_" + std::string(codec->name);
        definition.pluginType = PluginType::VIDEO_DECODER;
        definition.rank = 100; // 100
        definition.creator = VideoFfmpegDecoderCreator;
        UpdatePluginDefinition(codec, definition);
        // do not delete the codec in the deleter
        codecMap[definition.name] = std::shared_ptr<AVCodec>(const_cast<AVCodec*>(codec), [](void* ptr) {});
        if (reg->AddPlugin(definition) != Status::OK) {
            MEDIA_LOG_W("register plugin " PUBLIC_LOG_S "(" PUBLIC_LOG_S ") failed",
                        codec->name, codec->long_name);
        }
    }
    return Status::OK;
}

void UnRegisterVideoDecoderPlugins()
{
    codecMap.clear();
}

void UpdateInCaps(const AVCodec* codec, CodecPluginDef& definition)
{
    CapabilityBuilder incapBuilder;
    switch (codec->id) {
        case AV_CODEC_ID_H264:
            incapBuilder.SetMime(OHOS::Media::MEDIA_MIME_VIDEO_H264);
            incapBuilder.SetVideoBitStreamFormatList({VideoBitStreamFormat::AVC1, VideoBitStreamFormat::ANNEXB});
            break;
        default:
            incapBuilder.SetMime("video/unknown");
            MEDIA_LOG_I("codec is not supported right now");
            break;
    }
    definition.inCaps.push_back(incapBuilder.Build());
}

void UpdateOutCaps(const AVCodec* codec, CodecPluginDef& definition)
{
    CapabilityBuilder outcapBuilder;
    outcapBuilder.SetMime(OHOS::Media::MEDIA_MIME_VIDEO_RAW);
    if (codec->pix_fmts != nullptr) {
        DiscreteCapability<VideoPixelFormat> values;
        size_t index = 0;
        // pix_fmts is terminated by AV_PIX_FMT_NONE (-1), not 0
        for (index = 0; codec->pix_fmts[index] != AV_PIX_FMT_NONE; ++index) {
            auto supportFormat = ConvertPixelFormatFromFFmpeg(codec->pix_fmts[index]);
            if (supportFormat != VideoPixelFormat::UNKNOWN) {
                values.push_back(supportFormat);
            }
        }
        if (index) {
            outcapBuilder.SetVideoPixelFormatList(values);
        }
    }
    definition.outCaps.push_back(outcapBuilder.Build());
}

void UpdatePluginDefinition(const AVCodec* codec, CodecPluginDef& definition)
{
    UpdateInCaps(codec, definition);
    UpdateOutCaps(codec, definition);
}
} // namespace

PLUGIN_DEFINITION(FFmpegVideoDecoders, LicenseType::LGPL, RegisterVideoDecoderPlugins, UnRegisterVideoDecoderPlugins);

namespace OHOS {
namespace Media {
namespace Plugin {
VideoFfmpegDecoderPlugin::VideoFfmpegDecoderPlugin(std::string name)
    : CodecPlugin(std::move(name)), outBufferQ_("vdecPluginQueue", BUFFER_QUEUE_SIZE)
{
    for (int32_t i = 0; i < AV_NUM_DATA_POINTERS; i++) {
        scaleData_[i] = nullptr;
        scaleLineSize_[i] = 0;
    }
    isAllocScaleData_ = false;
}

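// Init binds this plugin instance to the AVCodec stored in codecMap under its plugin name,
// allocates the reusable decode frame, and creates the task that drains decoded frames.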
Status VideoFfmpegDecoderPlugin::Init()
{
    OSAL::ScopedLock l(avMutex_);
    auto iter = codecMap.find(pluginName_);
    if (iter == codecMap.end()) {
        MEDIA_LOG_W("cannot find codec with name " PUBLIC_LOG_S, pluginName_.c_str());
        return Status::ERROR_UNSUPPORTED_FORMAT;
    }
    avCodec_ = iter->second;
    cachedFrame_ = std::shared_ptr<AVFrame>(av_frame_alloc(), [](AVFrame* fp) { av_frame_free(&fp); });
    videoDecParams_[Tag::REQUIRED_OUT_BUFFER_CNT] = (uint32_t)BUFFER_QUEUE_SIZE;
    if (!decodeTask_) {
        decodeTask_ = std::make_shared<OHOS::Media::OSAL::Task>("videoFfmpegDecThread");
        decodeTask_->RegisterHandler([this] { ReceiveFrameBuffer(); });
    }
    state_ = State::INITIALIZED;
    MEDIA_LOG_I("Init success");
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::Deinit()
{
    OSAL::ScopedLock l(avMutex_);
    avCodec_.reset();
    cachedFrame_.reset();
    ResetLocked();
    if (decodeTask_) {
        decodeTask_->Stop();
        decodeTask_.reset();
    }
    if (scale_) {
        scale_.reset();
    }
    state_ = State::DESTROYED;
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::SetParameter(Tag tag, const ValueType& value)
{
    OSAL::ScopedLock l(avMutex_);
    if (videoDecParams_.count(tag)) {
        videoDecParams_[tag] = value;
    } else {
        videoDecParams_.insert(std::make_pair(tag, value));
    }
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::GetParameter(Tag tag, ValueType& value)
{
    OSAL::ScopedLock l(avMutex_);
    auto res = videoDecParams_.find(tag);
    if (res != videoDecParams_.end()) {
        value = res->second;
        return Status::OK;
    }
    return Status::ERROR_INVALID_PARAMETER;
}

template <typename T>
void VideoFfmpegDecoderPlugin::FindInParameterMapThenAssignLocked(Tag tag, T& assign)
{
    auto iter = videoDecParams_.find(tag);
    if (iter != videoDecParams_.end() && Any::IsSameTypeWith<T>(iter->second)) {
        assign = Plugin::AnyCast<T>(iter->second);
    } else {
        MEDIA_LOG_W("parameter " PUBLIC_LOG_D32 " is not found or type mismatch", static_cast<int32_t>(tag));
    }
}

Status VideoFfmpegDecoderPlugin::CreateCodecContext()
{
    auto context = avcodec_alloc_context3(avCodec_.get());
    if (context == nullptr) {
        MEDIA_LOG_E("cannot allocate codec context");
        return Status::ERROR_UNKNOWN;
    }
    avCodecContext_ = std::shared_ptr<AVCodecContext>(context, [](AVCodecContext* ptr) {
        if (ptr != nullptr) {
            if (ptr->extradata) {
                av_free(ptr->extradata);
                ptr->extradata = nullptr;
            }
            avcodec_free_context(&ptr);
        }
    });
    MEDIA_LOG_I("CreateCodecContext success");
    return Status::OK;
}

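// InitCodecContext copies the negotiated parameters (bit rate, width, height, pixel format)
// from videoDecParams_ into the freshly allocated AVCodecContext before it is opened.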
void VideoFfmpegDecoderPlugin::InitCodecContext()
{
    avCodecContext_->codec_type = AVMEDIA_TYPE_VIDEO;
    FindInParameterMapThenAssignLocked<int64_t>(Tag::MEDIA_BITRATE, avCodecContext_->bit_rate);
    FindInParameterMapThenAssignLocked<std::uint32_t>(Tag::VIDEO_WIDTH, width_);
    FindInParameterMapThenAssignLocked<std::uint32_t>(Tag::VIDEO_HEIGHT, height_);
    FindInParameterMapThenAssignLocked<Plugin::VideoPixelFormat>(Tag::VIDEO_PIXEL_FORMAT, pixelFormat_);
    MEDIA_LOG_D("bitRate: " PUBLIC_LOG_D64 ", width: " PUBLIC_LOG_U32 ", height: " PUBLIC_LOG_U32
                ", pixelFormat: " PUBLIC_LOG_U32, avCodecContext_->bit_rate, width_, height_, pixelFormat_);
    SetCodecExtraData();
    // Reset coded_width/_height to prevent it being reused from last time when
    // the codec is opened again, causing a mismatch and possible segfault/corruption.
    avCodecContext_->coded_width = 0;
    avCodecContext_->coded_height = 0;
    avCodecContext_->workaround_bugs =
        static_cast<uint32_t>(avCodecContext_->workaround_bugs) | static_cast<uint32_t>(FF_BUG_AUTODETECT);
    avCodecContext_->err_recognition = 1;
}

void VideoFfmpegDecoderPlugin::DeinitCodecContext()
{
    if (avCodecContext_ == nullptr) {
        return;
    }
    if (avCodecContext_->extradata) {
        av_free(avCodecContext_->extradata);
        avCodecContext_->extradata = nullptr;
    }
    avCodecContext_->extradata_size = 0;
    avCodecContext_->opaque = nullptr;
    avCodecContext_->width = 0;
    avCodecContext_->height = 0;
    avCodecContext_->coded_width = 0;
    avCodecContext_->coded_height = 0;
    avCodecContext_->time_base.den = 0;
    avCodecContext_->time_base.num = 0;
    avCodecContext_->ticks_per_frame = 0;
    avCodecContext_->sample_aspect_ratio.num = 0;
    avCodecContext_->sample_aspect_ratio.den = 0;
    avCodecContext_->get_buffer2 = nullptr;
}

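// SetCodecExtraData copies the MEDIA_CODEC_CONFIG blob (for H.264 this is typically the
// avcC/SPS-PPS configuration) into AVCodecContext::extradata, padded as FFmpeg requires.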
void VideoFfmpegDecoderPlugin::SetCodecExtraData()
{
    auto iter = videoDecParams_.find(Tag::MEDIA_CODEC_CONFIG);
    if (iter == videoDecParams_.end() || !Any::IsSameTypeWith<std::vector<uint8_t>>(iter->second)) {
        return;
    }
    auto codecConfig = Plugin::AnyCast<std::vector<uint8_t>>(iter->second);
    int configSize = static_cast<int>(codecConfig.size());
    if (configSize > 0) {
        auto allocSize = AlignUp(configSize + AV_INPUT_BUFFER_PADDING_SIZE, STRIDE_ALIGN);
        avCodecContext_->extradata = static_cast<uint8_t*>(av_mallocz(allocSize));
        if (avCodecContext_->extradata == nullptr) {
            MEDIA_LOG_E("allocate extradata failed");
            return;
        }
        auto ret = memcpy_s(avCodecContext_->extradata, configSize, codecConfig.data(), configSize);
        if (ret != EOK) {
            MEDIA_LOG_W("memcpy into buffer failed with code " PUBLIC_LOG_D32, ret);
        }
        avCodecContext_->extradata_size = configSize;
        MEDIA_LOG_I("SetCodecExtraData success");
    }
}

Status VideoFfmpegDecoderPlugin::OpenCodecContext()
{
    const AVCodec* vdec = avcodec_find_decoder(avCodecContext_->codec_id);
    if (vdec == nullptr) {
        MEDIA_LOG_E("Codec: " PUBLIC_LOG_D32 " is not found", static_cast<int32_t>(avCodecContext_->codec_id));
        DeinitCodecContext();
        return Status::ERROR_INVALID_PARAMETER;
    }
    auto res = avcodec_open2(avCodecContext_.get(), avCodec_.get(), nullptr);
    if (res != 0) {
        MEDIA_LOG_E("avcodec open error " PUBLIC_LOG_S " when start decoder", AVStrError(res).c_str());
        DeinitCodecContext();
        return Status::ERROR_UNKNOWN;
    }
    MEDIA_LOG_I("OpenCodecContext success");
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::CloseCodecContext()
{
    Status ret = Status::OK;
    if (avCodecContext_ != nullptr) {
        auto res = avcodec_close(avCodecContext_.get());
        if (res != 0) {
            DeinitCodecContext();
            MEDIA_LOG_E("avcodec close error " PUBLIC_LOG_S " when stop decoder", AVStrError(res).c_str());
            ret = Status::ERROR_UNKNOWN;
        }
        avCodecContext_.reset();
    }
    return ret;
}
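// Prepare allocates and configures the codec context from the negotiated parameters; the
// context is only opened later, in Start(), once the plugin leaves the PREPARED state.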

Status VideoFfmpegDecoderPlugin::Prepare()
{
    {
        OSAL::ScopedLock l(avMutex_);
        if (state_ != State::INITIALIZED && state_ != State::PREPARED) {
            return Status::ERROR_WRONG_STATE;
        }
        if (CreateCodecContext() != Status::OK) {
            MEDIA_LOG_E("Create codec context fail");
            return Status::ERROR_UNKNOWN;
        }
        InitCodecContext();
#ifdef DUMP_RAW_DATA
        dumpFd_ = std::fopen("./vdec_out.yuv", "wb");
#endif
        state_ = State::PREPARED;
    }
    avPacket_ = std::shared_ptr<AVPacket>(av_packet_alloc(), [](AVPacket* ptr) {
        av_packet_free(&ptr);
    });
    outBufferQ_.SetActive(true);
    MEDIA_LOG_I("Prepare success");
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::ResetLocked()
{
    videoDecParams_.clear();
    avCodecContext_.reset();
    outBufferQ_.Clear();
    if (scaleData_[0] != nullptr) {
        if (isAllocScaleData_) {
            av_free(scaleData_[0]);
            isAllocScaleData_ = false;
        }
        for (int32_t i = 0; i < AV_NUM_DATA_POINTERS; i++) {
            scaleData_[i] = nullptr;
            scaleLineSize_[i] = 0;
        }
    }
#ifdef DUMP_RAW_DATA
    if (dumpFd_) {
        std::fclose(dumpFd_);
        dumpFd_ = nullptr;
    }
#endif
    state_ = State::INITIALIZED;
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::Reset()
{
    OSAL::ScopedLock l(avMutex_);
    return ResetLocked();
}

Status VideoFfmpegDecoderPlugin::Start()
{
    {
        OSAL::ScopedLock l(avMutex_);
        if (state_ != State::PREPARED) {
            return Status::ERROR_WRONG_STATE;
        }
        if (OpenCodecContext() != Status::OK) {
            MEDIA_LOG_E("Open codec context fail");
            return Status::ERROR_UNKNOWN;
        }
        state_ = State::RUNNING;
    }
    outBufferQ_.SetActive(true);
    decodeTask_->Start();
    MEDIA_LOG_I("Start success");
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::Stop()
{
    Status ret = Status::OK;
    {
        OSAL::ScopedLock l(avMutex_);
        ret = CloseCodecContext();
#ifdef DUMP_RAW_DATA
        if (dumpFd_) {
            std::fclose(dumpFd_);
            dumpFd_ = nullptr;
        }
#endif
        state_ = State::INITIALIZED;
    }
    outBufferQ_.SetActive(false);
    decodeTask_->Stop();
    MEDIA_LOG_I("Stop success");
    return ret;
}

Status VideoFfmpegDecoderPlugin::QueueOutputBuffer(const std::shared_ptr<Buffer>& outputBuffer, int32_t timeoutMs)
{
    outBufferQ_.Push(outputBuffer);
    MEDIA_LOG_DD("QueueOutputBuffer success");
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::Flush()
{
    OSAL::ScopedLock l(avMutex_);
    if (avCodecContext_ != nullptr) {
        avcodec_flush_buffers(avCodecContext_.get());
    }
    return Status::OK;
}

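// QueueInputBuffer pushes the packet into FFmpeg synchronously on the caller's thread and always
// hands the input buffer back via NotifyInputBufferDone; decoded frames are delivered
// asynchronously by the decode task through ReceiveFrameBuffer.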
Status VideoFfmpegDecoderPlugin::QueueInputBuffer(const std::shared_ptr<Buffer>& inputBuffer, int32_t timeoutMs)
{
    if (inputBuffer->IsEmpty() && !(inputBuffer->flag & BUFFER_FLAG_EOS)) {
        MEDIA_LOG_E("decoder does not support fd buffer");
        return Status::ERROR_INVALID_DATA;
    }
    Status ret = Status::OK;
    {
        OSAL::ScopedLock l(avMutex_);
        ret = SendBufferLocked(inputBuffer);
    }
    NotifyInputBufferDone(inputBuffer);
    MEDIA_LOG_DD("QueueInputBuffer ret: " PUBLIC_LOG_U32, ret);
    return ret;
}

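// FFmpeg requires AV_INPUT_BUFFER_PADDING_SIZE zeroed bytes after the bitstream it parses, so
// the input is copied into paddedBuffer_ when it is not already padded. An EOS buffer leaves
// avPacket_ empty, which avcodec_send_packet interprets as the start of draining.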
Status VideoFfmpegDecoderPlugin::SendBufferLocked(const std::shared_ptr<Buffer>& inputBuffer)
{
    if (state_ != State::RUNNING) {
        MEDIA_LOG_W("SendBufferLocked in wrong state: " PUBLIC_LOG_D32, state_);
        return Status::ERROR_WRONG_STATE;
    }
    if (inputBuffer && !(inputBuffer->flag & BUFFER_FLAG_EOS)) {
        auto inputMemory = inputBuffer->GetMemory();
        if (inputMemory == nullptr) {
            MEDIA_LOG_E("SendBufferLocked inputBuffer GetMemory nullptr");
            return Status::ERROR_UNKNOWN;
        }
        const uint8_t* ptr = inputMemory->GetReadOnlyData();
        auto bufferLength = inputMemory->GetSize();
        size_t bufferEnd = bufferLength;
        // pad the data if needed
        if ((bufferLength % AV_INPUT_BUFFER_PADDING_SIZE != 0) &&
            (bufferLength - bufferEnd + bufferLength % AV_INPUT_BUFFER_PADDING_SIZE < AV_INPUT_BUFFER_PADDING_SIZE)) {
            if (paddedBufferSize_ < bufferLength + AV_INPUT_BUFFER_PADDING_SIZE) {
                paddedBufferSize_ = bufferLength + AV_INPUT_BUFFER_PADDING_SIZE;
                paddedBuffer_.reserve(paddedBufferSize_);
                MEDIA_LOG_I("increase padded buffer size to " PUBLIC_LOG_ZU, paddedBufferSize_);
            }
            paddedBuffer_.assign(ptr, ptr + bufferLength);
            paddedBuffer_.insert(paddedBuffer_.end(), AV_INPUT_BUFFER_PADDING_SIZE, 0);
            ptr = paddedBuffer_.data();
        }
        avPacket_->data = const_cast<uint8_t*>(ptr);
        avPacket_->size = static_cast<int32_t>(bufferLength);
        avPacket_->pts = static_cast<int64_t>(inputBuffer->pts);
    }
    auto ret = avcodec_send_packet(avCodecContext_.get(), avPacket_.get());
    av_packet_unref(avPacket_.get());
    if (ret < 0) {
        MEDIA_LOG_DD("send buffer error " PUBLIC_LOG_S, AVStrError(ret).c_str());
        return Status::ERROR_NO_MEMORY;
    }
    return Status::OK;
}

#ifdef DUMP_RAW_DATA
void VideoFfmpegDecoderPlugin::DumpVideoRawOutData()
{
    if (dumpFd_ == nullptr) {
        return;
    }
    if (pixelFormat_ == VideoPixelFormat::YUV420P) {
        if (scaleData_[0] != nullptr && scaleLineSize_[0] != 0) {
            std::fwrite(reinterpret_cast<const char*>(scaleData_[0]),
                        scaleLineSize_[0] * height_, 1, dumpFd_);
        }
        if (scaleData_[1] != nullptr && scaleLineSize_[1] != 0) {
            std::fwrite(reinterpret_cast<const char*>(scaleData_[1]),
                        scaleLineSize_[1] * height_ / 2, 1, dumpFd_); // 2
        }
        if (scaleData_[2] != nullptr && scaleLineSize_[2] != 0) { // 2
            std::fwrite(reinterpret_cast<const char*>(scaleData_[2]),
                        scaleLineSize_[2] * height_ / 2, 1, dumpFd_); // 2
        }
    } else if (pixelFormat_ == VideoPixelFormat::NV21 || pixelFormat_ == VideoPixelFormat::NV12) {
        if (scaleData_[0] != nullptr && scaleLineSize_[0] != 0) {
            std::fwrite(reinterpret_cast<const char*>(scaleData_[0]),
                        scaleLineSize_[0] * height_, 1, dumpFd_);
        }
        if (scaleData_[1] != nullptr && scaleLineSize_[1] != 0) {
            std::fwrite(reinterpret_cast<const char*>(scaleData_[1]),
                        scaleLineSize_[1] * height_ / 2, 1, dumpFd_); // 2
        }
    } else if (pixelFormat_ == VideoPixelFormat::RGBA || pixelFormat_ == VideoPixelFormat::ARGB ||
               pixelFormat_ == VideoPixelFormat::ABGR || pixelFormat_ == VideoPixelFormat::BGRA) {
        if (scaleData_[0] != nullptr && scaleLineSize_[0] != 0) {
            std::fwrite(reinterpret_cast<const char*>(scaleData_[0]),
                        scaleLineSize_[0] * height_, 1, dumpFd_);
        }
    }
}
#endif

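// ScaleVideoFrame either reuses the decoded frame's plane pointers directly (when the decoded
// format and size already match the negotiated output) or lazily creates an Ffmpeg::Scale
// converter and converts the frame into the scaleData_ planes.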
Status VideoFfmpegDecoderPlugin::ScaleVideoFrame()
{
    if (ConvertPixelFormatFromFFmpeg(static_cast<AVPixelFormat>(cachedFrame_->format)) == pixelFormat_ &&
        static_cast<uint32_t>(cachedFrame_->width) == width_ &&
        static_cast<uint32_t>(cachedFrame_->height) == height_) {
        for (int32_t i = 0; cachedFrame_->linesize[i] > 0; i++) {
            scaleData_[i] = cachedFrame_->data[i];
            scaleLineSize_[i] = cachedFrame_->linesize[i];
        }
        return Status::OK;
    }
    if (!scale_) {
        scale_ = std::make_shared<Ffmpeg::Scale>();
        Ffmpeg::ScalePara scalePara {
            static_cast<int32_t>(cachedFrame_->width),
            static_cast<int32_t>(cachedFrame_->height),
            static_cast<AVPixelFormat>(cachedFrame_->format),
            static_cast<int32_t>(width_),
            static_cast<int32_t>(height_),
            Ffmpeg::ConvertPixelFormatToFFmpeg(pixelFormat_),
            STRIDE_ALIGN
        };
        FALSE_RETURN_V_MSG(scale_->Init(scalePara, scaleData_, scaleLineSize_) == Status::OK,
                           Status::ERROR_UNKNOWN, "Scale init error");
        isAllocScaleData_ = true;
    }
    auto res = scale_->Convert(cachedFrame_->data, cachedFrame_->linesize, scaleData_, scaleLineSize_);
    FALSE_RETURN_V_MSG_E(res == Status::OK, Status::ERROR_UNKNOWN, "Scale convert fail.");
    MEDIA_LOG_D("ScaleVideoFrame success");
    return Status::OK;
}

#ifndef OHOS_LITE
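// When the surface buffer stride differs from the picture width, each plane is copied row by
// row so that every destination row starts on a stride boundary instead of being written as
// one contiguous block.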
Status VideoFfmpegDecoderPlugin::WriteYuvDataStride(const std::shared_ptr<Buffer>& frameBuffer, uint32_t stride)
{
    auto frameBufferMem = frameBuffer->GetMemory();
    if (frameBufferMem == nullptr) {
        MEDIA_LOG_E("WriteYuvDataStride frameBuffer GetMemory nullptr");
        return Status::ERROR_UNKNOWN;
    }
    size_t srcPos = 0;
    size_t dstPos = 0;
    if (pixelFormat_ == VideoPixelFormat::YUV420P) {
        // copy the Y plane, then the U and V planes (height_ / 2 rows each), row by row
        auto writeSize = scaleLineSize_[0];
        for (uint32_t rowNum = 0; rowNum < height_; rowNum++) {
            frameBufferMem->Write(scaleData_[0] + srcPos, writeSize, dstPos);
            srcPos += writeSize;
            dstPos += stride;
        }
        srcPos = 0;
        writeSize = scaleLineSize_[1];
        for (uint32_t rowNum = 0; rowNum < height_ / 2; rowNum++) { // 2
            frameBufferMem->Write(scaleData_[1] + srcPos, writeSize, dstPos);
            srcPos += writeSize;
            dstPos += stride;
        }
        srcPos = 0;
        writeSize = scaleLineSize_[2]; // 2
        for (uint32_t rowNum = 0; rowNum < height_ / 2; rowNum++) { // 2
            frameBufferMem->Write(scaleData_[2] + srcPos, writeSize, dstPos); // 2
            srcPos += writeSize;
            dstPos += stride;
        }
    } else if ((pixelFormat_ == VideoPixelFormat::NV12) || (pixelFormat_ == VideoPixelFormat::NV21)) {
        // copy the Y plane, then the interleaved UV plane (height_ / 2 rows), row by row
        auto writeSize = scaleLineSize_[0];
        for (uint32_t rowNum = 0; rowNum < height_; rowNum++) {
            frameBufferMem->Write(scaleData_[0] + srcPos, writeSize, dstPos);
            srcPos += writeSize;
            dstPos += stride;
        }
        srcPos = 0;
        writeSize = scaleLineSize_[1];
        for (uint32_t rowNum = 0; rowNum < height_ / 2; rowNum++) { // 2
            frameBufferMem->Write(scaleData_[1] + srcPos, writeSize, dstPos);
            srcPos += writeSize;
            dstPos += stride;
        }
    } else {
        return Status::ERROR_UNSUPPORTED_FORMAT;
    }
    MEDIA_LOG_D("WriteYuvDataStride success");
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::WriteRgbDataStride(const std::shared_ptr<Buffer>& frameBuffer, int32_t stride)
{
    auto frameBufferMem = frameBuffer->GetMemory();
    if (frameBufferMem == nullptr) {
        MEDIA_LOG_E("WriteRgbDataStride frameBuffer GetMemory nullptr");
        return Status::ERROR_UNKNOWN;
    }
    if (pixelFormat_ == VideoPixelFormat::RGBA || pixelFormat_ == VideoPixelFormat::ARGB ||
        pixelFormat_ == VideoPixelFormat::ABGR || pixelFormat_ == VideoPixelFormat::BGRA) {
        size_t srcPos = 0;
        size_t dstPos = 0;
        auto writeSize = scaleLineSize_[0];
        for (uint32_t rowNum = 0; rowNum < height_; rowNum++) {
            frameBufferMem->Write(scaleData_[0] + srcPos, writeSize, dstPos);
            srcPos += writeSize;
            dstPos += static_cast<size_t>(stride);
        }
    } else {
        return Status::ERROR_UNSUPPORTED_FORMAT;
    }
    MEDIA_LOG_D("WriteRgbDataStride success");
    return Status::OK;
}
#endif

Status VideoFfmpegDecoderPlugin::WriteYuvData(const std::shared_ptr<Buffer>& frameBuffer)
{
    auto frameBufferMem = frameBuffer->GetMemory();
    if (frameBufferMem == nullptr) {
        return Status::ERROR_NULL_POINTER;
    }
#ifndef OHOS_LITE
    if (frameBufferMem->GetMemoryType() == Plugin::MemoryType::SURFACE_BUFFER) {
        std::shared_ptr<Plugin::SurfaceMemory> surfaceMemory =
            Plugin::ReinterpretPointerCast<Plugin::SurfaceMemory>(frameBufferMem);
        auto stride = surfaceMemory->GetSurfaceBufferStride();
        if (stride % width_) {
            return WriteYuvDataStride(frameBuffer, stride);
        }
    }
#endif
    size_t ySize = static_cast<size_t>(scaleLineSize_[0] * height_);
    // AV_PIX_FMT_YUV420P: scaleLineSize_[0] = scaleLineSize_[1] * 2 = scaleLineSize_[2] * 2
    // AV_PIX_FMT_NV12: scaleLineSize_[0] = scaleLineSize_[1]
    size_t uvSize = static_cast<size_t>(scaleLineSize_[1] * height_ / 2); // 2
    size_t frameSize = 0;
    if (pixelFormat_ == VideoPixelFormat::YUV420P) {
        frameSize = ySize + (uvSize * 2); // 2
    } else if (pixelFormat_ == VideoPixelFormat::NV21 || pixelFormat_ == VideoPixelFormat::NV12) {
        frameSize = ySize + uvSize;
    }
    FALSE_RETURN_V_MSG_E(frameBufferMem->GetCapacity() >= frameSize, Status::ERROR_NO_MEMORY,
                         "output buffer size is not enough: real[" PUBLIC_LOG "zu], need[" PUBLIC_LOG "zu]",
                         frameBufferMem->GetCapacity(), frameSize);
    if (pixelFormat_ == VideoPixelFormat::YUV420P) {
        frameBufferMem->Write(scaleData_[0], ySize);
        frameBufferMem->Write(scaleData_[1], uvSize);
        frameBufferMem->Write(scaleData_[2], uvSize); // 2
    } else if ((pixelFormat_ == VideoPixelFormat::NV12) || (pixelFormat_ == VideoPixelFormat::NV21)) {
        frameBufferMem->Write(scaleData_[0], ySize);
        frameBufferMem->Write(scaleData_[1], uvSize);
    } else {
        return Status::ERROR_UNSUPPORTED_FORMAT;
    }
    MEDIA_LOG_DD("WriteYuvData success");
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::WriteRgbData(const std::shared_ptr<Buffer>& frameBuffer)
{
    auto frameBufferMem = frameBuffer->GetMemory();
    if (frameBufferMem == nullptr) {
        MEDIA_LOG_E("WriteRgbData frameBuffer GetMemory nullptr");
        return Status::ERROR_UNKNOWN;
    }
#ifndef OHOS_LITE
    if (frameBufferMem->GetMemoryType() == Plugin::MemoryType::SURFACE_BUFFER) {
        std::shared_ptr<Plugin::SurfaceMemory> surfaceMemory =
            Plugin::ReinterpretPointerCast<Plugin::SurfaceMemory>(frameBufferMem);
        auto stride = surfaceMemory->GetSurfaceBufferStride();
        if (stride % width_) {
            return WriteRgbDataStride(frameBuffer, stride);
        }
    }
#endif
    size_t frameSize = static_cast<size_t>(scaleLineSize_[0] * height_);
    FALSE_RETURN_V_MSG_E(frameBufferMem->GetCapacity() >= frameSize, Status::ERROR_NO_MEMORY,
                         "output buffer size is not enough: real[" PUBLIC_LOG "zu], need[" PUBLIC_LOG "zu]",
                         frameBufferMem->GetCapacity(), frameSize);
    if (pixelFormat_ == VideoPixelFormat::RGBA || pixelFormat_ == VideoPixelFormat::ARGB ||
        pixelFormat_ == VideoPixelFormat::ABGR || pixelFormat_ == VideoPixelFormat::BGRA) {
        frameBufferMem->Write(scaleData_[0], frameSize);
    } else {
        return Status::ERROR_UNSUPPORTED_FORMAT;
    }
    MEDIA_LOG_D("WriteRgbData success");
    return Status::OK;
}

Status VideoFfmpegDecoderPlugin::FillFrameBuffer(const std::shared_ptr<Buffer>& frameBuffer)
{
    MEDIA_LOG_DD("receive one frame: " PUBLIC_LOG_D32 ", picture type: " PUBLIC_LOG_D32 ", pixel format: "
                 PUBLIC_LOG_D32 ", packet size: " PUBLIC_LOG_D32, cachedFrame_->key_frame,
                 static_cast<int32_t>(cachedFrame_->pict_type), cachedFrame_->format, cachedFrame_->pkt_size);
    FALSE_RETURN_V_MSG_E((static_cast<uint32_t>(cachedFrame_->flags) & AV_FRAME_FLAG_CORRUPT) == 0,
                         Status::ERROR_INVALID_DATA, "decoded frame is corrupt");
    auto ret = ScaleVideoFrame();
    FALSE_RETURN_V_MSG_E(ret == Status::OK, ret, "ScaleVideoFrame fail: " PUBLIC_LOG_D32, ret);
    auto bufferMeta = frameBuffer->GetBufferMeta();
    if (bufferMeta != nullptr && bufferMeta->GetType() == BufferMetaType::VIDEO) {
        std::shared_ptr<VideoBufferMeta> videoMeta = ReinterpretPointerCast<VideoBufferMeta>(bufferMeta);
        videoMeta->videoPixelFormat = pixelFormat_;
        videoMeta->height = height_;
        videoMeta->width = width_;
        for (int i = 0; scaleLineSize_[i] > 0; ++i) {
            videoMeta->stride.emplace_back(scaleLineSize_[i]);
        }
        videoMeta->planes = videoMeta->stride.size();
    }
#ifdef DUMP_RAW_DATA
    DumpVideoRawOutData();
#endif
    auto newFormat = ConvertPixelFormatToFFmpeg(pixelFormat_);
    if (IsYuvFormat(newFormat)) {
        FALSE_RETURN_V_MSG_E(WriteYuvData(frameBuffer) == Status::OK, Status::ERROR_UNSUPPORTED_FORMAT,
                             "Unsupported pixel format: " PUBLIC_LOG_U32, pixelFormat_);
    } else if (IsRgbFormat(newFormat)) {
        FALSE_RETURN_V_MSG_E(WriteRgbData(frameBuffer) == Status::OK, Status::ERROR_UNSUPPORTED_FORMAT,
                             "Unsupported pixel format: " PUBLIC_LOG_U32, pixelFormat_);
    } else {
        MEDIA_LOG_E("Unsupported pixel format: " PUBLIC_LOG_U32, pixelFormat_);
        return Status::ERROR_UNSUPPORTED_FORMAT;
    }
    frameBuffer->pts = static_cast<uint64_t>(cachedFrame_->pts);
    MEDIA_LOG_DD("FillFrameBuffer success");
    return Status::OK;
}

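// ReceiveBufferLocked maps the avcodec_receive_frame result onto plugin status codes: a decoded
// frame fills the output buffer, AVERROR_EOF marks the buffer as EOS and flushes the decoder,
// and any other error (typically EAGAIN) is reported as ERROR_TIMED_OUT so the caller retries.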
Status VideoFfmpegDecoderPlugin::ReceiveBufferLocked(const std::shared_ptr<Buffer>& frameBuffer)
{
    if (state_ != State::RUNNING) {
        MEDIA_LOG_W("ReceiveBufferLocked in wrong state: " PUBLIC_LOG_D32, state_);
        return Status::ERROR_WRONG_STATE;
    }
    Status status;
    auto ret = avcodec_receive_frame(avCodecContext_.get(), cachedFrame_.get());
    if (ret >= 0) {
        status = FillFrameBuffer(frameBuffer);
    } else if (ret == AVERROR_EOF) {
        MEDIA_LOG_I("eos received");
        auto frameBufferMem = frameBuffer->GetMemory();
        if (frameBufferMem == nullptr) {
            MEDIA_LOG_E("ReceiveBufferLocked frameBuffer GetMemory nullptr");
            return Status::ERROR_UNKNOWN;
        }
        frameBufferMem->Reset();
        frameBuffer->flag |= BUFFER_FLAG_EOS;
        avcodec_flush_buffers(avCodecContext_.get());
        status = Status::END_OF_STREAM;
    } else {
        MEDIA_LOG_DD("video decoder receive error: " PUBLIC_LOG_S, AVStrError(ret).c_str());
        status = Status::ERROR_TIMED_OUT;
    }
    av_frame_unref(cachedFrame_.get());
    MEDIA_LOG_DD("ReceiveBufferLocked status: " PUBLIC_LOG_U32, status);
    return status;
}

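// ReceiveFrameBuffer is the handler registered on decodeTask_: it pops a free output buffer from
// outBufferQ_, fills it under avMutex_, and either reports it done or pushes it back for retry.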
void VideoFfmpegDecoderPlugin::ReceiveFrameBuffer()
{
    std::shared_ptr<Buffer> frameBuffer = outBufferQ_.Pop();
    if (frameBuffer == nullptr || frameBuffer->IsEmpty()) {
        MEDIA_LOG_W("cannot fetch valid buffer to output");
        return;
    }
    auto frameMeta = frameBuffer->GetBufferMeta();
    if (frameMeta == nullptr || frameMeta->GetType() != BufferMetaType::VIDEO) {
        MEDIA_LOG_W("output buffer is not video buffer");
        return;
    }
    Status status;
    {
        OSAL::ScopedLock l(avMutex_);
        status = ReceiveBufferLocked(frameBuffer);
    }
    if (status == Status::OK || status == Status::END_OF_STREAM) {
        NotifyOutputBufferDone(frameBuffer);
    } else {
        outBufferQ_.Push(frameBuffer);
    }
}

void VideoFfmpegDecoderPlugin::NotifyInputBufferDone(const std::shared_ptr<Buffer>& input)
{
    if (dataCb_ != nullptr) {
        dataCb_->OnInputBufferDone(input);
    }
}

void VideoFfmpegDecoderPlugin::NotifyOutputBufferDone(const std::shared_ptr<Buffer>& output)
{
    if (dataCb_ != nullptr) {
        dataCb_->OnOutputBufferDone(output);
    }
}

std::shared_ptr<Allocator> VideoFfmpegDecoderPlugin::GetAllocator()
{
    return nullptr;
}
} // namespace Plugin
} // namespace Media
} // namespace OHOS
#endif