/*
 * Copyright (c) 2022-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "decode_data_process.h"

#include "distributed_camera_constants.h"
#include "distributed_hardware_log.h"
#include "dcamera_hisysevent_adapter.h"
#include "dcamera_hidumper.h"
#include "dcamera_radar.h"
#include "decode_surface_listener.h"
#include "decode_video_callback.h"
#include "graphic_common_c.h"
#include <sys/prctl.h>

namespace OHOS {
namespace DistributedHardware {
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

DecodeDataProcess::~DecodeDataProcess()
{
    DumpFileUtil::CloseDumpFile(&dumpDecBeforeFile_);
    DumpFileUtil::CloseDumpFile(&dumpDecAfterFile_);
    if (isDecoderProcess_.load()) {
        DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

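// Validates the source and target configs, bypasses decoding when both already use the same
// codec type, and otherwise creates and starts the video decoder for this node.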
int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera DecodeNode start.");
    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }

    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The DecodeNode can't convert %{public}d to %{public}d.", sourceConfig.GetVideoCodecType(),
            targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable DecodeNode. The target video codec type %{public}d is the same as the source video codec "
            "type %{public}d.", targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isDecoderProcess_.store(true);
        return DCAMERA_OK;
    }

    InitCodecEvent();
    int32_t err = InitDecoder();
    DcameraRadar::GetInstance().ReportDcameraOpenProgress("InitDecoder", CameraOpen::INIT_DECODE, err);
    if (err != DCAMERA_OK) {
        DHLOGE("Init video decoder failed.");
        ReleaseProcessNode();
        return err;
    }
    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
    processedConfig = processedConfig_;
    isDecoderProcess_.store(true);
    return DCAMERA_OK;
}

bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
{
    bool isWidthValid = (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH);
    bool isHeightValid = (curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT);
    bool isFrameRateValid = (curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
    return isWidthValid && isHeightValid && isFrameRateValid;
}

bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        targetConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

void DecodeDataProcess::InitCodecEvent()
{
    DHLOGD("Init DecodeNode eventBus, and add handler for it.");
    eventThread_ = std::thread([this]() { this->StartEventHandler(); });
    std::unique_lock<std::mutex> lock(eventMutex_);
    eventCon_.wait(lock, [this] {
        return decEventHandler_ != nullptr;
    });
}

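// Event-handler thread body: names the thread, creates an EventRunner, publishes decEventHandler_
// under the lock, and runs the event loop until ReleaseCodecEvent() stops the runner.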
void DecodeDataProcess::StartEventHandler()
{
    prctl(PR_SET_NAME, DECODE_DATA_EVENT.c_str());
    auto runner = AppExecFwk::EventRunner::Create(false);
    if (runner == nullptr) {
        DHLOGE("Create runner failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(eventMutex_);
        decEventHandler_ = std::make_shared<AppExecFwk::EventHandler>(runner);
    }
    eventCon_.notify_one();
    runner->Run();
}

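// Configures the AVCodec video decoder (metadata format, callback, output surface) and then starts it.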
int32_t DecodeDataProcess::InitDecoder()
{
    DHLOGD("Init video decoder.");
    int32_t ret = ConfigureVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video decoder failed.");
        return ret;
    }

    ret = StartVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video decoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_DECODE_ERROR,
            CreateMsg("start video decoder failed, width: %d, height: %d, format: %s",
            sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::ConfigureVideoDecoder()
{
    int32_t ret = InitDecoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video decoder metadata format failed. ret %{public}d.", ret);
        return ret;
    }

    videoDecoder_ = MediaAVCodec::VideoDecoderFactory::CreateByMime(processType_);
    CHECK_AND_RETURN_RET_LOG(videoDecoder_ == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Create video decoder failed.");
    decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video decoder callback failed. ret %{public}d.", ret);
    ret = videoDecoder_->Configure(metadataFormat_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video decoder metadata format failed. ret %{public}d.", ret);
    ret = SetDecoderOutputSurface();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Set decoder output surface failed. ret %{public}d.", ret);
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::InitDecoderMetadataFormat()
{
    DHLOGI("Init video decoder metadata format. codecType: %{public}d", sourceConfig_.GetVideoCodecType());
    processedConfig_ = sourceConfig_;
    processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC);
    processedConfig_.SetVideoformat(Videoformat::YUVI420);
    switch (sourceConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            break;
        default:
            DHLOGE("The current codec type does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV12));
    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth());
    metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight());
    metadataFormat_.PutDoubleValue("frame_rate", MAX_FRAME_RATE);

    return DCAMERA_OK;
}

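// Creates a consumer surface, registers DecodeSurfaceListener on it, and hands the matching
// producer surface to the decoder so decoded frames can be read back on the consumer side.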
int32_t DecodeDataProcess::SetDecoderOutputSurface()
{
    DHLOGD("Set the video decoder output surface.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder is null.");
        return DCAMERA_BAD_VALUE;
    }

    decodeConsumerSurface_ = IConsumerSurface::Create();
    if (decodeConsumerSurface_ == nullptr) {
        DHLOGE("Create the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeConsumerSurface_->SetDefaultWidthAndHeight(static_cast<int32_t>(sourceConfig_.GetWidth()),
        static_cast<int32_t>(sourceConfig_.GetHeight()));
    GSError ret = decodeConsumerSurface_->SetDefaultUsage(SurfaceBufferUsage::BUFFER_USAGE_MEM_MMZ_CACHE |
        SurfaceBufferUsage::BUFFER_USAGE_CPU_READ);
    CHECK_AND_LOG(ret != GSERROR_OK || decodeConsumerSurface_ == nullptr, "%{public}s", "Set Usage failed.");

    decodeSurfaceListener_ =
        OHOS::sptr<IBufferConsumerListener>(new DecodeSurfaceListener(decodeConsumerSurface_, shared_from_this()));
    if (decodeConsumerSurface_->RegisterConsumerListener(decodeSurfaceListener_) !=
        SURFACE_ERROR_OK) {
        DHLOGE("Register consumer listener failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<IBufferProducer> surfaceProducer = decodeConsumerSurface_->GetProducer();
    CHECK_AND_RETURN_RET_LOG(surfaceProducer == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Get the surface producer of the decode consumer surface failed.");
    decodeProducerSurface_ = Surface::CreateSurfaceAsProducer(surfaceProducer);
    if (decodeProducerSurface_ == nullptr) {
        DHLOGE("Create the decode producer surface of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    DHLOGD("Set the producer surface to video decoder output surface.");
    int32_t err = videoDecoder_->SetOutputSurface(decodeProducerSurface_);
    CHECK_AND_RETURN_RET_LOG(err != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR, "%{public}s",
        "Set decoder output surface failed.");
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StartVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StartVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoDecoder_->Prepare();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video decoder prepare failed. ret %{public}d.", ret);
    ret = videoDecoder_->Start();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video decoder start failed. ret %{public}d.", ret);
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StopVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StopVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoDecoder_->Flush();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder flush failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    ret = videoDecoder_->Stop();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder stop failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::ReleaseVideoDecoder()
{
    DHLOGD("Start release videoDecoder.");
    std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
    std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before ReleaseVideoDecoder.");
        decodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoDecoder();
    CHECK_AND_LOG(ret != DCAMERA_OK, "%{public}s", "StopVideoDecoder failed.");
    ret = videoDecoder_->Release();
    CHECK_AND_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "VideoDecoder release failed. ret %{public}d.", ret);
    videoDecoder_ = nullptr;
    decodeVideoCallback_ = nullptr;
}

void DecodeDataProcess::ReleaseDecoderSurface()
{
    if (decodeConsumerSurface_ == nullptr) {
        decodeProducerSurface_ = nullptr;
        DHLOGE("The decode consumer surface does not exist before UnregisterConsumerListener.");
        return;
    }
    int32_t ret = decodeConsumerSurface_->UnregisterConsumerListener();
    CHECK_AND_LOG(ret != SURFACE_ERROR_OK, "Unregister consumer listener failed. ret %{public}d.", ret);
    decodeConsumerSurface_ = nullptr;
    decodeProducerSurface_ = nullptr;
}

void DecodeDataProcess::ReleaseCodecEvent()
{
    if ((decEventHandler_ != nullptr) && (decEventHandler_->GetEventRunner() != nullptr)) {
        decEventHandler_->GetEventRunner()->Stop();
        eventThread_.join();
    }
    decEventHandler_ = nullptr;
    pipeSrcEventHandler_ = nullptr;
    DHLOGD("Release DecodeNode eventBusDecode and eventBusPipeline end.");
}

void DecodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%{public}zu] node : DecodeNode.", nodeRank_);
    isDecoderProcess_.store(false);
    ReleaseVideoDecoder();
    ReleaseDecoderSurface();
    ReleaseCodecEvent();

    processType_ = "";
    std::queue<std::shared_ptr<DataBuffer>>().swap(inputBuffersQueue_);
    std::queue<uint32_t>().swap(availableInputIndexsQueue_);
    std::queue<std::shared_ptr<Media::AVSharedMemory>>().swap(availableInputBufferQueue_);
    std::deque<DCameraFrameInfo>().swap(frameInfoDeque_);
    waitDecoderOutputCount_ = 0;
    lastFeedDecoderInputBufferTimeUs_ = 0;
    outputTimeStampUs_ = 0;
    alignedHeight_ = 0;

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%{public}zu] node : DecodeNode end.", nodeRank_);
}

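// Pipeline entry for encoded frames: forwards them unchanged when no decoding is needed, otherwise
// queues the frame and feeds the decoder, retrying asynchronously via pipeSrcEventHandler_ if feeding fails.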
int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in DecodeDataProcess.");
    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_BEFORE_DEC_FILENAME, &dumpDecBeforeFile_);
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_AFTER_DEC_FILENAME, &dumpDecAfterFile_);
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %{public}d is the same as the source VideoCodecType : %{public}d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return DecodeDone(inputBuffers);
    }

    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before decoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffersQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder input buffers queue overflow.");
        return DCAMERA_INDEX_OVERFLOW;
    }
    if (inputBuffers[0]->Size() > MAX_YUV420_BUFFER_SIZE) {
        DHLOGE("DecodeNode input buffer size %{public}zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node encountered an error or is being released.");
        return DCAMERA_DISABLE_PROCESS;
    }
    inputBuffersQueue_.push(inputBuffers[0]);
    DHLOGD("Push inputBuf success. BufSize %{public}zu, QueueSize %{public}zu.", inputBuffers[0]->Size(),
        inputBuffersQueue_.size());
    int32_t err = FeedDecoderInputBuffer();
    if (err != DCAMERA_OK) {
        int32_t sleepTimeUs = 5000;
        std::this_thread::sleep_for(std::chrono::microseconds(sleepTimeUs));
        DHLOGD("Feed decoder input buffer failed. Try FeedDecoderInputBuffer again.");
        auto sendFunc = [this]() mutable {
            int32_t ret = FeedDecoderInputBuffer();
            DHLOGD("Execute FeedDecoderInputBuffer ret %{public}d.", ret);
        };
        CHECK_AND_RETURN_RET_LOG(pipeSrcEventHandler_ == nullptr, DCAMERA_BAD_VALUE,
            "%{public}s", "pipeSrcEventHandler_ is nullptr.");
        pipeSrcEventHandler_->PostTask(sendFunc);
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::BeforeDecodeDump(uint8_t *buffer, size_t bufSize)
{
#ifdef DUMP_DCAMERA_FILE
    if (buffer == nullptr) {
        DHLOGE("dumpsaving : input param nullptr.");
        return;
    }
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + BEFORE_DECODE) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + BEFORE_DECODE, buffer, bufSize);
    }
#endif
    return;
}

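// Copies queued encoded frames into the decoder's available input shared memories and queues them
// to the codec, recording the decode-start time for each frame.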
int32_t DecodeDataProcess::FeedDecoderInputBuffer()
{
    DHLOGD("Feed decoder input buffer.");
    while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) {
        std::shared_ptr<DataBuffer> buffer = inputBuffersQueue_.front();
        if (buffer == nullptr || availableInputIndexsQueue_.empty() || availableInputBufferQueue_.empty()) {
            DHLOGE("inputBuffersQueue size %{public}zu, availableInputIndexsQueue size %{public}zu, "
                "availableInputBufferQueue size %{public}zu",
                inputBuffersQueue_.size(), availableInputIndexsQueue_.size(), availableInputBufferQueue_.size());
            return DCAMERA_BAD_VALUE;
        }
        buffer->frameInfo_.timePonit.startDecode = GetNowTimeStampUs();
        {
            std::lock_guard<std::mutex> lock(mtxDequeLock_);
            frameInfoDeque_.push_back(buffer->frameInfo_);
        }
        int64_t timeStamp = buffer->frameInfo_.pts;
        {
            std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
            CHECK_AND_RETURN_RET_LOG(
                videoDecoder_ == nullptr, DCAMERA_OK, "The video decoder does not exist before GetInputBuffer.");
            uint32_t index = availableInputIndexsQueue_.front();
            std::shared_ptr<Media::AVSharedMemory> sharedMemoryInput = availableInputBufferQueue_.front();
            if (sharedMemoryInput == nullptr) {
                DHLOGE("Failed to obtain the input shared memory corresponding to the [%{public}u] index.", index);
                return DCAMERA_BAD_VALUE;
            }
            BeforeDecodeDump(buffer->Data(), buffer->Size());
            DumpFileUtil::WriteDumpFile(dumpDecBeforeFile_, static_cast<void *>(buffer->Data()), buffer->Size());
            size_t inputMemoDataSize = static_cast<size_t>(sharedMemoryInput->GetSize());
            errno_t err = memcpy_s(sharedMemoryInput->GetBase(), inputMemoDataSize, buffer->Data(), buffer->Size());
            CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR, "memcpy_s buffer failed.");
            DHLOGD("Decoder input buffer size %{public}zu, timeStamp %{public}" PRId64"us.", buffer->Size(), timeStamp);
            MediaAVCodec::AVCodecBufferInfo bufferInfo {timeStamp, static_cast<int32_t>(buffer->Size()), 0};
            int32_t ret = videoDecoder_->QueueInputBuffer(index, bufferInfo,
                MediaAVCodec::AVCODEC_BUFFER_FLAG_NONE);
            if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
                DHLOGE("Queue input buffer failed.");
                return DCAMERA_BAD_OPERATE;
            }
        }

        inputBuffersQueue_.pop();
        DHLOGD("Feed decoder input buffer success. inputBuffersQueue size is %{public}zu.", inputBuffersQueue_.size());

        IncreaseWaitDecodeCnt();
    }
    return DCAMERA_OK;
}

int64_t DecodeDataProcess::GetDecoderTimeStamp()
{
    int64_t timeIntervalStampUs = 0;
    int64_t nowTimeUs = GetNowTimeStampUs();
    if (lastFeedDecoderInputBufferTimeUs_ == 0) {
        lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
        return timeIntervalStampUs;
    }
    timeIntervalStampUs = nowTimeUs - lastFeedDecoderInputBufferTimeUs_;
    lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
    return timeIntervalStampUs;
}

void DecodeDataProcess::IncreaseWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    availableInputIndexsQueue_.pop();
    availableInputBufferQueue_.pop();
    waitDecoderOutputCount_++;
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

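// Decrements the pending-output counter; the first output subtracts FIRST_FRAME_INPUT_NUM, presumably
// to account for the leading codec-data input that produces no output of its own.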
void DecodeDataProcess::ReduceWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitDecoderOutputCount_ <= 0) {
        DHLOGE("The waitDecoderOutputCount_ = %{public}d.", waitDecoderOutputCount_);
    }
    if (outputTimeStampUs_ == 0) {
        waitDecoderOutputCount_ -= FIRST_FRAME_INPUT_NUM;
    } else {
        waitDecoderOutputCount_--;
    }
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::OnSurfaceOutputBufferAvailable(const sptr<IConsumerSurface>& surface)
{
    auto sendFunc = [this, surface]() mutable {
        GetDecoderOutputBuffer(surface);
        DHLOGD("Execute GetDecoderOutputBuffer.");
    };
    if (decEventHandler_ != nullptr) {
        decEventHandler_->PostTask(sendFunc);
    }
}

void DecodeDataProcess::GetDecoderOutputBuffer(const sptr<IConsumerSurface>& surface)
{
    DHLOGD("Get decoder output buffer.");
    if (surface == nullptr) {
        DHLOGE("Get decode consumer surface failed.");
        return;
    }
    Rect damage = {0, 0, 0, 0};
    int32_t acquireFence = 0;
    int64_t timeStamp = 0;
    sptr<SurfaceBuffer> surfaceBuffer = nullptr;
    GSError ret = surface->AcquireBuffer(surfaceBuffer, acquireFence, timeStamp, damage);
    if (ret != GSERROR_OK || surfaceBuffer == nullptr) {
        DHLOGE("Acquire surface buffer failed!");
        return;
    }
    ret = surfaceBuffer->InvalidateCache();
    CHECK_AND_LOG(ret != GSERROR_OK, "Invalidate cache failed.");
    int32_t alignedWidth = surfaceBuffer->GetStride();
    if (surfaceBuffer->GetSize() > BUFFER_MAX_SIZE || alignedWidth > ALIGNED_WIDTH_MAX_SIZE) {
        DHLOGE("Surface buffer size or alignedWidth too large.");
        return;
    }
    int32_t alignedHeight = alignedHeight_;
    DHLOGD("OutputBuffer alignedWidth %{public}d, alignedHeight %{public}d, timeStamp %{public}" PRId64 " ns.",
        alignedWidth, alignedHeight, timeStamp);
    CopyDecodedImage(surfaceBuffer, alignedWidth, alignedHeight);
    surface->ReleaseBuffer(surfaceBuffer, -1);
    outputTimeStampUs_ = timeStamp;
    ReduceWaitDecodeCnt();
}

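// Converts the decoder's NV12 surface buffer into a packed I420 DataBuffer, attaches the matching
// frame info and size metadata, then posts it onward through the decode event handler.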
void DecodeDataProcess::CopyDecodedImage(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (!IsCorrectSurfaceBuffer(surBuf, alignedWidth, alignedHeight)) {
        DHLOGE("Surface output buffer error.");
        return;
    }

    DHLOGD("Convert NV12 to I420, format=%{public}d, width=[%{public}d, %{public}d], height=[%{public}d, %{public}d]",
        sourceConfig_.GetVideoformat(), sourceConfig_.GetWidth(), alignedWidth, sourceConfig_.GetHeight(),
        alignedHeight);
    int srcSizeY = alignedWidth * alignedHeight;
    uint8_t *srcDataY = static_cast<uint8_t *>(surBuf->GetVirAddr());
    uint8_t *srcDataUV = static_cast<uint8_t *>(surBuf->GetVirAddr()) + srcSizeY;

    int dstSizeY = sourceConfig_.GetWidth() * sourceConfig_.GetHeight();
    int dstSizeUV = (static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV) *
                    (static_cast<uint32_t>(sourceConfig_.GetHeight()) >> MEMORY_RATIO_UV);
    std::shared_ptr<DataBuffer> bufferOutput =
        std::make_shared<DataBuffer>(dstSizeY * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    uint8_t *dstDataY = bufferOutput->Data();
    uint8_t *dstDataU = bufferOutput->Data() + dstSizeY;
    uint8_t *dstDataV = bufferOutput->Data() + dstSizeY + dstSizeUV;
    auto converter = ConverterHandle::GetInstance().GetHandle();
    CHECK_AND_RETURN_LOG(converter.NV12ToI420 == nullptr, "converter is null.");
    int32_t ret = converter.NV12ToI420(srcDataY, alignedWidth, srcDataUV, alignedWidth,
        dstDataY, sourceConfig_.GetWidth(),
        dstDataU, static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV,
        dstDataV, static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV,
        processedConfig_.GetWidth(), processedConfig_.GetHeight());
    if (ret != DCAMERA_OK) {
        DHLOGE("Convert NV12 to I420 failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        bufferOutput->frameInfo_ = frameInfoDeque_.front();
        frameInfoDeque_.pop_front();
    }
    bufferOutput->SetInt32("Videoformat", static_cast<int32_t>(Videoformat::YUVI420));
    bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth());
    bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight());
    bufferOutput->SetInt32("width", processedConfig_.GetWidth());
    bufferOutput->SetInt32("height", processedConfig_.GetHeight());
#ifdef DUMP_DCAMERA_FILE
    std::string fileName = "SourceAfterDecode_width(" + std::to_string(processedConfig_.GetWidth())
        + ")height(" + std::to_string(processedConfig_.GetHeight()) + ").yuv";
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + fileName) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + fileName, bufferOutput->Data(), bufferOutput->Size());
    }
#endif
    DumpFileUtil::WriteDumpFile(dumpDecAfterFile_, static_cast<void *>(bufferOutput->Data()), bufferOutput->Size());
    PostOutputDataBuffers(bufferOutput);
}

bool DecodeDataProcess::IsCorrectSurfaceBuffer(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (surBuf == nullptr) {
        DHLOGE("surface buffer is null!");
        return false;
    }

    size_t yuvImageAlignedSize = static_cast<size_t>(alignedWidth * alignedHeight *
                                                              YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t yuvImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
                                                       YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
    if (yuvImageAlignedSize > surfaceBufSize || yuvImageAlignedSize < yuvImageSize) {
        DHLOGE("Buffer size error, yuvImageSize %{public}zu, yuvImageAlignedSize %{public}zu, surBufSize %{public}"
            PRIu32, yuvImageSize, yuvImageAlignedSize, surBuf->GetSize());
        return false;
    }
    return true;
}

void DecodeDataProcess::PostOutputDataBuffers(std::shared_ptr<DataBuffer>& outputBuffer)
{
    if (decEventHandler_ == nullptr || outputBuffer == nullptr) {
        DHLOGE("decEventHandler_ or outputBuffer is null.");
        return;
    }
    auto sendFunc = [this, outputBuffer]() mutable {
        std::vector<std::shared_ptr<DataBuffer>> multiDataBuffers;
        multiDataBuffers.push_back(outputBuffer);
        int32_t ret = DecodeDone(multiDataBuffers);
        DHLOGD("Execute DecodeDone ret %{public}d.", ret);
    };
    if (decEventHandler_ != nullptr) {
        decEventHandler_->PostTask(sendFunc);
    }
    DHLOGD("Send video decoder output asynchronous DCameraCodecEvents success.");
}

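// Hands the decoded buffers to the next pipeline node if one exists; otherwise reports them back
// to the owning DCameraPipelineSource.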
int32_t DecodeDataProcess::DecodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Decoder Done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the decoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the decoder failed to process the data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void DecodeDataProcess::OnError()
{
    DHLOGD("DecodeDataProcess : OnError.");
    isDecoderProcess_.store(false);
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return;
    }
    targetPipelineSource->OnError(DataProcessErrorType::ERROR_PIPELINE_DECODER);
}

void DecodeDataProcess::OnInputBufferAvailable(uint32_t index, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    DHLOGD("DecodeDataProcess::OnInputBufferAvailable");
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (availableInputIndexsQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder available indexes queue overflow.");
        return;
    }
    DHLOGD("Video decoder available indexes queue push index [%{public}u].", index);
    availableInputIndexsQueue_.push(index);
    availableInputBufferQueue_.push(buffer);
}

void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The first changed video decoder output format is null.");
        return;
    }
    decodeOutputFormat_ = format;
}

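// Codec output callback: stamps the decode-finish time on the oldest pending frame info and renders
// the output buffer to the surface (ReleaseOutputBuffer with render set to true).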
void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const MediaAVCodec::AVCodecBufferInfo& info,
    const MediaAVCodec::AVCodecBufferFlag& flag, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    int64_t finishDecodeT = GetNowTimeStampUs();
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node encountered an error or is being released.");
        return;
    }
    DHLOGD("Video decode buffer info: presentation TimeUs %{public}" PRId64", size %{public}d, offset %{public}d, "
        "flag %{public}d", info.presentationTimeUs, info.size, info.offset, flag);
    outputInfo_ = info;
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        AlignFirstFrameTime();
        for (auto it = frameInfoDeque_.begin(); it != frameInfoDeque_.end(); it++) {
            DCameraFrameInfo frameInfo = *it;
            if (frameInfo.timePonit.finishDecode != 0) {
                continue;
            }
            frameInfo.timePonit.finishDecode = finishDecodeT;
            frameInfoDeque_.emplace(frameInfoDeque_.erase(it), frameInfo);
            break;
        }
    }
    {
        std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
        if (videoDecoder_ == nullptr) {
            DHLOGE("The video decoder does not exist before decoding data.");
            return;
        }
        int32_t errRelease = videoDecoder_->ReleaseOutputBuffer(index, true);
        if (errRelease != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
            DHLOGE("The video decoder output decoded data to surfacebuffer failed, index : [%{public}u].", index);
        }
    }
}

VideoConfigParams DecodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams DecodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t DecodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    return DCAMERA_OK;
}

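// If the deque starts with the codec-data head frame, drops that entry and merges its time points
// (taking the next frame's identity and finishEncode time) into the first real frame.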
void DecodeDataProcess::AlignFirstFrameTime()
{
    if (frameInfoDeque_.empty()) {
        return;
    }
    DCameraFrameInfo frameInfo = frameInfoDeque_.front();
    if (frameInfo.index != FRAME_HEAD || frameInfo.type != MediaAVCodec::AVCODEC_BUFFER_FLAG_CODEC_DATA) {
        return;
    }
    frameInfoDeque_.pop_front();
    DCameraFrameInfo front = frameInfoDeque_.front();
    frameInfo.index = front.index;
    frameInfo.pts = front.pts;
    frameInfo.offset = front.offset;
    frameInfo.type = front.type;
    frameInfo.ver = front.ver;
    frameInfo.timePonit.finishEncode = front.timePonit.finishEncode;
    frameInfoDeque_.emplace(frameInfoDeque_.erase(frameInfoDeque_.begin()), frameInfo);
}
} // namespace DistributedHardware
} // namespace OHOS