/*
 * Copyright (c) 2022-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "decode_data_process.h"

#include "distributed_camera_constants.h"
#include "distributed_hardware_log.h"
#include "dcamera_hisysevent_adapter.h"
#include "dcamera_hidumper.h"
#include "decode_surface_listener.h"
#include "decode_video_callback.h"
#include "graphic_common_c.h"
#include <sys/prctl.h>

namespace OHOS {
namespace DistributedHardware {
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

DecodeDataProcess::~DecodeDataProcess()
{
    DumpFileUtil::CloseDumpFile(&dumpDecBeforeFile_);
    DumpFileUtil::CloseDumpFile(&dumpDecAfterFile_);
    if (isDecoderProcess_.load()) {
        DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

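// Initializes the decode node: validates source/target configs, bypasses decoding when the codec
// types already match, and otherwise starts the event handler and brings up the video decoder.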
int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera DecodeNode start.");
    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }

    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The DecodeNode can't convert %{public}d to %{public}d.", sourceConfig.GetVideoCodecType(),
            targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable DecodeNode. The target video codec type %{public}d is the same as the source video codec "
            "type %{public}d.", targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isDecoderProcess_.store(true);
        return DCAMERA_OK;
    }

    InitCodecEvent();
    int32_t err = InitDecoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video decoder failed.");
        ReleaseProcessNode();
        return err;
    }
    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
    processedConfig = processedConfig_;
    isDecoderProcess_.store(true);
    return DCAMERA_OK;
}

bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
{
    bool isWidthValid = (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH);
    bool isHeightValid = (curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT);
    bool isFrameRateValid = (curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
    return isWidthValid && isHeightValid && isFrameRateValid;
}

bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        targetConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

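// Starts the decode event thread and blocks until StartEventHandler has published
// decEventHandler_, so later PostTask calls always see a valid handler.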
void DecodeDataProcess::InitCodecEvent()
{
    DHLOGD("Init DecodeNode eventBus, and add handler for it.");
    eventThread_ = std::thread([this]() { this->StartEventHandler(); });
    std::unique_lock<std::mutex> lock(eventMutex_);
    eventCon_.wait(lock, [this] {
        return decEventHandler_ != nullptr;
    });
}

void DecodeDataProcess::StartEventHandler()
{
    prctl(PR_SET_NAME, DECODE_DATA_EVENT.c_str());
    auto runner = AppExecFwk::EventRunner::Create(false);
    if (runner == nullptr) {
        DHLOGE("Create runner failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(eventMutex_);
        decEventHandler_ = std::make_shared<AppExecFwk::EventHandler>(runner);
    }
    eventCon_.notify_one();
    runner->Run();
}

int32_t DecodeDataProcess::InitDecoder()
{
    DHLOGD("Init video decoder.");
    int32_t ret = ConfigureVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video decoder failed.");
        return ret;
    }

    ret = StartVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video decoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_DECODE_ERROR,
            CreateMsg("start video decoder failed, width: %d, height: %d, format: %s",
            sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }
    return DCAMERA_OK;
}

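// Builds the metadata format, creates the AVCodec video decoder by MIME type, registers the
// callback, configures the decoder, and binds its output surface.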
int32_t DecodeDataProcess::ConfigureVideoDecoder()
{
    int32_t ret = InitDecoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video decoder metadata format failed. ret %{public}d.", ret);
        return ret;
    }

    videoDecoder_ = MediaAVCodec::VideoDecoderFactory::CreateByMime(processType_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("Create video decoder failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set video decoder callback failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoDecoder_->Configure(metadataFormat_);
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set video decoder metadata format failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = SetDecoderOutputSurface();
    if (ret != DCAMERA_OK) {
        DHLOGE("Set decoder output surface failed. ret %{public}d.", ret);
        return ret;
    }

    return DCAMERA_OK;
}

int32_t DecodeDataProcess::InitDecoderMetadataFormat()
{
    DHLOGI("Init video decoder metadata format. codecType: %{public}d", sourceConfig_.GetVideoCodecType());
    processedConfig_ = sourceConfig_;
    processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC);
    switch (sourceConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            break;
        default:
            DHLOGE("The current codec type does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    DHLOGI("Init video decoder metadata format. videoformat: %{public}d", processedConfig_.GetVideoformat());
    switch (processedConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::YUVI420));
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV12:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV12));
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV21:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV21));
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::RGBA_8888:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::RGBA));
            metadataFormat_.PutIntValue("max_input_size", MAX_RGB32_BUFFER_SIZE);
            break;
        default:
            DHLOGE("The current pixel format does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth());
    metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight());
    metadataFormat_.PutDoubleValue("frame_rate", MAX_FRAME_RATE);

    return DCAMERA_OK;
}

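// Creates a consumer/producer surface pair: the consumer side receives decoded frames through
// DecodeSurfaceListener, and the producer side is handed to the decoder as its output surface.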
int32_t DecodeDataProcess::SetDecoderOutputSurface()
{
    DHLOGD("Set the video decoder output surface.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder is null.");
        return DCAMERA_BAD_VALUE;
    }

    decodeConsumerSurface_ = IConsumerSurface::Create();
    if (decodeConsumerSurface_ == nullptr) {
        DHLOGE("Create the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeConsumerSurface_->SetDefaultWidthAndHeight(static_cast<int32_t>(sourceConfig_.GetWidth()),
        static_cast<int32_t>(sourceConfig_.GetHeight()));
    GSError ret = decodeConsumerSurface_->SetDefaultUsage(SurfaceBufferUsage::BUFFER_USAGE_MEM_MMZ_CACHE |
        SurfaceBufferUsage::BUFFER_USAGE_CPU_READ);
    if (ret != GSERROR_OK || decodeConsumerSurface_ == nullptr) {
        DHLOGE("Set Usage failed.");
    }

    decodeSurfaceListener_ =
        OHOS::sptr<IBufferConsumerListener>(new DecodeSurfaceListener(decodeConsumerSurface_, shared_from_this()));
    if (decodeConsumerSurface_->RegisterConsumerListener(decodeSurfaceListener_) !=
        SURFACE_ERROR_OK) {
        DHLOGE("Register consumer listener failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<IBufferProducer> surfaceProducer = decodeConsumerSurface_->GetProducer();
    if (surfaceProducer == nullptr) {
        DHLOGE("Get the surface producer of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeProducerSurface_ = Surface::CreateSurfaceAsProducer(surfaceProducer);
    if (decodeProducerSurface_ == nullptr) {
        DHLOGE("Create the decode producer surface of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    DHLOGD("Set the producer surface to video decoder output surface.");
    int32_t err = videoDecoder_->SetOutputSurface(decodeProducerSurface_);
    if (err != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set decoder output surface failed.");
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StartVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StartVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoDecoder_->Prepare();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Video decoder prepare failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }
    ret = videoDecoder_->Start();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Video decoder start failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StopVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StopVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoDecoder_->Flush();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder flush failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    ret = videoDecoder_->Stop();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder stop failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::ReleaseVideoDecoder()
{
    DHLOGD("Start release videoDecoder.");
    std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
    std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before ReleaseVideoDecoder.");
        decodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("StopVideoDecoder failed.");
    }
    ret = videoDecoder_->Release();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder release failed. ret %{public}d.", ret);
    }
    videoDecoder_ = nullptr;
    decodeVideoCallback_ = nullptr;
}

void DecodeDataProcess::ReleaseDecoderSurface()
{
    if (decodeConsumerSurface_ == nullptr) {
        decodeProducerSurface_ = nullptr;
        DHLOGE("The decode consumer surface does not exist before UnregisterConsumerListener.");
        return;
    }
    int32_t ret = decodeConsumerSurface_->UnregisterConsumerListener();
    if (ret != SURFACE_ERROR_OK) {
        DHLOGE("Unregister consumer listener failed. ret %{public}d.", ret);
    }
    decodeConsumerSurface_ = nullptr;
    decodeProducerSurface_ = nullptr;
}

void DecodeDataProcess::ReleaseCodecEvent()
{
    if ((decEventHandler_ != nullptr) && (decEventHandler_->GetEventRunner() != nullptr)) {
        decEventHandler_->GetEventRunner()->Stop();
        eventThread_.join();
    }
    decEventHandler_ = nullptr;
    pipeSrcEventHandler_ = nullptr;
    DHLOGD("Release DecodeNode eventBusDecode and eventBusPipeline end.");
}

void DecodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%{public}zu] node : DecodeNode.", nodeRank_);
    isDecoderProcess_.store(false);
    ReleaseVideoDecoder();
    ReleaseDecoderSurface();
    ReleaseCodecEvent();

    processType_ = "";
    std::queue<std::shared_ptr<DataBuffer>>().swap(inputBuffersQueue_);
    std::queue<uint32_t>().swap(availableInputIndexsQueue_);
    std::queue<std::shared_ptr<Media::AVSharedMemory>>().swap(availableInputBufferQueue_);
    std::deque<DCameraFrameInfo>().swap(frameInfoDeque_);
    waitDecoderOutputCount_ = 0;
    lastFeedDecoderInputBufferTimeUs_ = 0;
    outputTimeStampUs_ = 0;
    alignedHeight_ = 0;

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%{public}zu] node : DecodeNode end.", nodeRank_);
}

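// Entry point for encoded frames: passes data straight through when no decoding is needed,
// otherwise queues the buffer and feeds it to the decoder, retrying asynchronously on failure.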
int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in DecodeDataProcess.");
    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_BEFORE_DEC_FILENAME, &dumpDecBeforeFile_);
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_AFTER_DEC_FILENAME, &dumpDecAfterFile_);
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %{public}d is the same as the source VideoCodecType : %{public}d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return DecodeDone(inputBuffers);
    }

    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before decoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffersQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder input buffers queue overflow.");
        return DCAMERA_INDEX_OVERFLOW;
    }
    if (inputBuffers[0]->Size() > MAX_BUFFER_SIZE) {
        DHLOGE("DecodeNode input buffer size %{public}zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node encountered an error or is being released.");
        return DCAMERA_DISABLE_PROCESS;
    }
    inputBuffersQueue_.push(inputBuffers[0]);
    DHLOGD("Push inputBuf success. BufSize %{public}zu, QueueSize %{public}zu.", inputBuffers[0]->Size(),
        inputBuffersQueue_.size());
    int32_t err = FeedDecoderInputBuffer();
    if (err != DCAMERA_OK) {
        int32_t sleepTimeUs = 5000;
        std::this_thread::sleep_for(std::chrono::microseconds(sleepTimeUs));
        DHLOGD("Feed decoder input buffer failed. Try FeedDecoderInputBuffer again.");
        auto sendFunc = [this]() mutable {
            int32_t ret = FeedDecoderInputBuffer();
            DHLOGD("Execute FeedDecoderInputBuffer ret %{public}d.", ret);
        };
        CHECK_AND_RETURN_RET_LOG(pipeSrcEventHandler_ == nullptr, DCAMERA_BAD_VALUE,
            "%{public}s", "pipeSrcEventHandler_ is nullptr.");
        pipeSrcEventHandler_->PostTask(sendFunc);
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::BeforeDecodeDump(uint8_t *buffer, size_t bufSize)
{
#ifdef DUMP_DCAMERA_FILE
    if (buffer == nullptr) {
        DHLOGE("dumpsaving : input param nullptr.");
        return;
    }
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + BEFORE_DECODE) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + BEFORE_DECODE, buffer, bufSize);
    }
#endif
    return;
}

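// Pairs each queued encoded buffer with an available decoder input index and its shared memory,
// copies the data in, and queues it to the decoder until either queue runs out.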
int32_t DecodeDataProcess::FeedDecoderInputBuffer()
{
    DHLOGD("Feed decoder input buffer.");
    while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) {
        std::shared_ptr<DataBuffer> buffer = inputBuffersQueue_.front();
        if (buffer == nullptr || availableInputIndexsQueue_.empty() || availableInputBufferQueue_.empty()) {
            DHLOGE("inputBuffersQueue size %{public}zu, availableInputIndexsQueue size %{public}zu, "
                "availableInputBufferQueue size %{public}zu",
                inputBuffersQueue_.size(), availableInputIndexsQueue_.size(), availableInputBufferQueue_.size());
            return DCAMERA_BAD_VALUE;
        }
        buffer->frameInfo_.timePonit.startDecode = GetNowTimeStampUs();
        {
            std::lock_guard<std::mutex> lock(mtxDequeLock_);
            frameInfoDeque_.push_back(buffer->frameInfo_);
        }
        int64_t timeStamp = buffer->frameInfo_.pts;
        {
            std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
            CHECK_AND_RETURN_RET_LOG(
                videoDecoder_ == nullptr, DCAMERA_OK, "The video decoder does not exist before GetInputBuffer.");
            uint32_t index = availableInputIndexsQueue_.front();
            std::shared_ptr<Media::AVSharedMemory> sharedMemoryInput = availableInputBufferQueue_.front();
            if (sharedMemoryInput == nullptr) {
                DHLOGE("Failed to obtain the input shared memory corresponding to the [%{public}u] index.", index);
                return DCAMERA_BAD_VALUE;
            }
            BeforeDecodeDump(buffer->Data(), buffer->Size());
            DumpFileUtil::WriteDumpFile(dumpDecBeforeFile_, static_cast<void *>(buffer->Data()), buffer->Size());
            size_t inputMemoDataSize = static_cast<size_t>(sharedMemoryInput->GetSize());
            errno_t err = memcpy_s(sharedMemoryInput->GetBase(), inputMemoDataSize, buffer->Data(), buffer->Size());
            CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR, "memcpy_s buffer failed.");
            DHLOGD("Decoder input buffer size %{public}zu, timeStamp %{public}" PRId64"us.", buffer->Size(), timeStamp);
            MediaAVCodec::AVCodecBufferInfo bufferInfo {timeStamp, static_cast<int32_t>(buffer->Size()), 0};
            int32_t ret = videoDecoder_->QueueInputBuffer(index, bufferInfo,
                MediaAVCodec::AVCODEC_BUFFER_FLAG_NONE);
            if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
                DHLOGE("Queue input buffer failed.");
                return DCAMERA_BAD_OPERATE;
            }
        }

        inputBuffersQueue_.pop();
        DHLOGD("Feed decoder input buffer success. inputBuffersQueue size is %{public}zu.", inputBuffersQueue_.size());

        IncreaseWaitDecodeCnt();
    }
    return DCAMERA_OK;
}

int64_t DecodeDataProcess::GetDecoderTimeStamp()
{
    int64_t timeIntervalStampUs = 0;
    int64_t nowTimeUs = GetNowTimeStampUs();
    if (lastFeedDecoderInputBufferTimeUs_ == 0) {
        lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
        return timeIntervalStampUs;
    }
    timeIntervalStampUs = nowTimeUs - lastFeedDecoderInputBufferTimeUs_;
    lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
    return timeIntervalStampUs;
}

void DecodeDataProcess::IncreaseWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    availableInputIndexsQueue_.pop();
    availableInputBufferQueue_.pop();
    waitDecoderOutputCount_++;
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::ReduceWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitDecoderOutputCount_ <= 0) {
        DHLOGE("The waitDecoderOutputCount_ = %{public}d.", waitDecoderOutputCount_);
    }
    if (outputTimeStampUs_ == 0) {
        waitDecoderOutputCount_ -= FIRST_FRAME_INPUT_NUM;
    } else {
        waitDecoderOutputCount_--;
    }
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

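// Invoked when a decoded frame reaches the consumer surface (via DecodeSurfaceListener);
// the actual buffer handling is posted to the decode event handler.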
void DecodeDataProcess::OnSurfaceOutputBufferAvailable(const sptr<IConsumerSurface>& surface)
{
    auto sendFunc = [this, surface]() mutable {
        GetDecoderOutputBuffer(surface);
        DHLOGD("Execute GetDecoderOutputBuffer.");
    };
    if (decEventHandler_ != nullptr) {
        decEventHandler_->PostTask(sendFunc);
    }
}

void DecodeDataProcess::GetDecoderOutputBuffer(const sptr<IConsumerSurface>& surface)
{
    DHLOGD("Get decoder output buffer.");
    if (surface == nullptr) {
        DHLOGE("Get decode consumer surface failed.");
        return;
    }
    Rect damage = {0, 0, 0, 0};
    int32_t acquireFence = 0;
    int64_t timeStamp = 0;
    sptr<SurfaceBuffer> surfaceBuffer = nullptr;
    GSError ret = surface->AcquireBuffer(surfaceBuffer, acquireFence, timeStamp, damage);
    if (ret != GSERROR_OK || surfaceBuffer == nullptr) {
        DHLOGE("Acquire surface buffer failed!");
        return;
    }
    int32_t alignedWidth = surfaceBuffer->GetStride();
    if (surfaceBuffer->GetSize() > BUFFER_MAX_SIZE || alignedWidth > ALIGNED_WIDTH_MAX_SIZE) {
        DHLOGE("Surface buffer size or alignedWidth too large.");
        return;
    }
    int32_t alignedHeight = alignedHeight_;
    DHLOGD("OutputBuffer alignedWidth %{public}d, alignedHeight %{public}d, timeStamp %{public}" PRId64" ns.",
        alignedWidth, alignedHeight, timeStamp);
    CopyDecodedImage(surfaceBuffer, alignedWidth, alignedHeight);
    surface->ReleaseBuffer(surfaceBuffer, -1);
    outputTimeStampUs_ = timeStamp;
    ReduceWaitDecodeCnt();
}

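// Copies the decoded image out of the surface buffer into a DataBuffer sized for the processed
// pixel format, attaches the matching frame info, and forwards it downstream.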
void DecodeDataProcess::CopyDecodedImage(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (!IsCorrectSurfaceBuffer(surBuf, alignedWidth, alignedHeight)) {
        DHLOGE("Surface output buffer error.");
        return;
    }

    size_t imageSize = 0;
    if (processedConfig_.GetVideoformat() == Videoformat::RGBA_8888) {
        imageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
            RGB32_MEMORY_COEFFICIENT);
    } else {
        imageSize = static_cast<size_t>(
            sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    }
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(imageSize);
    uint8_t *addr = static_cast<uint8_t *>(surBuf->GetVirAddr());
    errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(), addr, imageSize);
    if (err != EOK) {
        DHLOGE("memcpy_s surface buffer failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        bufferOutput->frameInfo_ = frameInfoDeque_.front();
        frameInfoDeque_.pop_front();
    }
    bufferOutput->SetInt32("Videoformat", static_cast<int32_t>(processedConfig_.GetVideoformat()));
    bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth());
    bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight());
    bufferOutput->SetInt32("width", processedConfig_.GetWidth());
    bufferOutput->SetInt32("height", processedConfig_.GetHeight());
#ifdef DUMP_DCAMERA_FILE
    std::string fileName = "SourceAfterDecode_width(" + std::to_string(processedConfig_.GetWidth())
        + ")height(" + std::to_string(processedConfig_.GetHeight()) + ").yuv";
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + fileName) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + fileName, bufferOutput->Data(), bufferOutput->Size());
    }
#endif
    DumpFileUtil::WriteDumpFile(dumpDecAfterFile_, static_cast<void *>(bufferOutput->Data()), bufferOutput->Size());
    PostOutputDataBuffers(bufferOutput);
}

bool DecodeDataProcess::IsCorrectSurfaceBuffer(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (surBuf == nullptr) {
        DHLOGE("surface buffer is null!");
        return false;
    }

    if (processedConfig_.GetVideoformat() == Videoformat::RGBA_8888) {
        size_t rgbImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
            RGB32_MEMORY_COEFFICIENT);
        size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
        if (rgbImageSize > surfaceBufSize) {
            DHLOGE("Buffer size error, rgbImageSize %{public}zu, surBufSize %{public}" PRIu32, rgbImageSize,
                surBuf->GetSize());
            return false;
        }
    } else {
        size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
        size_t yuvImageAlignedSize = static_cast<size_t>(
            alignedWidth * alignedHeight * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
        size_t yuvImageSize = static_cast<size_t>(
            sourceConfig_.GetWidth() * sourceConfig_.GetHeight() * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
        if (yuvImageAlignedSize > surfaceBufSize || yuvImageAlignedSize < yuvImageSize) {
            DHLOGE("Buffer size error, yuvImageSize %{public}zu, yuvImageAlignedSize %{public}zu, surBufSize "
                "%{public}" PRIu32, yuvImageSize, yuvImageAlignedSize, surBuf->GetSize());
            return false;
        }
    }
    return true;
}

void DecodeDataProcess::PostOutputDataBuffers(std::shared_ptr<DataBuffer>& outputBuffer)
{
    if (decEventHandler_ == nullptr || outputBuffer == nullptr) {
        DHLOGE("decEventHandler_ or outputBuffer is null.");
        return;
    }
    auto sendFunc = [this, outputBuffer]() mutable {
        std::vector<std::shared_ptr<DataBuffer>> multiDataBuffers;
        multiDataBuffers.push_back(outputBuffer);
        int32_t ret = DecodeDone(multiDataBuffers);
        DHLOGD("Execute DecodeDone ret %{public}d.", ret);
    };
    decEventHandler_->PostTask(sendFunc);
    DHLOGD("Send video decoder output asynchronous DCameraCodecEvents success.");
}

int32_t DecodeDataProcess::DecodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Decoder Done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the decoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the decoder failed to process the data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node, output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void DecodeDataProcess::OnError()
{
    DHLOGD("DecodeDataProcess : OnError.");
    isDecoderProcess_.store(false);
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return;
    }
    targetPipelineSource->OnError(DataProcessErrorType::ERROR_PIPELINE_DECODER);
}

void DecodeDataProcess::OnInputBufferAvailable(uint32_t index, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    DHLOGD("DecodeDataProcess::OnInputBufferAvailable");
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (availableInputIndexsQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder available indexs queue overflow.");
        return;
    }
    DHLOGD("Video decoder available indexs queue push index [%{public}u].", index);
    availableInputIndexsQueue_.push(index);
    availableInputBufferQueue_.push(buffer);
}

void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video decoder output format is null.");
        return;
    }
    decodeOutputFormat_ = format;
}

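// AVCodec output callback: records the decode-finish timestamp on the oldest pending frame info
// and releases the output buffer to the surface so the consumer listener can pick it up.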
void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const MediaAVCodec::AVCodecBufferInfo& info,
    const MediaAVCodec::AVCodecBufferFlag& flag, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    int64_t finishDecodeT = GetNowTimeStampUs();
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node encountered an error or is being released.");
        return;
    }
    DHLOGD("Video decode buffer info: presentation TimeUs %{public}" PRId64", size %{public}d, offset %{public}d, flag "
        "%{public}" PRIu32, info.presentationTimeUs, info.size, info.offset, flag);
    outputInfo_ = info;
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        AlignFirstFrameTime();
        for (auto it = frameInfoDeque_.begin(); it != frameInfoDeque_.end(); it++) {
            DCameraFrameInfo frameInfo = *it;
            if (frameInfo.timePonit.finishDecode != 0) {
                continue;
            }
            frameInfo.timePonit.finishDecode = finishDecodeT;
            frameInfoDeque_.emplace(frameInfoDeque_.erase(it), frameInfo);
            break;
        }
    }
    {
        std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
        if (videoDecoder_ == nullptr) {
            DHLOGE("The video decoder does not exist before decoding data.");
            return;
        }
        int32_t errRelease = videoDecoder_->ReleaseOutputBuffer(index, true);
        if (errRelease != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
            DHLOGE("The video decoder output decoded data to surfacebuffer failed, index : [%{public}u].", index);
        }
    }
}

VideoConfigParams DecodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams DecodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t DecodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    return DCAMERA_OK;
}

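// The codec-data frame (FRAME_HEAD) produces no output of its own; fold its entry into the first
// real frame so the first decoded output maps to that frame's identity while keeping the earlier
// start-decode timestamp.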
void DecodeDataProcess::AlignFirstFrameTime()
{
    if (frameInfoDeque_.size() < FIRST_FRAME_INPUT_NUM) {
        return;
    }
    DCameraFrameInfo frameInfo = frameInfoDeque_.front();
    if (frameInfo.index != FRAME_HEAD || frameInfo.type != MediaAVCodec::AVCODEC_BUFFER_FLAG_CODEC_DATA) {
        return;
    }
    frameInfoDeque_.pop_front();
    DCameraFrameInfo front = frameInfoDeque_.front();
    frameInfo.index = front.index;
    frameInfo.pts = front.pts;
    frameInfo.offset = front.offset;
    frameInfo.type = front.type;
    frameInfo.ver = front.ver;
    frameInfo.timePonit.finishEncode = front.timePonit.finishEncode;
    frameInfoDeque_.emplace(frameInfoDeque_.erase(frameInfoDeque_.begin()), frameInfo);
}
} // namespace DistributedHardware
} // namespace OHOS