/*
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "decode_data_process.h"

#include "distributed_camera_constants.h"
#include "distributed_hardware_log.h"
#include "dcamera_hisysevent_adapter.h"
#include "dcamera_hidumper.h"
#include "dcamera_radar.h"
#include "decode_surface_listener.h"
#include "decode_video_callback.h"
#include "graphic_common_c.h"
#include <sys/prctl.h>

namespace OHOS {
namespace DistributedHardware {
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

DecodeDataProcess::~DecodeDataProcess()
{
    DumpFileUtil::CloseDumpFile(&dumpDecBeforeFile_);
    DumpFileUtil::CloseDumpFile(&dumpDecAfterFile_);
    if (isDecoderProcess_.load()) {
        DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

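/*
 * Initialize the decode node: validate that the source and target configs are within the
 * decoder's supported range, check that the codec types are convertible, and bypass
 * decoding entirely when source and target already use the same codec. Otherwise start
 * the event handler thread and create the video decoder.
 */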
int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera DecodeNode start.");
    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }

    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The DecodeNode can't convert %{public}d to %{public}d.", sourceConfig.GetVideoCodecType(),
            targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable DecodeNode. The target video codec type %{public}d is the same as the source video codec "
            "type %{public}d.", targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isDecoderProcess_.store(true);
        return DCAMERA_OK;
    }

    InitCodecEvent();
    int32_t err = InitDecoder();
    DcameraRadar::GetInstance().ReportDcameraOpenProgress("InitDecoder", CameraOpen::INIT_DECODE, err);
    if (err != DCAMERA_OK) {
        DHLOGE("Init video decoder failed.");
        ReleaseProcessNode();
        return err;
    }
    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
    processedConfig = processedConfig_;
    isDecoderProcess_.store(true);
    return DCAMERA_OK;
}

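/*
 * A config is only acceptable when its width, height and frame rate all lie within the
 * supported limits, so the individual checks below must be combined with logical AND.
 */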
bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        targetConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

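/*
 * Start the decode event thread and block until StartEventHandler() has published
 * decEventHandler_, so later PostTask() calls can never race with its creation.
 */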
void DecodeDataProcess::InitCodecEvent()
{
    DHLOGD("Init DecodeNode eventBus, and add handler for it.");
    eventThread_ = std::thread([this]() { this->StartEventHandler(); });
    std::unique_lock<std::mutex> lock(eventMutex_);
    eventCon_.wait(lock, [this] {
        return decEventHandler_ != nullptr;
    });
}

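/*
 * Event loop body: name the thread, create an EventRunner that is driven by the explicit
 * Run() call below, publish the EventHandler under eventMutex_, wake the waiting
 * initializer, then run the loop until ReleaseCodecEvent() stops the runner.
 */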
void DecodeDataProcess::StartEventHandler()
{
    prctl(PR_SET_NAME, DECODE_DATA_EVENT.c_str());
    auto runner = AppExecFwk::EventRunner::Create(false);
    if (runner == nullptr) {
        DHLOGE("Create runner failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(eventMutex_);
        decEventHandler_ = std::make_shared<AppExecFwk::EventHandler>(runner);
    }
    eventCon_.notify_one();
    runner->Run();
}

int32_t DecodeDataProcess::InitDecoder()
{
    DHLOGD("Init video decoder.");
    int32_t ret = ConfigureVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video decoder failed.");
        return ret;
    }

    ret = StartVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video decoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_DECODE_ERROR,
            CreateMsg("start video decoder failed, width: %d, height: %d, format: %s",
            sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }
    return DCAMERA_OK;
}

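/*
 * Configuration sequence: build the metadata format, create the decoder by MIME type,
 * register the asynchronous callback, apply the format, and finally bind the output
 * surface.
 */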
int32_t DecodeDataProcess::ConfigureVideoDecoder()
{
    int32_t ret = InitDecoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video decoder metadata format failed. ret %{public}d.", ret);
        return ret;
    }

    videoDecoder_ = MediaAVCodec::VideoDecoderFactory::CreateByMime(processType_);
    CHECK_AND_RETURN_RET_LOG(videoDecoder_ == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Create video decoder failed.");
    decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video decoder callback failed. ret %{public}d.", ret);
    ret = videoDecoder_->Configure(metadataFormat_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video decoder metadata format failed. ret %{public}d.", ret);
    ret = SetDecoderOutputSurface();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Set decoder output surface failed. ret %d.", ret);
    return DCAMERA_OK;
}

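/*
 * Map the source codec type to an AVCodec MIME string ("video/avc" for H.264,
 * "video/hevc" for H.265) and fill metadataFormat_ with the pixel format, size and frame
 * rate used to configure the decoder. The processed config advertises NO_CODEC/YUVI420
 * because this node hands raw I420 frames to the next node. For an illustrative
 * 1920x1080 H.264 source (example values only) the resulting format would carry:
 *   codec_mime = "video/avc", pixel_format = NV12, width = 1920, height = 1080,
 *   frame_rate = MAX_FRAME_RATE
 */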
int32_t DecodeDataProcess::InitDecoderMetadataFormat()
{
    DHLOGI("Init video decoder metadata format. codecType: %{public}d", sourceConfig_.GetVideoCodecType());
    processedConfig_ = sourceConfig_;
    processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC);
    processedConfig_.SetVideoformat(Videoformat::YUVI420);
    switch (sourceConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            break;
        default:
            DHLOGE("The current codec type does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV12));
    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth());
    metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight());
    metadataFormat_.PutDoubleValue("frame_rate", MAX_FRAME_RATE);

    return DCAMERA_OK;
}

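/*
 * Output path: create a consumer surface owned by this node, register
 * DecodeSurfaceListener so OnSurfaceOutputBufferAvailable() fires for every decoded
 * frame, then wrap the producer side and hand it to the decoder as its output surface.
 */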
int32_t DecodeDataProcess::SetDecoderOutputSurface()
{
    DHLOGD("Set the video decoder output surface.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder is null.");
        return DCAMERA_BAD_VALUE;
    }

    decodeConsumerSurface_ = IConsumerSurface::Create();
    if (decodeConsumerSurface_ == nullptr) {
        DHLOGE("Create the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeConsumerSurface_->SetDefaultWidthAndHeight(static_cast<int32_t>(sourceConfig_.GetWidth()),
        static_cast<int32_t>(sourceConfig_.GetHeight()));
    GSError ret = decodeConsumerSurface_->SetDefaultUsage(SurfaceBufferUsage::BUFFER_USAGE_MEM_MMZ_CACHE |
        SurfaceBufferUsage::BUFFER_USAGE_CPU_READ);
    CHECK_AND_LOG(ret != GSERROR_OK || decodeConsumerSurface_ == nullptr, "%{public}s", "Set Usage failed.");

    decodeSurfaceListener_ = new DecodeSurfaceListener(decodeConsumerSurface_, shared_from_this());
    if (decodeConsumerSurface_->RegisterConsumerListener(decodeSurfaceListener_) !=
        SURFACE_ERROR_OK) {
        DHLOGE("Register consumer listener failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<IBufferProducer> surfaceProducer = decodeConsumerSurface_->GetProducer();
    CHECK_AND_RETURN_RET_LOG(surfaceProducer == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Get the surface producer of the decode consumer surface failed.");
    decodeProducerSurface_ = Surface::CreateSurfaceAsProducer(surfaceProducer);
    if (decodeProducerSurface_ == nullptr) {
        DHLOGE("Create the decode producer surface of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    DHLOGD("Set the producer surface to video decoder output surface.");
    int32_t err = videoDecoder_->SetOutputSurface(decodeProducerSurface_);
    CHECK_AND_RETURN_RET_LOG(err != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR, "%{public}s",
        "Set decoder output surface failed.");
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StartVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StartVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoDecoder_->Prepare();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video decoder prepare failed. ret %{public}d.", ret);
    ret = videoDecoder_->Start();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video decoder start failed. ret %{public}d.", ret);
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StopVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StopVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoDecoder_->Flush();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder flush failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    ret = videoDecoder_->Stop();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoDecoder stop failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::ReleaseVideoDecoder()
{
    DHLOGD("Start release videoDecoder.");
    std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
    std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before ReleaseVideoDecoder.");
        decodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoDecoder();
    CHECK_AND_LOG(ret != DCAMERA_OK, "%{public}s", "StopVideoDecoder failed.");
    ret = videoDecoder_->Release();
    CHECK_AND_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "VideoDecoder release failed. ret %{public}d.", ret);
    videoDecoder_ = nullptr;
    decodeVideoCallback_ = nullptr;
}

void DecodeDataProcess::ReleaseDecoderSurface()
{
    if (decodeConsumerSurface_ == nullptr) {
        decodeProducerSurface_ = nullptr;
        DHLOGE("The decode consumer surface does not exist before UnregisterConsumerListener.");
        return;
    }
    int32_t ret = decodeConsumerSurface_->UnregisterConsumerListener();
    CHECK_AND_LOG(ret != SURFACE_ERROR_OK, "Unregister consumer listener failed. ret %d.", ret);
    decodeConsumerSurface_ = nullptr;
    decodeProducerSurface_ = nullptr;
}

void DecodeDataProcess::ReleaseCodecEvent()
{
    if ((decEventHandler_ != nullptr) && (decEventHandler_->GetEventRunner() != nullptr)) {
        decEventHandler_->GetEventRunner()->Stop();
        eventThread_.join();
    }
    decEventHandler_ = nullptr;
    pipeSrcEventHandler_ = nullptr;
    DHLOGD("Release DecodeNode eventBusDecode and eventBusPipeline end.");
}

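/*
 * Tear down the node: release the decoder, then the surfaces, then the event thread, and
 * clear all queued state before propagating the release to the next node in the pipeline.
 */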
void DecodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%{public}zu] node : DecodeNode.", nodeRank_);
    isDecoderProcess_.store(false);
    ReleaseVideoDecoder();
    ReleaseDecoderSurface();
    ReleaseCodecEvent();

    processType_ = "";
    std::queue<std::shared_ptr<DataBuffer>>().swap(inputBuffersQueue_);
    std::queue<uint32_t>().swap(availableInputIndexsQueue_);
    std::queue<std::shared_ptr<Media::AVSharedMemory>>().swap(availableInputBufferQueue_);
    std::deque<DCameraFrameInfo>().swap(frameInfoDeque_);
    waitDecoderOutputCount_ = 0;
    lastFeedDecoderInputBufferTimeUs_ = 0;
    outputTimeStampUs_ = 0;
    alignedHeight_ = 0;

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%{public}zu] node : DecodeNode end.", nodeRank_);
}

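/*
 * Pipeline entry point for encoded frames. Frames are queued in inputBuffersQueue_ and fed
 * to the decoder immediately; if feeding fails (for example, no codec input buffer is
 * available yet), a retry is posted to the pipeline source event handler after a short
 * sleep.
 */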
int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in DecodeDataProcess.");
    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_BEFORE_DEC_FILENAME, &dumpDecBeforeFile_);
    DumpFileUtil::OpenDumpFile(DUMP_SERVER_PARA, DUMP_DCAMERA_AFTER_DEC_FILENAME, &dumpDecAfterFile_);
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %{public}d is the same as the source VideoCodecType : %{public}d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return DecodeDone(inputBuffers);
    }

    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before decoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffersQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder input buffers queue overflow.");
        return DCAMERA_INDEX_OVERFLOW;
    }
    if (inputBuffers[0]->Size() > MAX_YUV420_BUFFER_SIZE) {
        DHLOGE("DecodeNode input buffer size %{public}zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node encountered an error or has started releasing.");
        return DCAMERA_DISABLE_PROCESS;
    }
    inputBuffersQueue_.push(inputBuffers[0]);
    DHLOGD("Push inputBuf success. BufSize %{public}zu, QueueSize %{public}zu.", inputBuffers[0]->Size(),
        inputBuffersQueue_.size());
    int32_t err = FeedDecoderInputBuffer();
    if (err != DCAMERA_OK) {
        int32_t sleepTimeUs = 5000;
        std::this_thread::sleep_for(std::chrono::microseconds(sleepTimeUs));
        DHLOGD("Feed decoder input buffer failed. Try FeedDecoderInputBuffer again.");
        auto sendFunc = [this]() mutable {
            int32_t ret = FeedDecoderInputBuffer();
            DHLOGD("Execute FeedDecoderInputBuffer ret %{public}d.", ret);
        };
        CHECK_AND_RETURN_RET_LOG(pipeSrcEventHandler_ == nullptr, DCAMERA_BAD_VALUE,
            "%{public}s", "pipeSrcEventHandler_ is nullptr.");
        pipeSrcEventHandler_->PostTask(sendFunc);
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::BeforeDecodeDump(uint8_t *buffer, size_t bufSize)
{
#ifdef DUMP_DCAMERA_FILE
    if (buffer == nullptr) {
        DHLOGE("dumpsaving : input param nullptr.");
        return;
    }
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + BEFORE_DECODE) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + BEFORE_DECODE, buffer, bufSize);
    }
#endif
    return;
}

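/*
 * Drain inputBuffersQueue_ while the decoder is running: pair each pending DataBuffer with
 * an available input index and shared memory reported by the codec, copy the encoded frame
 * in, and queue it with its pts. mtxDecoderLock_ guards against a concurrent release of
 * videoDecoder_.
 */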
int32_t DecodeDataProcess::FeedDecoderInputBuffer()
{
    DHLOGD("Feed decoder input buffer.");
    while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) {
        std::shared_ptr<DataBuffer> buffer = inputBuffersQueue_.front();
        if (buffer == nullptr || availableInputIndexsQueue_.empty() || availableInputBufferQueue_.empty()) {
            DHLOGE("inputBuffersQueue size %{public}zu, availableInputIndexsQueue size %{public}zu, "
                "availableInputBufferQueue size %{public}zu",
                inputBuffersQueue_.size(), availableInputIndexsQueue_.size(), availableInputBufferQueue_.size());
            return DCAMERA_BAD_VALUE;
        }
        buffer->frameInfo_.timePonit.startDecode = GetNowTimeStampUs();
        {
            std::lock_guard<std::mutex> lock(mtxDequeLock_);
            frameInfoDeque_.push_back(buffer->frameInfo_);
        }
        int64_t timeStamp = buffer->frameInfo_.pts;
        {
            std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
            CHECK_AND_RETURN_RET_LOG(
                videoDecoder_ == nullptr, DCAMERA_OK, "The video decoder does not exist before GetInputBuffer.");
            uint32_t index = availableInputIndexsQueue_.front();
            std::shared_ptr<Media::AVSharedMemory> sharedMemoryInput = availableInputBufferQueue_.front();
            if (sharedMemoryInput == nullptr) {
                DHLOGE("Failed to obtain the input shared memory corresponding to the [%{public}u] index.", index);
                return DCAMERA_BAD_VALUE;
            }
            BeforeDecodeDump(buffer->Data(), buffer->Size());
            DumpFileUtil::WriteDumpFile(dumpDecBeforeFile_, static_cast<void *>(buffer->Data()), buffer->Size());
            size_t inputMemoDataSize = static_cast<size_t>(sharedMemoryInput->GetSize());
            errno_t err = memcpy_s(sharedMemoryInput->GetBase(), inputMemoDataSize, buffer->Data(), buffer->Size());
            CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR, "memcpy_s buffer failed.");
            DHLOGD("Decoder input buffer size %{public}zu, timeStamp %{public}" PRId64"us.", buffer->Size(), timeStamp);
            MediaAVCodec::AVCodecBufferInfo bufferInfo {timeStamp, static_cast<int32_t>(buffer->Size()), 0};
            int32_t ret = videoDecoder_->QueueInputBuffer(index, bufferInfo,
                MediaAVCodec::AVCODEC_BUFFER_FLAG_NONE);
            if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
                DHLOGE("Queue input buffer failed.");
                return DCAMERA_BAD_OPERATE;
            }
        }

        inputBuffersQueue_.pop();
        DHLOGD("Queue decoder input buffer success. inputBuffersQueue size is %{public}zu.", inputBuffersQueue_.size());

        IncreaseWaitDecodeCnt();
    }
    return DCAMERA_OK;
}

int64_t DecodeDataProcess::GetDecoderTimeStamp()
{
    int64_t timeIntervalStampUs = 0;
    int64_t nowTimeUs = GetNowTimeStampUs();
    if (lastFeedDecoderInputBufferTimeUs_ == 0) {
        lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
        return timeIntervalStampUs;
    }
    timeIntervalStampUs = nowTimeUs - lastFeedDecoderInputBufferTimeUs_;
    lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
    return timeIntervalStampUs;
}

void DecodeDataProcess::IncreaseWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    availableInputIndexsQueue_.pop();
    availableInputBufferQueue_.pop();
    waitDecoderOutputCount_++;
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::ReduceWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitDecoderOutputCount_ <= 0) {
        DHLOGE("The waitDecoderOutputCount_ = %{public}d.", waitDecoderOutputCount_);
    }
    if (outputTimeStampUs_ == 0) {
        waitDecoderOutputCount_ -= FIRST_FRAME_INPUT_NUM;
    } else {
        waitDecoderOutputCount_--;
    }
    DHLOGD("Wait decoder output frames number is %{public}d.", waitDecoderOutputCount_);
}

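/*
 * Consumer-side output path: the surface listener reports a newly rendered buffer, the
 * work is posted to the decode event thread, and GetDecoderOutputBuffer() acquires the
 * buffer, converts it, and releases it back to the surface.
 */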
void DecodeDataProcess::OnSurfaceOutputBufferAvailable(const sptr<IConsumerSurface>& surface)
{
    auto sendFunc = [this, surface]() mutable {
        GetDecoderOutputBuffer(surface);
        DHLOGD("Execute GetDecoderOutputBuffer.");
    };
    if (decEventHandler_ != nullptr) {
        decEventHandler_->PostTask(sendFunc);
    }
}

void DecodeDataProcess::GetDecoderOutputBuffer(const sptr<IConsumerSurface>& surface)
{
    DHLOGD("Get decoder output buffer.");
    if (surface == nullptr) {
        DHLOGE("Get decode consumer surface failed.");
        return;
    }
    Rect damage = {0, 0, 0, 0};
    int32_t acquireFence = 0;
    int64_t timeStamp = 0;
    sptr<SurfaceBuffer> surfaceBuffer = nullptr;
    GSError ret = surface->AcquireBuffer(surfaceBuffer, acquireFence, timeStamp, damage);
    if (ret != GSERROR_OK || surfaceBuffer == nullptr) {
        DHLOGE("Acquire surface buffer failed!");
        return;
    }
    int32_t alignedWidth = surfaceBuffer->GetStride();
    if (surfaceBuffer->GetSize() > BUFFER_MAX_SIZE || alignedWidth > ALIGNED_WIDTH_MAX_SIZE) {
        DHLOGE("Surface buffer size or alignedWidth too large.");
        return;
    }
    int32_t alignedHeight = alignedHeight_;
    DHLOGD("OutputBuffer alignedWidth %{public}d, alignedHeight %{public}d, timeStamp %{public}" PRId64" ns.",
        alignedWidth, alignedHeight, timeStamp);
    CopyDecodedImage(surfaceBuffer, alignedWidth, alignedHeight);
    surface->ReleaseBuffer(surfaceBuffer, -1);
    outputTimeStampUs_ = timeStamp;
    ReduceWaitDecodeCnt();
}

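/*
 * Convert the decoder's NV12 surface buffer into a packed I420 DataBuffer. For width W and
 * height H (alignedWidth/alignedHeight on the source side):
 *   NV12 source : [ Y : W*H ][ interleaved UV : W*H/2 ]
 *   I420 target : [ Y : W*H ][ U : (W/2)*(H/2) ][ V : (W/2)*(H/2) ]
 * so the destination buffer holds dstSizeY * YUV_BYTES_PER_PIXEL / Y2UV_RATIO bytes.
 */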
void DecodeDataProcess::CopyDecodedImage(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (!IsCorrectSurfaceBuffer(surBuf, alignedWidth, alignedHeight)) {
        DHLOGE("Surface output buffer error.");
        return;
    }

    DHLOGD("Convert NV12 to I420, format=%{public}d, width=[%{public}d, %{public}d], height=[%{public}d, %{public}d]",
        sourceConfig_.GetVideoformat(), sourceConfig_.GetWidth(), alignedWidth, sourceConfig_.GetHeight(),
        alignedHeight);
    int srcSizeY = alignedWidth * alignedHeight;
    uint8_t *srcDataY = static_cast<uint8_t *>(surBuf->GetVirAddr());
    uint8_t *srcDataUV = static_cast<uint8_t *>(surBuf->GetVirAddr()) + srcSizeY;

    int dstSizeY = sourceConfig_.GetWidth() * sourceConfig_.GetHeight();
    int dstSizeUV = (static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV) *
                    (static_cast<uint32_t>(sourceConfig_.GetHeight()) >> MEMORY_RATIO_UV);
    std::shared_ptr<DataBuffer> bufferOutput =
        std::make_shared<DataBuffer>(dstSizeY * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    uint8_t *dstDataY = bufferOutput->Data();
    uint8_t *dstDataU = bufferOutput->Data() + dstSizeY;
    uint8_t *dstDataV = bufferOutput->Data() + dstSizeY + dstSizeUV;
    auto converter = ConverterHandle::GetInstance().GetHandle();
    CHECK_AND_RETURN_LOG(converter.NV12ToI420 == nullptr, "converter is null.");
    int32_t ret = converter.NV12ToI420(srcDataY, alignedWidth, srcDataUV, alignedWidth,
        dstDataY, sourceConfig_.GetWidth(),
        dstDataU, static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV,
        dstDataV, static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV,
        processedConfig_.GetWidth(), processedConfig_.GetHeight());
    if (ret != DCAMERA_OK) {
        DHLOGE("Convert NV12 to I420 failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        bufferOutput->frameInfo_ = frameInfoDeque_.front();
        frameInfoDeque_.pop_front();
    }
    bufferOutput->SetInt32("Videoformat", static_cast<int32_t>(Videoformat::YUVI420));
    bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth());
    bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight());
    bufferOutput->SetInt32("width", processedConfig_.GetWidth());
    bufferOutput->SetInt32("height", processedConfig_.GetHeight());
#ifdef DUMP_DCAMERA_FILE
    std::string fileName = "SourceAfterDecode_width(" + std::to_string(processedConfig_.GetWidth())
        + ")height(" + std::to_string(processedConfig_.GetHeight()) + ").yuv";
    if (DcameraHidumper::GetInstance().GetDumpFlag() && (IsUnderDumpMaxSize(DUMP_PATH + fileName) == DCAMERA_OK)) {
        DumpBufferToFile(DUMP_PATH + fileName, bufferOutput->Data(), bufferOutput->Size());
    }
#endif
    DumpFileUtil::WriteDumpFile(dumpDecAfterFile_, static_cast<void *>(bufferOutput->Data()), bufferOutput->Size());
    PostOutputDataBuffers(bufferOutput);
}

bool DecodeDataProcess::IsCorrectSurfaceBuffer(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (surBuf == nullptr) {
        DHLOGE("surface buffer is null!");
        return false;
    }

    size_t yuvImageAlignedSize = static_cast<size_t>(alignedWidth * alignedHeight *
                                                              YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t yuvImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
                                                       YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
    if (yuvImageAlignedSize > surfaceBufSize || yuvImageAlignedSize < yuvImageSize) {
        DHLOGE("Buffer size error, yuvImageSize %{public}zu, yuvImageAlignedSize %{public}zu, surBufSize %{public}"
            PRIu32, yuvImageSize, yuvImageAlignedSize, surBuf->GetSize());
        return false;
    }
    return true;
}

void DecodeDataProcess::PostOutputDataBuffers(std::shared_ptr<DataBuffer>& outputBuffer)
{
    if (decEventHandler_ == nullptr || outputBuffer == nullptr) {
        DHLOGE("decEventHandler_ or outputBuffer is null.");
        return;
    }
    auto sendFunc = [this, outputBuffer]() mutable {
        std::vector<std::shared_ptr<DataBuffer>> multiDataBuffers;
        multiDataBuffers.push_back(outputBuffer);
        int32_t ret = DecodeDone(multiDataBuffers);
        DHLOGD("Execute DecodeDone ret %{public}d.", ret);
    };
    decEventHandler_->PostTask(sendFunc);
    DHLOGD("Send video decoder output asynchronous DCameraCodecEvents success.");
}

int32_t DecodeDataProcess::DecodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Decoder Done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the decoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the decoder failed to process the data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void DecodeDataProcess::OnError()
{
    DHLOGD("DecodeDataProcess : OnError.");
    isDecoderProcess_.store(false);
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return;
    }
    targetPipelineSource->OnError(DataProcessErrorType::ERROR_PIPELINE_DECODER);
}

void DecodeDataProcess::OnInputBufferAvailable(uint32_t index, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    DHLOGD("DecodeDataProcess::OnInputBufferAvailable");
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (availableInputIndexsQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder available indexes queue overflow.");
        return;
    }
    DHLOGD("Video decoder available indexes queue push index [%{public}u].", index);
    availableInputIndexsQueue_.push(index);
    availableInputBufferQueue_.push(buffer);
}

void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video decoder output format is null.");
        return;
    }
    decodeOutputFormat_ = format;
}

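/*
 * Decoder callback for a finished frame: stamp finishDecode on the oldest pending entry in
 * frameInfoDeque_, then release the output buffer for rendering to the bound surface,
 * which in turn drives OnSurfaceOutputBufferAvailable().
 */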
void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const MediaAVCodec::AVCodecBufferInfo& info,
    const MediaAVCodec::AVCodecBufferFlag& flag, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    int64_t finishDecodeT = GetNowTimeStampUs();
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node encountered an error or has started releasing.");
        return;
    }
    DHLOGD("Video decode buffer info: presentation TimeUs %{public}" PRId64", size %{public}d, offset %{public}d, "
        "flag %{public}d", info.presentationTimeUs, info.size, info.offset, flag);
    outputInfo_ = info;
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        AlignFirstFrameTime();
        for (auto it = frameInfoDeque_.begin(); it != frameInfoDeque_.end(); it++) {
            DCameraFrameInfo frameInfo = *it;
            if (frameInfo.timePonit.finishDecode != 0) {
                continue;
            }
            frameInfo.timePonit.finishDecode = finishDecodeT;
            frameInfoDeque_.emplace(frameInfoDeque_.erase(it), frameInfo);
            break;
        }
    }
    {
        std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
        if (videoDecoder_ == nullptr) {
            DHLOGE("The video decoder does not exist before decoding data.");
            return;
        }
        int32_t errRelease = videoDecoder_->ReleaseOutputBuffer(index, true);
        if (errRelease != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
            DHLOGE("The video decoder failed to output decoded data to the surface buffer, index : [%{public}u].",
                index);
        }
    }
}

VideoConfigParams DecodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams DecodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t DecodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    return DCAMERA_OK;
}

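/*
 * The first queued entry may describe the codec-config packet (index FRAME_HEAD with the
 * CODEC_DATA flag), which produces no separate output frame. Merge its timing record into
 * the first real frame so latency statistics stay aligned with decoder outputs.
 */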
void DecodeDataProcess::AlignFirstFrameTime()
{
    if (frameInfoDeque_.empty()) {
        return;
    }
    DCameraFrameInfo frameInfo = frameInfoDeque_.front();
    if (frameInfo.index != FRAME_HEAD || frameInfo.type != MediaAVCodec::AVCODEC_BUFFER_FLAG_CODEC_DATA) {
        return;
    }
    frameInfoDeque_.pop_front();
    DCameraFrameInfo front = frameInfoDeque_.front();
    frameInfo.index = front.index;
    frameInfo.pts = front.pts;
    frameInfo.offset = front.offset;
    frameInfo.type = front.type;
    frameInfo.ver = front.ver;
    frameInfo.timePonit.finishEncode = front.timePonit.finishEncode;
    frameInfoDeque_.emplace(frameInfoDeque_.erase(frameInfoDeque_.begin()), frameInfo);
}
} // namespace DistributedHardware
} // namespace OHOS