/*
 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "decode_data_process.h"
#include "distributed_camera_constants.h"
#include "distributed_hardware_log.h"
#include "graphic_common_c.h"

#include "dcamera_hisysevent_adapter.h"
#include "dcamera_utils_tools.h"
#include "decode_surface_listener.h"
#include "decode_video_callback.h"

namespace OHOS {
namespace DistributedHardware {
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

DecodeDataProcess::~DecodeDataProcess()
{
    if (isDecoderProcess_.load()) {
        DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

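// InitNode validates the source/target configs, skips decoding entirely when the codec types
// already match, and otherwise sets up the codec event buses and the video decoder.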
int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera DecodeNode start.");
    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
        DHLOGE("Source config or target config are invalid.");
        return DCAMERA_BAD_VALUE;
    }
    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The DecodeNode can't convert %d to %d.", sourceConfig.GetVideoCodecType(),
            targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable DecodeNode. The target video codec type %d is the same as the source video codec type %d.",
            targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isDecoderProcess_.store(true);
        return DCAMERA_OK;
    }

    InitCodecEvent();
    int32_t err = InitDecoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video decoder failed.");
        ReleaseProcessNode();
        return err;
    }
    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
    processedConfig = processedConfig_;
    isDecoderProcess_.store(true);
    return DCAMERA_OK;
}

bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        targetConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

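// InitCodecEvent creates the decode event bus and registers this node as a handler on both the
// decode bus and the pipeline bus, so codec events can be posted and consumed asynchronously.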
void DecodeDataProcess::InitCodecEvent()
{
    DHLOGD("Init DecodeNode eventBus, and add handler for it.");
    eventBusDecode_ = std::make_shared<EventBus>("DeDtProcHandler");
    DCameraCodecEvent codecEvent(*this, std::make_shared<CodecPacket>());
    eventBusRegHandleDecode_ = eventBusDecode_->AddHandler<DCameraCodecEvent>(codecEvent.GetType(), *this);

    DHLOGD("Add handler for DCamera pipeline eventBus.");
    eventBusRegHandlePipeline2Decode_ = eventBusPipeline_->AddHandler<DCameraCodecEvent>(codecEvent.GetType(), *this);
}

int32_t DecodeDataProcess::InitDecoder()
{
    DHLOGD("Init video decoder.");
    int32_t ret = ConfigureVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video decoder failed.");
        return ret;
    }

    ret = StartVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start Video decoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_DECODE_ERROR,
            CreateMsg("start video decoder failed, width: %d, height: %d, format: %s", sourceConfig_.GetWidth(),
            sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::ConfigureVideoDecoder()
{
    int32_t ret = InitDecoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video decoder metadata format failed. Error code %d.", ret);
        return ret;
    }

    videoDecoder_ = Media::VideoDecoderFactory::CreateByMime(processType_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("Create video decoder failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video decoder callback failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoDecoder_->Configure(metadataFormat_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video decoder metadata format failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = SetDecoderOutputSurface();
    if (ret != DCAMERA_OK) {
        DHLOGE("Set decoder output surface failed. Error code %d.", ret);
        return ret;
    }

    return DCAMERA_OK;
}

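// InitDecoderMetadataFormat maps the source codec type to a MIME string, picks the output pixel
// format, and fills metadataFormat_ with the pixel format, max input size, resolution and frame
// rate that the decoder is configured with.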
int32_t DecodeDataProcess::InitDecoderMetadataFormat()
{
    DHLOGI("Init video decoder metadata format. codecType: %d", sourceConfig_.GetVideoCodecType());
    processedConfig_ = sourceConfig_;
    processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC);
    switch (sourceConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            processedConfig_.SetVideoformat(Videoformat::NV12);
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            processedConfig_.SetVideoformat(Videoformat::NV12);
            break;
        case VideoCodecType::CODEC_MPEG4_ES:
            processType_ = "video/mp4v-es";
            break;
        default:
            DHLOGE("The current codec type does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    DHLOGI("Init video decoder metadata format. videoformat: %d", processedConfig_.GetVideoformat());
    switch (processedConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::YUVI420);
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV12:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV12);
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV21:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV21);
            metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::RGBA_8888:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::RGBA);
            metadataFormat_.PutIntValue("max_input_size", MAX_RGB32_BUFFER_SIZE);
            break;
        default:
            DHLOGE("The current pixel format does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth());
    metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight());
    metadataFormat_.PutIntValue("frame_rate", MAX_FRAME_RATE);

    return DCAMERA_OK;
}

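// SetDecoderOutputSurface creates a consumer surface, registers a DecodeSurfaceListener on it so
// decoded frames are reported back to this node, and hands the matching producer surface to the
// video decoder as its output surface.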
int32_t DecodeDataProcess::SetDecoderOutputSurface()
{
    DHLOGD("Set the video decoder output surface.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder is null.");
        return DCAMERA_BAD_VALUE;
    }

    decodeConsumerSurface_ = IConsumerSurface::Create();
    if (decodeConsumerSurface_ == nullptr) {
        DHLOGE("Create the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeConsumerSurface_->SetDefaultWidthAndHeight(static_cast<int32_t>(sourceConfig_.GetWidth()),
        static_cast<int32_t>(sourceConfig_.GetHeight()));
    decodeSurfaceListener_ = new DecodeSurfaceListener(decodeConsumerSurface_, shared_from_this());
    if (decodeConsumerSurface_->RegisterConsumerListener(decodeSurfaceListener_) !=
        SURFACE_ERROR_OK) {
        DHLOGE("Register consumer listener failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<IBufferProducer> surfaceProducer = decodeConsumerSurface_->GetProducer();
    if (surfaceProducer == nullptr) {
        DHLOGE("Get the surface producer of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeProducerSurface_ = Surface::CreateSurfaceAsProducer(surfaceProducer);
    if (decodeProducerSurface_ == nullptr) {
        DHLOGE("Create the decode producer surface of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    DHLOGD("Set the producer surface to video decoder output surface.");
    int32_t err = videoDecoder_->SetOutputSurface(decodeProducerSurface_);
    if (err != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set decoder output surface failed.");
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StartVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StartVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoDecoder_->Prepare();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video decoder prepare failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    ret = videoDecoder_->Start();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video decoder start failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StopVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StopVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoDecoder_->Flush();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder flush failed. Error type: %d.", ret);
        isSuccess = false;
    }
    ret = videoDecoder_->Stop();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder stop failed. Error type: %d.", ret);
        isSuccess = false;
    }
    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::ReleaseVideoDecoder()
{
    DHLOGD("Start release videoDecoder.");
    std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
    std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before ReleaseVideoDecoder.");
        decodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("StopVideoDecoder failed.");
    }
    ret = videoDecoder_->Release();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder release failed. Error type: %d.", ret);
    }
    videoDecoder_ = nullptr;
    decodeVideoCallback_ = nullptr;
}

void DecodeDataProcess::ReleaseDecoderSurface()
{
    if (decodeConsumerSurface_ == nullptr) {
        decodeProducerSurface_ = nullptr;
        DHLOGE("The decode consumer surface does not exist before UnregisterConsumerListener.");
        return;
    }
    int32_t ret = decodeConsumerSurface_->UnregisterConsumerListener();
    if (ret != SURFACE_ERROR_OK) {
        DHLOGE("Unregister consumer listener failed. Error type: %d.", ret);
    }
    decodeConsumerSurface_ = nullptr;
    decodeProducerSurface_ = nullptr;
}

void DecodeDataProcess::ReleaseCodecEvent()
{
    DCameraCodecEvent codecEvent(*this, std::make_shared<CodecPacket>());
    if (eventBusDecode_ != nullptr) {
        eventBusDecode_->RemoveHandler<DCameraCodecEvent>(codecEvent.GetType(), eventBusRegHandleDecode_);
        eventBusRegHandleDecode_ = nullptr;
        eventBusDecode_ = nullptr;
    }
    if (eventBusPipeline_ != nullptr) {
        eventBusPipeline_->RemoveHandler<DCameraCodecEvent>(codecEvent.GetType(), eventBusRegHandlePipeline2Decode_);
        eventBusRegHandlePipeline2Decode_ = nullptr;
        eventBusPipeline_ = nullptr;
    }
    DHLOGD("Release DecodeNode eventBusDecode and eventBusPipeline end.");
}

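// ReleaseProcessNode stops decoding, tears down the decoder, surfaces and event buses, clears all
// queued buffers and counters, and then releases the next node in the pipeline chain.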
void DecodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%d] node : DecodeNode.", nodeRank_);
    isDecoderProcess_.store(false);
    ReleaseVideoDecoder();
    ReleaseDecoderSurface();
    ReleaseCodecEvent();

    processType_ = "";
    std::queue<std::shared_ptr<DataBuffer>>().swap(inputBuffersQueue_);
    std::queue<uint32_t>().swap(availableInputIndexsQueue_);
    std::deque<DCameraFrameInfo>().swap(frameInfoDeque_);
    waitDecoderOutputCount_ = 0;
    lastFeedDecoderInputBufferTimeUs_ = 0;
    outputTimeStampUs_ = 0;
    alignedHeight_ = 0;

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%d] node : DecodeNode end.", nodeRank_);
}

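// ProcessData is the node entry point: it forwards buffers untouched when no transcoding is
// needed, otherwise it enqueues the first input buffer and tries to feed the decoder; if the
// immediate feed fails, it sleeps briefly and posts an ACTION_ONCE_AGAIN event to the pipeline
// bus so the feed is retried asynchronously.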
int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in DecodeDataProcess.");
    if (inputBuffers.empty()) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The source VideoCodecType : %d is the same as the processed VideoCodecType : %d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return DecodeDone(inputBuffers);
    }

    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before decoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffersQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("video decoder input buffers queue overflow.");
        return DCAMERA_INDEX_OVERFLOW;
    }
    if (inputBuffers[0]->Size() > MAX_RGB32_BUFFER_SIZE) {
        DHLOGE("DecodeNode input buffer size %zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node occurred error or start release.");
        return DCAMERA_DISABLE_PROCESS;
    }
    inputBuffersQueue_.push(inputBuffers[0]);
    DHLOGD("Push inputBuf success. BufSize %zu, QueueSize %zu.", inputBuffers[0]->Size(), inputBuffersQueue_.size());
    int32_t err = FeedDecoderInputBuffer();
    if (err != DCAMERA_OK) {
        int32_t sleepTimeUs = 5000;
        std::this_thread::sleep_for(std::chrono::microseconds(sleepTimeUs));
        DHLOGD("Feed decoder input buffer failed. Try FeedDecoderInputBuffer again.");
        std::shared_ptr<CodecPacket> reFeedInputPacket = std::make_shared<CodecPacket>();
        reFeedInputPacket->SetVideoCodecType(sourceConfig_.GetVideoCodecType());
        DCameraCodecEvent dCamCodecEv(*this, reFeedInputPacket, VideoCodecAction::ACTION_ONCE_AGAIN);
        if (eventBusPipeline_ == nullptr) {
            DHLOGE("eventBusPipeline_ is nullptr.");
            return DCAMERA_BAD_VALUE;
        }
        eventBusPipeline_->PostEvent<DCameraCodecEvent>(dCamCodecEv, POSTMODE::POST_ASYNC);
    }
    return DCAMERA_OK;
}

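// FeedDecoderInputBuffer drains inputBuffersQueue_ while input slots are available: it records the
// start-decode timestamp, copies the packet into the decoder's shared input memory for the next
// free index, and queues it to the decoder before bumping the wait-for-output counter.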
int32_t DecodeDataProcess::FeedDecoderInputBuffer()
{
    DHLOGD("Feed decoder input buffer.");
    while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) {
        std::shared_ptr<DataBuffer> buffer = inputBuffersQueue_.front();
        if (buffer == nullptr || availableInputIndexsQueue_.empty()) {
            DHLOGE("inputBuffersQueue size %zu, availableInputIndexsQueue size %zu.",
                inputBuffersQueue_.size(), availableInputIndexsQueue_.size());
            return DCAMERA_BAD_VALUE;
        }
        buffer->frameInfo_.timePonit.startDecode = GetNowTimeStampUs();
        {
            std::lock_guard<std::mutex> lock(mtxDequeLock_);
            frameInfoDeque_.push_back(buffer->frameInfo_);
        }
        int64_t timeStamp = buffer->frameInfo_.pts;
        {
            std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
            if (videoDecoder_ == nullptr) {
                DHLOGE("The video decoder does not exist before GetInputBuffer.");
                return DCAMERA_OK;
            }
            uint32_t index = availableInputIndexsQueue_.front();
            std::shared_ptr<Media::AVSharedMemory> sharedMemoryInput = videoDecoder_->GetInputBuffer(index);
            if (sharedMemoryInput == nullptr) {
                DHLOGE("Failed to obtain the input shared memory corresponding to the [%u] index.", index);
                return DCAMERA_BAD_VALUE;
            }
            size_t inputMemoDataSize = static_cast<size_t>(sharedMemoryInput->GetSize());
            errno_t err = memcpy_s(sharedMemoryInput->GetBase(), inputMemoDataSize, buffer->Data(), buffer->Size());
            if (err != EOK) {
                DHLOGE("memcpy_s buffer failed.");
                return DCAMERA_MEMORY_OPT_ERROR;
            }
            DHLOGD("Decoder input buffer size %zu, timeStamp %ld us.", buffer->Size(), timeStamp);
            Media::AVCodecBufferInfo bufferInfo {timeStamp, static_cast<int32_t>(buffer->Size()), 0};
            int32_t ret = videoDecoder_->QueueInputBuffer(index, bufferInfo,
                Media::AVCODEC_BUFFER_FLAG_NONE);
            if (ret != Media::MediaServiceErrCode::MSERR_OK) {
                DHLOGE("queue Input buffer failed.");
                return DCAMERA_BAD_OPERATE;
            }
        }

        inputBuffersQueue_.pop();
        DHLOGD("Feed decoder input buffer success. inputBuffersQueue size is %zu.", inputBuffersQueue_.size());

        IncreaseWaitDecodeCnt();
    }
    return DCAMERA_OK;
}

int64_t DecodeDataProcess::GetDecoderTimeStamp()
{
    int64_t timeIntervalStampUs = 0;
    int64_t nowTimeUs = GetNowTimeStampUs();
    if (lastFeedDecoderInputBufferTimeUs_ == 0) {
        lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
        return timeIntervalStampUs;
    }
    timeIntervalStampUs = nowTimeUs - lastFeedDecoderInputBufferTimeUs_;
    lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
    return timeIntervalStampUs;
}

void DecodeDataProcess::IncreaseWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    availableInputIndexsQueue_.pop();
    waitDecoderOutputCount_++;
    DHLOGD("Wait decoder output frames number is %d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::ReduceWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitDecoderOutputCount_ <= 0) {
        DHLOGE("The waitDecoderOutputCount_ = %d.", waitDecoderOutputCount_);
    }
    if (outputTimeStampUs_ == 0) {
        waitDecoderOutputCount_ -= FIRST_FRAME_INPUT_NUM;
    } else {
        waitDecoderOutputCount_--;
    }
    DHLOGD("Wait decoder output frames number is %d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::OnSurfaceOutputBufferAvailable(const sptr<IConsumerSurface>& surface)
{
    std::shared_ptr<CodecPacket> bufferPkt = std::make_shared<CodecPacket>(surface);
    DCameraCodecEvent dCamCodecEv(*this, bufferPkt, VideoCodecAction::ACTION_GET_DECODER_OUTPUT_BUFFER);
    eventBusDecode_->PostEvent<DCameraCodecEvent>(dCamCodecEv, POSTMODE::POST_ASYNC);
}

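// GetDecoderOutputBuffer acquires the next decoded frame from the consumer surface, validates its
// size and stride, copies it out via CopyDecodedImage, then releases the surface buffer and
// updates the output timestamp and pending-frame counter.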
void DecodeDataProcess::GetDecoderOutputBuffer(const sptr<IConsumerSurface>& surface)
{
    DHLOGD("Get decoder output buffer.");
    if (surface == nullptr) {
        DHLOGE("Get decode consumer surface failed.");
        return;
    }
    Rect damage = {0, 0, 0, 0};
    int32_t acquireFence = 0;
    int64_t timeStamp = 0;
    sptr<SurfaceBuffer> surfaceBuffer = nullptr;
    GSError ret = surface->AcquireBuffer(surfaceBuffer, acquireFence, timeStamp, damage);
    if (ret != GSERROR_OK || surfaceBuffer == nullptr) {
        DHLOGE("Acquire surface buffer failed!");
        return;
    }
    int32_t alignedWidth = surfaceBuffer->GetStride();
    if (surfaceBuffer->GetSize() > BUFFER_MAX_SIZE || alignedWidth > ALIGNED_WIDTH_MAX_SIZE) {
        DHLOGE("surface buffer size or alignedWidth too long");
        return;
    }
    int32_t alignedHeight = alignedHeight_;
    DHLOGD("OutputBuffer alignedWidth %d, alignedHeight %d, timeStamp %ld ns.",
        alignedWidth, alignedHeight, timeStamp);
    CopyDecodedImage(surfaceBuffer, alignedWidth, alignedHeight);
    surface->ReleaseBuffer(surfaceBuffer, -1);
    outputTimeStampUs_ = timeStamp;
    ReduceWaitDecodeCnt();
}

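// CopyDecodedImage copies the decoded frame out of the surface buffer into a DataBuffer sized from
// the source resolution, attaches the matching frame info and format metadata, and posts the
// result to the next node through the decode event bus.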
void DecodeDataProcess::CopyDecodedImage(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (!IsCorrectSurfaceBuffer(surBuf, alignedWidth, alignedHeight)) {
        DHLOGE("Surface output buffer error.");
        return;
    }

    size_t rgbImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
        RGB32_MEMORY_COEFFICIENT);
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(rgbImageSize);
    uint8_t *addr = static_cast<uint8_t *>(surBuf->GetVirAddr());
    errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(), addr, rgbImageSize);
    if (err != EOK) {
        DHLOGE("memcpy_s surface buffer failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        bufferOutput->frameInfo_ = frameInfoDeque_.front();
        frameInfoDeque_.pop_front();
    }
    bufferOutput->SetInt32("Videoformat", static_cast<int32_t>(processedConfig_.GetVideoformat()));
    bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth());
    bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight());
    bufferOutput->SetInt32("width", processedConfig_.GetWidth());
    bufferOutput->SetInt32("height", processedConfig_.GetHeight());

    PostOutputDataBuffers(bufferOutput);
}

bool DecodeDataProcess::IsCorrectSurfaceBuffer(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (surBuf == nullptr) {
        DHLOGE("surface buffer is null!");
        return false;
    }

    size_t rgbImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
        RGB32_MEMORY_COEFFICIENT);
    size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
    if (rgbImageSize > surfaceBufSize) {
        DHLOGE("Buffer size error, rgbImageSize %zu, surBufSize %zu.", rgbImageSize, surfaceBufSize);
        return false;
    }
    return true;
}

void DecodeDataProcess::PostOutputDataBuffers(std::shared_ptr<DataBuffer>& outputBuffer)
{
    if (eventBusDecode_ == nullptr || outputBuffer == nullptr) {
        DHLOGE("eventBusDecode_ or outputBuffer is null.");
        return;
    }
    std::vector<std::shared_ptr<DataBuffer>> multiDataBuffers;
    multiDataBuffers.push_back(outputBuffer);
    std::shared_ptr<CodecPacket> transNextNodePacket = std::make_shared<CodecPacket>(VideoCodecType::NO_CODEC,
        multiDataBuffers);
    DCameraCodecEvent dCamCodecEv(*this, transNextNodePacket, VideoCodecAction::NO_ACTION);
    eventBusDecode_->PostEvent<DCameraCodecEvent>(dCamCodecEv, POSTMODE::POST_ASYNC);
    DHLOGD("Send video decoder output asynchronous DCameraCodecEvents success.");
}

int32_t DecodeDataProcess::DecodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Decoder Done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the decoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the decoder failed to process the data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node. Output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

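// OnEvent dispatches asynchronous DCameraCodecEvents: NO_ACTION forwards decoded buffers to
// DecodeDone, ACTION_ONCE_AGAIN retries feeding the decoder, and
// ACTION_GET_DECODER_OUTPUT_BUFFER pulls the next frame from the consumer surface.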
void DecodeDataProcess::OnEvent(DCameraCodecEvent& ev)
{
    DHLOGD("Receiving asynchronous DCameraCodecEvents.");
    std::shared_ptr<CodecPacket> receivedCodecPacket = ev.GetCodecPacket();
    VideoCodecAction action = ev.GetAction();
    switch (action) {
        case VideoCodecAction::NO_ACTION: {
            if (receivedCodecPacket == nullptr) {
                DHLOGE("the received codecPacket of action [%d] is null.", action);
                OnError();
                return;
            }
            std::vector<std::shared_ptr<DataBuffer>> dataBuffers = receivedCodecPacket->GetDataBuffers();
            DecodeDone(dataBuffers);
            break;
        }
        case VideoCodecAction::ACTION_ONCE_AGAIN:
            DHLOGD("Try FeedDecoderInputBuffer again.");
            FeedDecoderInputBuffer();
            return;
        case VideoCodecAction::ACTION_GET_DECODER_OUTPUT_BUFFER:
            if (receivedCodecPacket == nullptr) {
                DHLOGE("the received codecPacket of action [%d] is null.", action);
                OnError();
                return;
            }
            GetDecoderOutputBuffer(receivedCodecPacket->GetSurface());
            break;
        default:
            DHLOGD("The action : %d is not supported.", action);
            return;
    }
}

void DecodeDataProcess::OnError()
{
    DHLOGD("DecodeDataProcess : OnError.");
    isDecoderProcess_.store(false);
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return;
    }
    targetPipelineSource->OnError(DataProcessErrorType::ERROR_PIPELINE_DECODER);
}

void DecodeDataProcess::OnInputBufferAvailable(uint32_t index)
{
    DHLOGD("DecodeDataProcess::OnInputBufferAvailable");
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (availableInputIndexsQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder available indexes queue overflow.");
        return;
    }
    DHLOGD("Video decoder available indexes queue push index [%u].", index);
    availableInputIndexsQueue_.push(index);
}

void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The first changed video decoder output format is null.");
        return;
    }
    decodeOutputFormat_ = format;
}

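// OnOutputBufferAvailable stamps the finish-decode time on the oldest pending frame info and then
// releases the output buffer back to the decoder with render enabled, which pushes the decoded
// frame to the output surface.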
void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const Media::AVCodecBufferInfo& info,
    const Media::AVCodecBufferFlag& flag)
{
    int64_t finishDecodeT = GetNowTimeStampUs();
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node occurred error or start release.");
        return;
    }
    DHLOGD("Video decode buffer info: presentation TimeUs %lld, size %d, offset %d, flag %d",
        info.presentationTimeUs, info.size, info.offset, flag);
    outputInfo_ = info;
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        AlignFirstFrameTime();
        for (auto it = frameInfoDeque_.begin(); it != frameInfoDeque_.end(); it++) {
            DCameraFrameInfo frameInfo = *it;
            if (frameInfo.timePonit.finishDecode != 0) {
                continue;
            }
            frameInfo.timePonit.finishDecode = finishDecodeT;
            frameInfoDeque_.emplace(frameInfoDeque_.erase(it), frameInfo);
            break;
        }
    }
    {
        std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
        if (videoDecoder_ == nullptr) {
            DHLOGE("The video decoder does not exist before decoding data.");
            return;
        }
        int32_t errRelease = videoDecoder_->ReleaseOutputBuffer(index, true);
        if (errRelease != Media::MediaServiceErrCode::MSERR_OK) {
            DHLOGE("The video decoder output decoded data to surface failed, index : [%u].", index);
        }
    }
}

VideoConfigParams DecodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams DecodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t DecodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    return DCAMERA_OK;
}

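// AlignFirstFrameTime merges the codec-data (FRAME_HEAD) entry at the front of frameInfoDeque_
// into the first real frame's info, so the first decoded frame keeps the correct index, pts and
// encode timing while preserving the recorded decode start time.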
void DecodeDataProcess::AlignFirstFrameTime()
{
    if (frameInfoDeque_.empty()) {
        return;
    }
    DCameraFrameInfo frameInfo = frameInfoDeque_.front();
    if (frameInfo.index != FRAME_HEAD || frameInfo.type != Media::AVCODEC_BUFFER_FLAG_CODEC_DATA) {
        return;
    }
    frameInfoDeque_.pop_front();
    DCameraFrameInfo front = frameInfoDeque_.front();
    frameInfo.index = front.index;
    frameInfo.pts = front.pts;
    frameInfo.offset = front.offset;
    frameInfo.type = front.type;
    frameInfo.ver = front.ver;
    frameInfo.timePonit.finishEncode = front.timePonit.finishEncode;
    frameInfoDeque_.emplace(frameInfoDeque_.erase(frameInfoDeque_.begin()), frameInfo);
}
} // namespace DistributedHardware
} // namespace OHOS