/*
 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "decode_data_process.h"

#include "libyuv.h"
#include "distributed_camera_constants.h"
#include "distributed_hardware_log.h"
#include "dcamera_hisysevent_adapter.h"
#include "dcamera_utils_tools.h"
#include "decode_surface_listener.h"
#include "decode_video_callback.h"
#include "graphic_common_c.h"

namespace OHOS {
namespace DistributedHardware {
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

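/*
 * Decode node overview: encoded H.264/H.265 frames are queued to an OHOS Media video
 * decoder, the decoded NV12 frames come back through a consumer surface, they are
 * converted to YUVI420 with libyuv, and the result is handed to the next pipeline node
 * (or to the pipeline source callback when this is the last node). Event buses decouple
 * input feeding, output retrieval, and retry handling.
 */
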
DecodeDataProcess::~DecodeDataProcess()
{
    if (isDecoderProcess_.load()) {
        DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera DecodeNode start.");
    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }
    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The DecodeNode can't convert %d to %d.", sourceConfig.GetVideoCodecType(),
            targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable DecodeNode. The target video codec type %d is the same as the source video codec type %d.",
            targetConfig_.GetVideoCodecType(), sourceConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isDecoderProcess_.store(true);
        return DCAMERA_OK;
    }

    InitCodecEvent();
    int32_t err = InitDecoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video decoder failed.");
        ReleaseProcessNode();
        return err;
    }
    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
    processedConfig = processedConfig_;
    isDecoderProcess_.store(true);
    return DCAMERA_OK;
}

bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        targetConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

void DecodeDataProcess::InitCodecEvent()
{
    DHLOGD("Init DecodeNode eventBus, and add handler for it.");
    eventBusDecode_ = std::make_shared<EventBus>("DeDtProcHandler");
    DCameraCodecEvent codecEvent(*this, std::make_shared<CodecPacket>());
    eventBusRegHandleDecode_ = eventBusDecode_->AddHandler<DCameraCodecEvent>(codecEvent.GetType(), *this);

    DHLOGD("Add handler for DCamera pipeline eventBus.");
    eventBusRegHandlePipeline2Decode_ = eventBusPipeline_->AddHandler<DCameraCodecEvent>(codecEvent.GetType(), *this);
}

int32_t DecodeDataProcess::InitDecoder()
{
    DHLOGD("Init video decoder.");
    int32_t ret = ConfigureVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video decoder failed.");
        return ret;
    }

    ret = StartVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video decoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_DECODE_ERROR,
            CreateMsg("start video decoder failed, width: %d, height: %d, format: %s", sourceConfig_.GetWidth(),
            sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::ConfigureVideoDecoder()
{
    int32_t ret = InitDecoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video decoder metadata format failed. Error code %d.", ret);
        return ret;
    }

    videoDecoder_ = Media::VideoDecoderFactory::CreateByMime(processType_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("Create video decoder failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video decoder callback failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoDecoder_->Configure(metadataFormat_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video decoder metadata format failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = SetDecoderOutputSurface();
    if (ret != DCAMERA_OK) {
        DHLOGE("Set decoder output surface failed. Error code %d.", ret);
        return ret;
    }

    return DCAMERA_OK;
}

int32_t DecodeDataProcess::InitDecoderMetadataFormat()
{
    DHLOGI("Init video decoder metadata format. codecType: %d", sourceConfig_.GetVideoCodecType());
    processedConfig_ = sourceConfig_;
    processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC);
    switch (sourceConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            processedConfig_.SetVideoformat(Videoformat::NV12);
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            processedConfig_.SetVideoformat(Videoformat::NV12);
            break;
        case VideoCodecType::CODEC_MPEG4_ES:
            processType_ = "video/mp4v-es";
            break;
        default:
            DHLOGE("The current codec type does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV12);
    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth());
    metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight());
    metadataFormat_.PutIntValue("frame_rate", MAX_FRAME_RATE);

    return DCAMERA_OK;
}

int32_t DecodeDataProcess::SetDecoderOutputSurface()
{
    DHLOGD("Set the video decoder output surface.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder is null.");
        return DCAMERA_BAD_VALUE;
    }

    decodeConsumerSurface_ = IConsumerSurface::Create();
    if (decodeConsumerSurface_ == nullptr) {
        DHLOGE("Create the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeConsumerSurface_->SetDefaultWidthAndHeight(static_cast<int32_t>(sourceConfig_.GetWidth()),
        static_cast<int32_t>(sourceConfig_.GetHeight()));
    decodeSurfaceListener_ = new DecodeSurfaceListener(decodeConsumerSurface_, shared_from_this());
    if (decodeConsumerSurface_->RegisterConsumerListener(decodeSurfaceListener_) !=
        SURFACE_ERROR_OK) {
        DHLOGE("Register consumer listener failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<IBufferProducer> surfaceProducer = decodeConsumerSurface_->GetProducer();
    if (surfaceProducer == nullptr) {
        DHLOGE("Get the surface producer of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeProducerSurface_ = Surface::CreateSurfaceAsProducer(surfaceProducer);
    if (decodeProducerSurface_ == nullptr) {
        DHLOGE("Create the decode producer surface of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    DHLOGD("Set the producer surface to video decoder output surface.");
    int32_t err = videoDecoder_->SetOutputSurface(decodeProducerSurface_);
    if (err != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set decoder output surface failed.");
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StartVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StartVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoDecoder_->Prepare();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video decoder prepare failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    ret = videoDecoder_->Start();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video decoder start failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StopVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StopVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoDecoder_->Flush();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder flush failed. Error type: %d.", ret);
        isSuccess = false;
    }
    ret = videoDecoder_->Stop();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder stop failed. Error type: %d.", ret);
        isSuccess = false;
    }
    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::ReleaseVideoDecoder()
{
    DHLOGD("Start release videoDecoder.");
    std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
    std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before ReleaseVideoDecoder.");
        decodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("StopVideoDecoder failed.");
    }
    ret = videoDecoder_->Release();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder release failed. Error type: %d.", ret);
    }
    videoDecoder_ = nullptr;
    decodeVideoCallback_ = nullptr;
}

void DecodeDataProcess::ReleaseDecoderSurface()
{
    if (decodeConsumerSurface_ == nullptr) {
        decodeProducerSurface_ = nullptr;
        DHLOGE("The decode consumer surface does not exist before UnregisterConsumerListener.");
        return;
    }
    int32_t ret = decodeConsumerSurface_->UnregisterConsumerListener();
    if (ret != SURFACE_ERROR_OK) {
        DHLOGE("Unregister consumer listener failed. Error type: %d.", ret);
    }
    decodeConsumerSurface_ = nullptr;
    decodeProducerSurface_ = nullptr;
}

void DecodeDataProcess::ReleaseCodecEvent()
{
    DCameraCodecEvent codecEvent(*this, std::make_shared<CodecPacket>());
    if (eventBusDecode_ != nullptr) {
        eventBusDecode_->RemoveHandler<DCameraCodecEvent>(codecEvent.GetType(), eventBusRegHandleDecode_);
        eventBusRegHandleDecode_ = nullptr;
        eventBusDecode_ = nullptr;
    }
    if (eventBusPipeline_ != nullptr) {
        eventBusPipeline_->RemoveHandler<DCameraCodecEvent>(codecEvent.GetType(), eventBusRegHandlePipeline2Decode_);
        eventBusRegHandlePipeline2Decode_ = nullptr;
        eventBusPipeline_ = nullptr;
    }
    DHLOGD("Release DecodeNode eventBusDecode and eventBusPipeline end.");
}

void DecodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%d] node : DecodeNode.", nodeRank_);
    isDecoderProcess_.store(false);
    ReleaseVideoDecoder();
    ReleaseDecoderSurface();
    ReleaseCodecEvent();

    processType_ = "";
    std::queue<std::shared_ptr<DataBuffer>>().swap(inputBuffersQueue_);
    std::queue<uint32_t>().swap(availableInputIndexsQueue_);
    std::deque<DCameraFrameInfo>().swap(frameInfoDeque_);
    waitDecoderOutputCount_ = 0;
    lastFeedDecoderInputBufferTimeUs_ = 0;
    outputTimeStampUs_ = 0;
    alignedHeight_ = 0;

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%d] node : DecodeNode end.", nodeRank_);
}

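/*
 * ProcessData queues one encoded buffer and tries to feed the decoder immediately. If
 * feeding fails (for example, no input index is available yet), it waits 5 ms and posts
 * an ACTION_ONCE_AGAIN event to the pipeline event bus so the feed is retried
 * asynchronously instead of blocking the caller.
 */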
int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in DecodeDataProcess.");
    if (inputBuffers.empty()) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return DecodeDone(inputBuffers);
    }

    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before decoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffersQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder input buffers queue overflow.");
        return DCAMERA_INDEX_OVERFLOW;
    }
    if (inputBuffers[0]->Size() > MAX_YUV420_BUFFER_SIZE) {
        DHLOGE("DecodeNode input buffer size %zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node has encountered an error or started to release.");
        return DCAMERA_DISABLE_PROCESS;
    }
    inputBuffersQueue_.push(inputBuffers[0]);
    DHLOGD("Push inputBuf success. BufSize %zu, QueueSize %zu.", inputBuffers[0]->Size(), inputBuffersQueue_.size());
    int32_t err = FeedDecoderInputBuffer();
    if (err != DCAMERA_OK) {
        int32_t sleepTimeUs = 5000;
        std::this_thread::sleep_for(std::chrono::microseconds(sleepTimeUs));
        DHLOGD("Feed decoder input buffer failed. Try FeedDecoderInputBuffer again.");
        std::shared_ptr<CodecPacket> reFeedInputPacket = std::make_shared<CodecPacket>();
        reFeedInputPacket->SetVideoCodecType(sourceConfig_.GetVideoCodecType());
        DCameraCodecEvent dCamCodecEv(*this, reFeedInputPacket, VideoCodecAction::ACTION_ONCE_AGAIN);
        if (eventBusPipeline_ == nullptr) {
            DHLOGE("eventBusPipeline_ is nullptr.");
            return DCAMERA_BAD_VALUE;
        }
        eventBusPipeline_->PostEvent<DCameraCodecEvent>(dCamCodecEv, POSTMODE::POST_ASYNC);
    }
    return DCAMERA_OK;
}

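/*
 * FeedDecoderInputBuffer drains inputBuffersQueue_: for each pending buffer it records the
 * decode start time, remembers the frame info for later timing bookkeeping, copies the
 * encoded data into the decoder's shared input memory at the next available input index,
 * and queues that index to the decoder with the frame's pts.
 */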
int32_t DecodeDataProcess::FeedDecoderInputBuffer()
{
    DHLOGD("Feed decoder input buffer.");
    while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) {
        std::shared_ptr<DataBuffer> buffer = inputBuffersQueue_.front();
        if (buffer == nullptr || availableInputIndexsQueue_.empty()) {
            DHLOGE("inputBuffersQueue size %zu, availableInputIndexsQueue size %zu.",
                inputBuffersQueue_.size(), availableInputIndexsQueue_.size());
            return DCAMERA_BAD_VALUE;
        }
        buffer->frameInfo_.timePonit.startDecode = GetNowTimeStampUs();
        {
            std::lock_guard<std::mutex> lock(mtxDequeLock_);
            frameInfoDeque_.push_back(buffer->frameInfo_);
        }
        int64_t timeStamp = buffer->frameInfo_.pts;
        {
            std::lock_guard<std::mutex> inputLock(mtxDecoderLock_);
            if (videoDecoder_ == nullptr) {
                DHLOGE("The video decoder does not exist before GetInputBuffer.");
                return DCAMERA_OK;
            }
            uint32_t index = availableInputIndexsQueue_.front();
            std::shared_ptr<Media::AVSharedMemory> sharedMemoryInput = videoDecoder_->GetInputBuffer(index);
            if (sharedMemoryInput == nullptr) {
                DHLOGE("Failed to obtain the input shared memory corresponding to the [%u] index.", index);
                return DCAMERA_BAD_VALUE;
            }
            size_t inputMemoDataSize = static_cast<size_t>(sharedMemoryInput->GetSize());
            errno_t err = memcpy_s(sharedMemoryInput->GetBase(), inputMemoDataSize, buffer->Data(), buffer->Size());
            if (err != EOK) {
                DHLOGE("memcpy_s buffer failed.");
                return DCAMERA_MEMORY_OPT_ERROR;
            }
            DHLOGD("Decoder input buffer size %zu, timeStamp %ld us.", buffer->Size(), timeStamp);
            Media::AVCodecBufferInfo bufferInfo {timeStamp, static_cast<int32_t>(buffer->Size()), 0};
            int32_t ret = videoDecoder_->QueueInputBuffer(index, bufferInfo,
                Media::AVCODEC_BUFFER_FLAG_NONE);
            if (ret != Media::MediaServiceErrCode::MSERR_OK) {
                DHLOGE("Queue input buffer failed.");
                return DCAMERA_BAD_OPERATE;
            }
        }

        inputBuffersQueue_.pop();
        DHLOGD("Feed decoder input buffer success. inputBuffersQueue size is %zu.", inputBuffersQueue_.size());

        IncreaseWaitDecodeCnt();
    }
    return DCAMERA_OK;
}

int64_t DecodeDataProcess::GetDecoderTimeStamp()
{
    int64_t timeIntervalStampUs = 0;
    int64_t nowTimeUs = GetNowTimeStampUs();
    if (lastFeedDecoderInputBufferTimeUs_ == 0) {
        lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
        return timeIntervalStampUs;
    }
    timeIntervalStampUs = nowTimeUs - lastFeedDecoderInputBufferTimeUs_;
    lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
    return timeIntervalStampUs;
}

void DecodeDataProcess::IncreaseWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    availableInputIndexsQueue_.pop();
    waitDecoderOutputCount_++;
    DHLOGD("Wait decoder output frames number is %d.", waitDecoderOutputCount_);
}

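/*
 * ReduceWaitDecodeCnt subtracts FIRST_FRAME_INPUT_NUM for the very first output
 * (outputTimeStampUs_ still 0), presumably because the first decoded frame consumes both
 * the codec-config input and the first real frame; later outputs decrement by one.
 */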
void DecodeDataProcess::ReduceWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitDecoderOutputCount_ <= 0) {
        DHLOGE("The waitDecoderOutputCount_ = %d.", waitDecoderOutputCount_);
    }
    if (outputTimeStampUs_ == 0) {
        waitDecoderOutputCount_ -= FIRST_FRAME_INPUT_NUM;
    } else {
        waitDecoderOutputCount_--;
    }
    DHLOGD("Wait decoder output frames number is %d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::OnSurfaceOutputBufferAvailable(const sptr<IConsumerSurface>& surface)
{
    std::shared_ptr<CodecPacket> bufferPkt = std::make_shared<CodecPacket>(surface);
    DCameraCodecEvent dCamCodecEv(*this, bufferPkt, VideoCodecAction::ACTION_GET_DECODER_OUTPUT_BUFFER);
    eventBusDecode_->PostEvent<DCameraCodecEvent>(dCamCodecEv, POSTMODE::POST_ASYNC);
}

void DecodeDataProcess::GetDecoderOutputBuffer(const sptr<IConsumerSurface>& surface)
{
    DHLOGD("Get decoder output buffer.");
    if (surface == nullptr) {
        DHLOGE("Get decode consumer surface failed.");
        return;
    }
    Rect damage = {0, 0, 0, 0};
    int32_t acquireFence = 0;
    int64_t timeStamp = 0;
    sptr<SurfaceBuffer> surfaceBuffer = nullptr;
    GSError ret = surface->AcquireBuffer(surfaceBuffer, acquireFence, timeStamp, damage);
    if (ret != GSERROR_OK || surfaceBuffer == nullptr) {
        DHLOGE("Acquire surface buffer failed!");
        return;
    }
    int32_t alignedWidth = surfaceBuffer->GetStride();
    if (surfaceBuffer->GetSize() > BUFFER_MAX_SIZE || alignedWidth > ALIGNED_WIDTH_MAX_SIZE) {
        DHLOGE("Surface buffer size or alignedWidth too large.");
        return;
    }
    int32_t alignedHeight = alignedHeight_;
    DHLOGD("OutputBuffer alignedWidth %d, alignedHeight %d, timeStamp %ld ns.",
        alignedWidth, alignedHeight, timeStamp);
    CopyDecodedImage(surfaceBuffer, alignedWidth, alignedHeight);
    surface->ReleaseBuffer(surfaceBuffer, -1);
    outputTimeStampUs_ = timeStamp;
    ReduceWaitDecodeCnt();
}

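/*
 * Plane layout used by CopyDecodedImage: the decoder writes NV12 with an aligned stride,
 * so the source UV plane starts at alignedWidth * alignedHeight. The destination I420
 * buffer is packed at the source resolution: a Y plane of width * height bytes followed by
 * U and V planes of (width / 2) * (height / 2) bytes each, i.e. width * height * 3 / 2
 * bytes in total, which is presumably what dstSizeY * YUV_BYTES_PER_PIXEL / Y2UV_RATIO
 * expresses below.
 */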
void DecodeDataProcess::CopyDecodedImage(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (!IsCorrectSurfaceBuffer(surBuf, alignedWidth, alignedHeight)) {
        DHLOGE("Surface output buffer error.");
        return;
    }

    DHLOGD("Convert NV12 to I420, format=%d, width=[%d, %d], height=[%d, %d]", sourceConfig_.GetVideoformat(),
        sourceConfig_.GetWidth(), alignedWidth, sourceConfig_.GetHeight(), alignedHeight);
    int srcSizeY = alignedWidth * alignedHeight;
    uint8_t *srcDataY = static_cast<uint8_t *>(surBuf->GetVirAddr());
    uint8_t *srcDataUV = static_cast<uint8_t *>(surBuf->GetVirAddr()) + srcSizeY;

    int dstSizeY = sourceConfig_.GetWidth() * sourceConfig_.GetHeight();
    int dstSizeUV = (static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV) *
                    (static_cast<uint32_t>(sourceConfig_.GetHeight()) >> MEMORY_RATIO_UV);
    std::shared_ptr<DataBuffer> bufferOutput =
        std::make_shared<DataBuffer>(dstSizeY * YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    uint8_t *dstDataY = bufferOutput->Data();
    uint8_t *dstDataU = bufferOutput->Data() + dstSizeY;
    uint8_t *dstDataV = bufferOutput->Data() + dstSizeY + dstSizeUV;

    int32_t ret = libyuv::NV12ToI420(
        srcDataY, alignedWidth,
        srcDataUV, alignedWidth,
        dstDataY, sourceConfig_.GetWidth(),
        dstDataU, static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV,
        dstDataV, static_cast<uint32_t>(sourceConfig_.GetWidth()) >> MEMORY_RATIO_UV,
        processedConfig_.GetWidth(), processedConfig_.GetHeight());
    if (ret != DCAMERA_OK) {
        DHLOGE("Convert NV12 to I420 failed.");
        return;
    }
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        bufferOutput->frameInfo_ = frameInfoDeque_.front();
        frameInfoDeque_.pop_front();
    }
    bufferOutput->SetInt32("Videoformat", static_cast<int32_t>(Videoformat::YUVI420));
    bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth());
    bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight());
    bufferOutput->SetInt32("width", processedConfig_.GetWidth());
    bufferOutput->SetInt32("height", processedConfig_.GetHeight());

    PostOutputDataBuffers(bufferOutput);
}

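/*
 * Sanity check before copying: the surface buffer must be at least as large as an aligned
 * YUV420 image (alignedWidth * alignedHeight * YUV_BYTES_PER_PIXEL / Y2UV_RATIO), and the
 * aligned image must be at least as large as the unaligned source image, otherwise the
 * NV12 read above would overrun or truncate.
 */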
bool DecodeDataProcess::IsCorrectSurfaceBuffer(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (surBuf == nullptr) {
        DHLOGE("surface buffer is null!");
        return false;
    }

    size_t yuvImageAlignedSize = static_cast<size_t>(alignedWidth * alignedHeight *
                                                     YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t yuvImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
                                              YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
    if (yuvImageAlignedSize > surfaceBufSize || yuvImageAlignedSize < yuvImageSize) {
        DHLOGE("Buffer size error, yuvImageSize %zu, yuvImageAlignedSize %zu, surBufSize %zu.",
            yuvImageSize, yuvImageAlignedSize, surfaceBufSize);
        return false;
    }
    return true;
}

void DecodeDataProcess::PostOutputDataBuffers(std::shared_ptr<DataBuffer>& outputBuffer)
{
    if (eventBusDecode_ == nullptr || outputBuffer == nullptr) {
        DHLOGE("eventBusDecode_ or outputBuffer is null.");
        return;
    }
    std::vector<std::shared_ptr<DataBuffer>> multiDataBuffers;
    multiDataBuffers.push_back(outputBuffer);
    std::shared_ptr<CodecPacket> transNextNodePacket = std::make_shared<CodecPacket>(VideoCodecType::NO_CODEC,
        multiDataBuffers);
    DCameraCodecEvent dCamCodecEv(*this, transNextNodePacket, VideoCodecAction::NO_ACTION);
    eventBusDecode_->PostEvent<DCameraCodecEvent>(dCamCodecEv, POSTMODE::POST_ASYNC);
    DHLOGD("Send video decoder output asynchronous DCameraCodecEvents success.");
}

int32_t DecodeDataProcess::DecodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Decoder Done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the decoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the decoder failed to process data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void DecodeDataProcess::OnEvent(DCameraCodecEvent& ev)
{
    DHLOGD("Receiving asynchronous DCameraCodecEvents.");
    std::shared_ptr<CodecPacket> receivedCodecPacket = ev.GetCodecPacket();
    VideoCodecAction action = ev.GetAction();
    switch (action) {
        case VideoCodecAction::NO_ACTION: {
            if (receivedCodecPacket == nullptr) {
                DHLOGE("The received codecPacket of action [%d] is null.", action);
                OnError();
                return;
            }
            std::vector<std::shared_ptr<DataBuffer>> dataBuffers = receivedCodecPacket->GetDataBuffers();
            DecodeDone(dataBuffers);
            break;
        }
        case VideoCodecAction::ACTION_ONCE_AGAIN:
            DHLOGD("Try FeedDecoderInputBuffer again.");
            FeedDecoderInputBuffer();
            return;
        case VideoCodecAction::ACTION_GET_DECODER_OUTPUT_BUFFER:
            if (receivedCodecPacket == nullptr) {
                DHLOGE("The received codecPacket of action [%d] is null.", action);
                OnError();
                return;
            }
            GetDecoderOutputBuffer(receivedCodecPacket->GetSurface());
            break;
        default:
            DHLOGD("The action : %d is not supported.", action);
            return;
    }
}

void DecodeDataProcess::OnError()
{
    DHLOGD("DecodeDataProcess : OnError.");
    isDecoderProcess_.store(false);
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return;
    }
    targetPipelineSource->OnError(DataProcessErrorType::ERROR_PIPELINE_DECODER);
}

void DecodeDataProcess::OnInputBufferAvailable(uint32_t index)
{
    DHLOGD("DecodeDataProcess::OnInputBufferAvailable");
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (availableInputIndexsQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder available indexes queue overflow.");
        return;
    }
    DHLOGD("Video decoder available indexes queue push index [%u].", index);
    availableInputIndexsQueue_.push(index);
}

void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The first changed video decoder output format is null.");
        return;
    }
    decodeOutputFormat_ = format;
}

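/*
 * OnOutputBufferAvailable stamps the finish-decode time on the oldest pending frame info and
 * releases the output buffer with render = true, which appears to hand the decoded frame to
 * the output surface configured earlier; the pixel data is then picked up in
 * OnSurfaceOutputBufferAvailable / GetDecoderOutputBuffer rather than read here.
 */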
void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const Media::AVCodecBufferInfo& info,
    const Media::AVCodecBufferFlag& flag)
{
    int64_t finishDecodeT = GetNowTimeStampUs();
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node has encountered an error or started to release.");
        return;
    }
    DHLOGD("Video decode buffer info: presentation TimeUs %lld, size %d, offset %d, flag %d",
        info.presentationTimeUs, info.size, info.offset, flag);
    outputInfo_ = info;
    {
        std::lock_guard<std::mutex> lock(mtxDequeLock_);
        AlignFirstFrameTime();
        for (auto it = frameInfoDeque_.begin(); it != frameInfoDeque_.end(); it++) {
            DCameraFrameInfo frameInfo = *it;
            if (frameInfo.timePonit.finishDecode != 0) {
                continue;
            }
            frameInfo.timePonit.finishDecode = finishDecodeT;
            frameInfoDeque_.emplace(frameInfoDeque_.erase(it), frameInfo);
            break;
        }
    }
    {
        std::lock_guard<std::mutex> outputLock(mtxDecoderState_);
        if (videoDecoder_ == nullptr) {
            DHLOGE("The video decoder does not exist before decoding data.");
            return;
        }
        int32_t errRelease = videoDecoder_->ReleaseOutputBuffer(index, true);
        if (errRelease != Media::MediaServiceErrCode::MSERR_OK) {
            DHLOGE("The video decoder output decoded data to surface failed, index : [%u].", index);
        }
    }
}

VideoConfigParams DecodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams DecodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t DecodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    return DCAMERA_OK;
}

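/*
 * AlignFirstFrameTime: when the head of frameInfoDeque_ is the codec-config entry
 * (FRAME_HEAD with AVCODEC_BUFFER_FLAG_CODEC_DATA), its record is merged with the first
 * real frame so that timing statistics start from actual image data. This assumes a real
 * frame entry already follows the codec-config entry in the deque.
 */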
void DecodeDataProcess::AlignFirstFrameTime()
{
    if (frameInfoDeque_.empty()) {
        return;
    }
    DCameraFrameInfo frameInfo = frameInfoDeque_.front();
    if (frameInfo.index != FRAME_HEAD || frameInfo.type != Media::AVCODEC_BUFFER_FLAG_CODEC_DATA) {
        return;
    }
    frameInfoDeque_.pop_front();
    DCameraFrameInfo front = frameInfoDeque_.front();
    frameInfo.index = front.index;
    frameInfo.pts = front.pts;
    frameInfo.offset = front.offset;
    frameInfo.type = front.type;
    frameInfo.ver = front.ver;
    frameInfo.timePonit.finishEncode = front.timePonit.finishEncode;
    frameInfoDeque_.emplace(frameInfoDeque_.erase(frameInfoDeque_.begin()), frameInfo);
}
} // namespace DistributedHardware
} // namespace OHOS