/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "decode_data_process.h"

#include "distributed_hardware_log.h"
#include "graphic_common_c.h"

#include "dcamera_hisysevent_adapter.h"
#include "dcamera_utils_tools.h"
#include "decode_surface_listener.h"
#include "decode_video_callback.h"

namespace OHOS {
namespace DistributedHardware {
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

DecodeDataProcess::~DecodeDataProcess()
{
    if (isDecoderProcess_.load()) {
        DHLOGD("~DecodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

int32_t DecodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera DecodeNode start.");
    if (!(IsInDecoderRange(sourceConfig) && IsInDecoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }
    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The DecodeNode can't convert %d to %d.", sourceConfig.GetVideoCodecType(),
            targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable DecodeNode. The target video codec type %d is the same as the source video codec type %d.",
            sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isDecoderProcess_.store(true);
        return DCAMERA_OK;
    }

    InitCodecEvent();
    int32_t err = InitDecoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video decoder failed.");
        ReleaseProcessNode();
        return err;
    }
    alignedHeight_ = GetAlignedHeight(sourceConfig_.GetHeight());
    processedConfig = processedConfig_;
    isDecoderProcess_.store(true);
    return DCAMERA_OK;
}

bool DecodeDataProcess::IsInDecoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool DecodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        targetConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

void DecodeDataProcess::InitCodecEvent()
{
    DHLOGD("Init DecodeNode eventBus, and add handler for it.");
    eventBusDecode_ = std::make_shared<EventBus>("DeDtProcHandler");
    DCameraCodecEvent codecEvent(*this, std::make_shared<CodecPacket>());
    eventBusRegHandleDecode_ = eventBusDecode_->AddHandler<DCameraCodecEvent>(codecEvent.GetType(), *this);

    DHLOGD("Add handler for DCamera pipeline eventBus.");
    eventBusRegHandlePipeline2Decode_ = eventBusPipeline_->AddHandler<DCameraCodecEvent>(codecEvent.GetType(), *this);
}

int32_t DecodeDataProcess::InitDecoder()
{
    DHLOGD("Init video decoder.");
    int32_t ret = ConfigureVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video decoder failed.");
        return ret;
    }

    ret = StartVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video decoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_DECODE_ERROR,
            CreateMsg("start video decoder failed, width: %d, height: %d, format: %s", sourceConfig_.GetWidth(),
            sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::ConfigureVideoDecoder()
{
    int32_t ret = InitDecoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video decoder metadata format failed. Error code %d.", ret);
        return ret;
    }

    videoDecoder_ = Media::VideoDecoderFactory::CreateByMime(processType_);
    if (videoDecoder_ == nullptr) {
        DHLOGE("Create video decoder failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeVideoCallback_ = std::make_shared<DecodeVideoCallback>(shared_from_this());
    ret = videoDecoder_->SetCallback(decodeVideoCallback_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video decoder callback failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoDecoder_->Configure(metadataFormat_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video decoder metadata format failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = SetDecoderOutputSurface();
    if (ret != DCAMERA_OK) {
        DHLOGE("Set decoder output surface failed. Error code %d.", ret);
        return ret;
    }

    return DCAMERA_OK;
}

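/*
 * Map the source codec type to the decoder MIME type and fill in the metadata format
 * (pixel format, max input size, resolution and frame rate). The processed configuration is
 * derived from the source configuration, with the codec removed and the output format set to NV12.
 */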
int32_t DecodeDataProcess::InitDecoderMetadataFormat()
{
    DHLOGD("Init video decoder metadata format.");
    switch (sourceConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            metadataFormat_.PutStringValue("codec_mime", processType_);
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            metadataFormat_.PutStringValue("codec_mime", processType_);
            break;
        default:
            DHLOGE("The current codec type does not support decoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV12);
    metadataFormat_.PutIntValue("max_input_size", MAX_YUV420_BUFFER_SIZE);
    metadataFormat_.PutIntValue("width", sourceConfig_.GetWidth());
    metadataFormat_.PutIntValue("height", sourceConfig_.GetHeight());
    metadataFormat_.PutIntValue("frame_rate", MAX_FRAME_RATE);

    processedConfig_ = sourceConfig_;
    processedConfig_.SetVideoCodecType(VideoCodecType::NO_CODEC);
    processedConfig_.SetVideoformat(Videoformat::NV12);
    return DCAMERA_OK;
}

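/*
 * Create a consumer/producer surface pair for the decoder output: decoded frames are written to the
 * producer surface by the decoder and read back from the consumer surface through DecodeSurfaceListener.
 */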
int32_t DecodeDataProcess::SetDecoderOutputSurface()
{
    DHLOGD("Set the video decoder output surface.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder is null.");
        return DCAMERA_BAD_VALUE;
    }

    decodeConsumerSurface_ = Surface::CreateSurfaceAsConsumer();
    if (decodeConsumerSurface_ == nullptr) {
        DHLOGE("Create the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeConsumerSurface_->SetDefaultWidthAndHeight(static_cast<int32_t>(sourceConfig_.GetWidth()),
        static_cast<int32_t>(sourceConfig_.GetHeight()));
    decodeSurfaceListener_ = new DecodeSurfaceListener(decodeConsumerSurface_, shared_from_this());
    if (decodeConsumerSurface_->RegisterConsumerListener(decodeSurfaceListener_) !=
        SURFACE_ERROR_OK) {
        DHLOGE("Register consumer listener failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<IBufferProducer> surfaceProducer = decodeConsumerSurface_->GetProducer();
    if (surfaceProducer == nullptr) {
        DHLOGE("Get the surface producer of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }
    decodeProducerSurface_ = Surface::CreateSurfaceAsProducer(surfaceProducer);
    if (decodeProducerSurface_ == nullptr) {
        DHLOGE("Create the decode producer surface of the decode consumer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    DHLOGD("Set the producer surface to video decoder output surface.");
    int32_t err = videoDecoder_->SetOutputSurface(decodeProducerSurface_);
    if (err != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set decoder output surface failed.");
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StartVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StartVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoDecoder_->Prepare();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video decoder prepare failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    ret = videoDecoder_->Start();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video decoder start failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::StopVideoDecoder()
{
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before StopVideoDecoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoDecoder_->Flush();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder flush failed. Error type: %d.", ret);
        isSuccess = false;
    }
    ret = videoDecoder_->Stop();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder stop failed. Error type: %d.", ret);
        isSuccess = false;
    }
    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void DecodeDataProcess::ReleaseVideoDecoder()
{
    std::lock_guard<std::mutex> lck(mtxDecoderState_);
    DHLOGD("Start release videoDecoder.");
    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before ReleaseVideoDecoder.");
        decodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoDecoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("StopVideoDecoder failed.");
    }
    ret = videoDecoder_->Release();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoDecoder release failed. Error type: %d.", ret);
    }
    videoDecoder_ = nullptr;
    decodeVideoCallback_ = nullptr;
}

void DecodeDataProcess::ReleaseDecoderSurface()
{
    if (decodeConsumerSurface_ == nullptr) {
        decodeProducerSurface_ = nullptr;
        DHLOGE("The decode consumer surface does not exist before UnregisterConsumerListener.");
        return;
    }
    int32_t ret = decodeConsumerSurface_->UnregisterConsumerListener();
    if (ret != SURFACE_ERROR_OK) {
        DHLOGE("Unregister consumer listener failed. Error type: %d.", ret);
    }
    decodeConsumerSurface_ = nullptr;
    decodeProducerSurface_ = nullptr;
}

void DecodeDataProcess::ReleaseCodecEvent()
{
    DCameraCodecEvent codecEvent(*this, std::make_shared<CodecPacket>());
    if (eventBusDecode_ != nullptr) {
        eventBusDecode_->RemoveHandler<DCameraCodecEvent>(codecEvent.GetType(), eventBusRegHandleDecode_);
        eventBusRegHandleDecode_ = nullptr;
        eventBusDecode_ = nullptr;
    }
    if (eventBusPipeline_ != nullptr) {
        eventBusPipeline_->RemoveHandler<DCameraCodecEvent>(codecEvent.GetType(), eventBusRegHandlePipeline2Decode_);
        eventBusRegHandlePipeline2Decode_ = nullptr;
        eventBusPipeline_ = nullptr;
    }
    DHLOGD("Release DecodeNode eventBusDecode and eventBusPipeline end.");
}

void DecodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%d] node : DecodeNode.", nodeRank_);
    isDecoderProcess_.store(false);
    ReleaseVideoDecoder();
    ReleaseDecoderSurface();
    ReleaseCodecEvent();

    processType_ = "";
    std::queue<std::shared_ptr<DataBuffer>>().swap(inputBuffersQueue_);
    std::queue<uint32_t>().swap(availableInputIndexsQueue_);
    waitDecoderOutputCount_ = 0;
    lastFeedDecoderInputBufferTimeUs_ = 0;
    outputTimeStampUs_ = 0;
    alignedHeight_ = 0;

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%d] node : DecodeNode end.", nodeRank_);
}

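/*
 * Entry point of the decode node. Encoded buffers are validated and queued; when the source and
 * processed codec types already match, the data is passed through via DecodeDone. If feeding the
 * decoder fails, an ACTION_ONCE_AGAIN event is posted on the pipeline event bus to retry later.
 */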
int32_t DecodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in DecodeDataProcess.");
    if (inputBuffers.empty()) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return DecodeDone(inputBuffers);
    }

    if (videoDecoder_ == nullptr) {
        DHLOGE("The video decoder does not exist before decoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffersQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder input buffers queue overflow.");
        return DCAMERA_INDEX_OVERFLOW;
    }
    if (inputBuffers[0]->Size() > MAX_YUV420_BUFFER_SIZE) {
        DHLOGE("DecodeNode input buffer size %zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node encountered an error or has started releasing.");
        return DCAMERA_DISABLE_PROCESS;
    }
    inputBuffersQueue_.push(inputBuffers[0]);
    DHLOGD("Push inputBuf success. BufSize %zu, QueueSize %zu.", inputBuffers[0]->Size(), inputBuffersQueue_.size());
    int32_t err = FeedDecoderInputBuffer();
    if (err != DCAMERA_OK) {
        int32_t sleepTimeUs = 5000;
        std::this_thread::sleep_for(std::chrono::microseconds(sleepTimeUs));
        DHLOGD("Feed decoder input buffer failed. Try FeedDecoderInputBuffer again.");
        std::shared_ptr<CodecPacket> reFeedInputPacket = std::make_shared<CodecPacket>();
        reFeedInputPacket->SetVideoCodecType(sourceConfig_.GetVideoCodecType());
        DCameraCodecEvent dCamCodecEv(*this, reFeedInputPacket, VideoCodecAction::ACTION_ONCE_AGAIN);
        if (eventBusPipeline_ == nullptr) {
            DHLOGE("eventBusPipeline_ is nullptr.");
            return DCAMERA_BAD_VALUE;
        }
        eventBusPipeline_->PostEvent<DCameraCodecEvent>(dCamCodecEv, POSTMODE::POST_ASYNC);
    }
    return DCAMERA_OK;
}

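/*
 * Drain the pending input queue: copy each encoded buffer into the decoder input shared memory
 * obtained for the next available input index, timestamp it, and queue it into the video decoder.
 * Stops early when no input index is available or the node is being released.
 */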
int32_t DecodeDataProcess::FeedDecoderInputBuffer()
{
    DHLOGD("Feed decoder input buffer.");
    while ((!inputBuffersQueue_.empty()) && (isDecoderProcess_.load())) {
        std::shared_ptr<DataBuffer> buffer = inputBuffersQueue_.front();
        if (buffer == nullptr || availableInputIndexsQueue_.empty()) {
            DHLOGE("inputBuffersQueue size %zu, availableInputIndexsQueue size %zu.",
                inputBuffersQueue_.size(), availableInputIndexsQueue_.size());
            return DCAMERA_BAD_VALUE;
        }

        {
            std::lock_guard<std::mutex> lck(mtxDecoderState_);
            if (videoDecoder_ == nullptr) {
                DHLOGE("The video decoder does not exist before GetInputBuffer.");
                return DCAMERA_OK;
            }
            uint32_t index = availableInputIndexsQueue_.front();
            std::shared_ptr<Media::AVSharedMemory> sharedMemoryInput = videoDecoder_->GetInputBuffer(index);
            if (sharedMemoryInput == nullptr) {
                DHLOGE("Failed to obtain the input shared memory corresponding to the [%u] index.", index);
                return DCAMERA_BAD_VALUE;
            }
            size_t inputMemoDataSize = static_cast<size_t>(sharedMemoryInput->GetSize());
            errno_t err = memcpy_s(sharedMemoryInput->GetBase(), inputMemoDataSize, buffer->Data(), buffer->Size());
            if (err != EOK) {
                DHLOGE("memcpy_s buffer failed.");
                return DCAMERA_MEMORY_OPT_ERROR;
            }
            int64_t timeUs = GetDecoderTimeStamp();
            DHLOGD("Decoder input buffer size %zu, timeStamp %lld.", buffer->Size(), (long long)timeUs);
            Media::AVCodecBufferInfo bufferInfo {timeUs, static_cast<int32_t>(buffer->Size()), 0};
            int32_t ret = videoDecoder_->QueueInputBuffer(index, bufferInfo,
                Media::AVCODEC_BUFFER_FLAG_NONE);
            if (ret != Media::MediaServiceErrCode::MSERR_OK) {
                DHLOGE("Queue input buffer failed.");
                return DCAMERA_BAD_OPERATE;
            }
        }

        inputBuffersQueue_.pop();
        DHLOGD("Feed decoder input buffer success. inputBuffersQueue size is %zu.", inputBuffersQueue_.size());

        IncreaseWaitDecodeCnt();
    }
    return DCAMERA_OK;
}

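/* Return the interval in microseconds since the previous feed; the first frame is stamped with 0. */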
int64_t DecodeDataProcess::GetDecoderTimeStamp()
{
    int64_t timeIntervalStampUs = 0;
    int64_t nowTimeUs = GetNowTimeStampUs();
    if (lastFeedDecoderInputBufferTimeUs_ == 0) {
        lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
        return timeIntervalStampUs;
    }
    timeIntervalStampUs = nowTimeUs - lastFeedDecoderInputBufferTimeUs_;
    lastFeedDecoderInputBufferTimeUs_ = nowTimeUs;
    return timeIntervalStampUs;
}

void DecodeDataProcess::IncreaseWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    availableInputIndexsQueue_.pop();
    waitDecoderOutputCount_++;
    DHLOGD("Wait decoder output frames number is %d.", waitDecoderOutputCount_);
}

void DecodeDataProcess::ReduceWaitDecodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitDecoderOutputCount_ <= 0) {
        DHLOGE("The waitDecoderOutputCount_ = %d.", waitDecoderOutputCount_);
    }
    if (outputTimeStampUs_ == 0) {
        waitDecoderOutputCount_ -= FIRST_FRAME_INPUT_NUM;
    } else {
        waitDecoderOutputCount_--;
    }
    DHLOGD("Wait decoder output frames number is %d.", waitDecoderOutputCount_);
}

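/*
 * Acquire a decoded frame from the consumer surface, validate its size and stride, copy the image
 * out of the surface buffer, then release the buffer back to the surface and update the bookkeeping.
 */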
void DecodeDataProcess::GetDecoderOutputBuffer(const sptr<Surface>& surface)
{
    DHLOGD("Get decoder output buffer.");
    if (surface == nullptr) {
        DHLOGE("Get decode consumer surface failed.");
        return;
    }
    Rect damage = {0, 0, 0, 0};
    int32_t acquireFence = 0;
    int64_t timeStampUs = 0;
    sptr<SurfaceBuffer> surfaceBuffer = nullptr;
    GSError ret = surface->AcquireBuffer(surfaceBuffer, acquireFence, timeStampUs, damage);
    if (ret != GSERROR_OK || surfaceBuffer == nullptr) {
        DHLOGE("Acquire surface buffer failed!");
        return;
    }
    int32_t alignedWidth = surfaceBuffer->GetStride();
    if (surfaceBuffer->GetSize() > BUFFER_MAX_SIZE || alignedWidth > ALIGNED_WIDTH_MAX_SIZE) {
        DHLOGE("Surface buffer size or alignedWidth too large.");
        return;
    }
    int32_t alignedHeight = alignedHeight_;
    DHLOGD("OutputBuffer alignedWidth %d, alignedHeight %d, TimeUs %lld.", alignedWidth, alignedHeight, timeStampUs);
    CopyDecodedImage(surfaceBuffer, timeStampUs, alignedWidth, alignedHeight);
    surface->ReleaseBuffer(surfaceBuffer, -1);
    outputTimeStampUs_ = timeStampUs;
    ReduceWaitDecodeCnt();
}

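/*
 * Copy the decoded NV12 image out of the surface buffer. When the aligned dimensions equal the
 * source dimensions the whole image is copied in one memcpy_s; otherwise the stride padding is
 * stripped row by row. The result is tagged with its metadata and posted to the next node.
 */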
void DecodeDataProcess::CopyDecodedImage(const sptr<SurfaceBuffer>& surBuf, int64_t timeStampUs, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (!IsCorrectSurfaceBuffer(surBuf, alignedWidth, alignedHeight)) {
        DHLOGE("Surface output buffer error.");
        return;
    }

    size_t yuvImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
                                                       YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(yuvImageSize);
    uint8_t *addr = static_cast<uint8_t *>(surBuf->GetVirAddr());
    if (alignedWidth == sourceConfig_.GetWidth() &&
        alignedHeight == sourceConfig_.GetHeight()) {
        errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(), addr, yuvImageSize);
        if (err != EOK) {
            DHLOGE("memcpy_s surface buffer failed.");
            return;
        }
    } else {
        ImageUnitInfo srcImgInfo = { processedConfig_.GetVideoformat(), sourceConfig_.GetWidth(),
            sourceConfig_.GetHeight(), alignedWidth, alignedHeight, static_cast<size_t>(alignedWidth * alignedHeight),
            surBuf->GetSize(), addr };
        ImageUnitInfo dstImgInfo = { processedConfig_.GetVideoformat(), processedConfig_.GetWidth(),
            processedConfig_.GetHeight(), processedConfig_.GetWidth(), processedConfig_.GetHeight(),
            processedConfig_.GetWidth() * processedConfig_.GetHeight(), bufferOutput->Size(), bufferOutput->Data() };
        int32_t retRow = CopyYUVPlaneByRow(srcImgInfo, dstImgInfo);
        if (retRow != DCAMERA_OK) {
            DHLOGE("Copy YUV plane by row failed.");
            return;
        }
    }

    bufferOutput->SetInt64("timeUs", timeStampUs);
    bufferOutput->SetInt32("Videoformat", static_cast<int32_t>(processedConfig_.GetVideoformat()));
    bufferOutput->SetInt32("alignedWidth", processedConfig_.GetWidth());
    bufferOutput->SetInt32("alignedHeight", processedConfig_.GetHeight());
    bufferOutput->SetInt32("width", processedConfig_.GetWidth());
    bufferOutput->SetInt32("height", processedConfig_.GetHeight());

    PostOutputDataBuffers(bufferOutput);
}

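/*
 * Row-by-row copy of an NV12 frame that removes the decoder stride padding: the Y plane is copied
 * first, then the interleaved UV plane, advancing by the aligned width in the source and destination.
 */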
int32_t DecodeDataProcess::CopyYUVPlaneByRow(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
{
    int32_t ret = CheckCopyImageInfo(srcImgInfo, dstImgInfo);
    if (ret != DCAMERA_OK) {
        DHLOGE("Check CopyImageUnitInfo failed.");
        return ret;
    }

    /* Copy YPlane by Row */
    int32_t srcDataOffset = 0;
    int32_t dstDataOffset = 0;
    for (int32_t yh = 0; yh < dstImgInfo.height; yh++) {
        errno_t err = memcpy_s(dstImgInfo.imgData + dstDataOffset, dstImgInfo.chromaOffset - dstDataOffset,
            srcImgInfo.imgData + srcDataOffset, dstImgInfo.width);
        if (err != EOK) {
            DHLOGE("memcpy_s YPlane in line[%d] failed.", yh);
            return DCAMERA_MEMORY_OPT_ERROR;
        }
        dstDataOffset += dstImgInfo.alignedWidth;
        srcDataOffset += srcImgInfo.alignedWidth;
    }
    DHLOGD("Copy Yplane end, dstDataOffset %d, srcDataOffset %d, validYPlaneSize %d.",
        dstDataOffset, srcDataOffset, dstImgInfo.chromaOffset);

    /* Copy UVPlane by Row */
    dstDataOffset = dstImgInfo.chromaOffset;
    srcDataOffset = srcImgInfo.chromaOffset;
    for (int32_t uvh = 0; uvh < dstImgInfo.height / Y2UV_RATIO; uvh++) {
        errno_t err = memcpy_s(dstImgInfo.imgData + dstDataOffset, dstImgInfo.imgSize - dstDataOffset,
            srcImgInfo.imgData + srcDataOffset, dstImgInfo.width);
        if (err != EOK) {
            DHLOGE("memcpy_s UVPlane in line[%d] failed.", uvh);
            return DCAMERA_MEMORY_OPT_ERROR;
        }
        dstDataOffset += dstImgInfo.alignedWidth;
        srcDataOffset += srcImgInfo.alignedWidth;
    }
    DHLOGD("Copy UVplane end, dstDataOffset %d, srcDataOffset %d.", dstDataOffset, srcDataOffset);
    return DCAMERA_OK;
}

int32_t DecodeDataProcess::CheckCopyImageInfo(const ImageUnitInfo& srcImgInfo, const ImageUnitInfo& dstImgInfo)
{
    if (srcImgInfo.imgData == nullptr || dstImgInfo.imgData == nullptr) {
        DHLOGE("The imgData of srcImgInfo or dstImgInfo is null!");
        return DCAMERA_BAD_VALUE;
    }
    if (srcImgInfo.colorFormat != dstImgInfo.colorFormat) {
        DHLOGE("CopyInfo error : srcImgInfo colorFormat %d, dstImgInfo colorFormat %d.",
            srcImgInfo.colorFormat, dstImgInfo.colorFormat);
        return DCAMERA_BAD_VALUE;
    }

    if (!IsCorrectImageUnitInfo(srcImgInfo)) {
        DHLOGE("srcImgInfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, "
            "imgSize %lld.", srcImgInfo.width, srcImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight,
            srcImgInfo.chromaOffset, srcImgInfo.imgSize);
        return DCAMERA_BAD_VALUE;
    }
    if (!IsCorrectImageUnitInfo(dstImgInfo)) {
        DHLOGE("dstImgInfo fail: width %d, height %d, alignedWidth %d, alignedHeight %d, chromaOffset %lld, "
            "imgSize %lld.", dstImgInfo.width, dstImgInfo.height, dstImgInfo.alignedWidth, dstImgInfo.alignedHeight,
            dstImgInfo.chromaOffset, dstImgInfo.imgSize);
        return DCAMERA_BAD_VALUE;
    }

    if (dstImgInfo.width > srcImgInfo.alignedWidth || dstImgInfo.height > srcImgInfo.alignedHeight) {
        DHLOGE("Comparison ImgInfo fail: dstwidth %d, dstheight %d, srcAlignedWidth %d, srcAlignedHeight %d.",
            dstImgInfo.width, dstImgInfo.height, srcImgInfo.alignedWidth, srcImgInfo.alignedHeight);
        return DCAMERA_BAD_VALUE;
    }
    return DCAMERA_OK;
}

bool DecodeDataProcess::IsCorrectImageUnitInfo(const ImageUnitInfo& imgInfo)
{
    size_t expectedImgSize = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight *
        YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t expectedChromaOffset = static_cast<size_t>(imgInfo.alignedWidth * imgInfo.alignedHeight);
    return (imgInfo.width <= imgInfo.alignedWidth && imgInfo.height <= imgInfo.alignedHeight &&
        imgInfo.imgSize >= expectedImgSize && imgInfo.chromaOffset == expectedChromaOffset);
}

bool DecodeDataProcess::IsCorrectSurfaceBuffer(const sptr<SurfaceBuffer>& surBuf, int32_t alignedWidth,
    int32_t alignedHeight)
{
    if (surBuf == nullptr) {
        DHLOGE("surface buffer is null!");
        return false;
    }

    size_t yuvImageAlignedSize = static_cast<size_t>(alignedWidth * alignedHeight *
                                                              YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t yuvImageSize = static_cast<size_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight() *
                                                       YUV_BYTES_PER_PIXEL / Y2UV_RATIO);
    size_t surfaceBufSize = static_cast<size_t>(surBuf->GetSize());
    if (yuvImageAlignedSize > surfaceBufSize || yuvImageAlignedSize < yuvImageSize) {
        DHLOGE("Buffer size error, yuvImageSize %zu, yuvImageAlignedSize %zu, surBufSize %zu.",
            yuvImageSize, yuvImageAlignedSize, surBuf->GetSize());
        return false;
    }
    return true;
}

void DecodeDataProcess::PostOutputDataBuffers(std::shared_ptr<DataBuffer>& outputBuffer)
{
    if (eventBusDecode_ == nullptr || outputBuffer == nullptr) {
        DHLOGE("eventBusDecode_ or outputBuffer is null.");
        return;
    }
    std::vector<std::shared_ptr<DataBuffer>> multiDataBuffers;
    multiDataBuffers.push_back(outputBuffer);
    std::shared_ptr<CodecPacket> transNextNodePacket = std::make_shared<CodecPacket>(VideoCodecType::NO_CODEC,
        multiDataBuffers);
    DCameraCodecEvent dCamCodecEv(*this, transNextNodePacket, VideoCodecAction::NO_ACTION);
    eventBusDecode_->PostEvent<DCameraCodecEvent>(dCamCodecEv, POSTMODE::POST_ASYNC);
    DHLOGD("Send video decoder output asynchronous DCameraCodecEvents success.");
}

int32_t DecodeDataProcess::DecodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Decoder Done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the decoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the decoder failed to process the data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node, output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSource->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

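/*
 * Event bus callback. NO_ACTION carries decoded frames that are forwarded through DecodeDone;
 * ACTION_ONCE_AGAIN retries FeedDecoderInputBuffer; other actions are ignored.
 */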
void DecodeDataProcess::OnEvent(DCameraCodecEvent& ev)
{
    DHLOGD("Receiving asynchronous DCameraCodecEvents.");
    std::shared_ptr<CodecPacket> receivedCodecPacket = ev.GetCodecPacket();
    VideoCodecAction action = ev.GetAction();
    switch (action) {
        case VideoCodecAction::NO_ACTION: {
            if (receivedCodecPacket == nullptr) {
                DHLOGE("The received codecPacket of action [%d] is null.", action);
                OnError();
                return;
            }

            std::vector<std::shared_ptr<DataBuffer>> yuvDataBuffers = receivedCodecPacket->GetDataBuffers();
            DecodeDone(yuvDataBuffers);
            break;
        }
        case VideoCodecAction::ACTION_ONCE_AGAIN:
            DHLOGD("Try FeedDecoderInputBuffer again.");
            FeedDecoderInputBuffer();
            return;
        default:
            DHLOGD("The action : %d is not supported.", action);
            return;
    }
}

void DecodeDataProcess::OnError()
{
    DHLOGD("DecodeDataProcess : OnError.");
    isDecoderProcess_.store(false);
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSource> targetPipelineSource = callbackPipelineSource_.lock();
    if (targetPipelineSource == nullptr) {
        DHLOGE("callbackPipelineSource_ is nullptr.");
        return;
    }
    targetPipelineSource->OnError(DataProcessErrorType::ERROR_PIPELINE_DECODER);
}

void DecodeDataProcess::OnInputBufferAvailable(uint32_t index)
{
    DHLOGD("DecodeDataProcess::OnInputBufferAvailable");
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (availableInputIndexsQueue_.size() > VIDEO_DECODER_QUEUE_MAX) {
        DHLOGE("Video decoder available indexes queue overflow.");
        return;
    }
    DHLOGD("Video decoder available indexes queue push index [%u].", index);
    availableInputIndexsQueue_.push(index);
}

void DecodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video decoder output format is null.");
        return;
    }
    decodeOutputFormat_ = format;
}

void DecodeDataProcess::OnOutputBufferAvailable(uint32_t index, const Media::AVCodecBufferInfo& info,
    const Media::AVCodecBufferFlag& flag)
{
    if (!isDecoderProcess_.load()) {
        DHLOGE("Decoder node encountered an error or has started releasing.");
        return;
    }
    DHLOGD("Video decode buffer info: presentation TimeUs %lld, size %d, offset %d, flag %d",
        info.presentationTimeUs, info.size, info.offset, flag);
    outputInfo_ = info;
    {
        std::lock_guard<std::mutex> lck(mtxDecoderState_);
        if (videoDecoder_ == nullptr) {
            DHLOGE("The video decoder does not exist before releasing the output buffer.");
            return;
        }
        int32_t errRelease = videoDecoder_->ReleaseOutputBuffer(index, true);
        if (errRelease != Media::MediaServiceErrCode::MSERR_OK) {
            DHLOGE("The video decoder output decoded data to surface failed, index : [%u].", index);
        }
    }
}

VideoConfigParams DecodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams DecodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}
} // namespace DistributedHardware
} // namespace OHOS