/*
 * Copyright (c) 2022-2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cmath>
#include <cstdlib>
#include <ctime>

#include "dcamera_hisysevent_adapter.h"
#include "dcamera_utils_tools.h"
#include "distributed_hardware_log.h"
#include "encode_data_process.h"
#include "encode_video_callback.h"
#include "graphic_common_c.h"

#ifndef DH_LOG_TAG
#define DH_LOG_TAG "DCDP_NODE_ENCODEC"
#endif

namespace OHOS {
namespace DistributedHardware {
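// Bitrate lookup table keyed by resolution (width * height, in pixels) with the target encoder bitrate as value.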
const std::map<int64_t, int32_t> EncodeDataProcess::ENCODER_BITRATE_TABLE = {
    { WIDTH_320_HEIGHT_240, BITRATE_500000 },
    { WIDTH_480_HEIGHT_360, BITRATE_1110000 },
    { WIDTH_640_HEIGHT_360, BITRATE_1500000 },
    { WIDTH_640_HEIGHT_480, BITRATE_1800000 },
    { WIDTH_720_HEIGHT_540, BITRATE_2100000 },
    { WIDTH_960_HEIGHT_540, BITRATE_2300000 },
    { WIDTH_960_HEIGHT_720, BITRATE_2800000 },
    { WIDTH_1280_HEIGHT_720, BITRATE_3400000 },
    { WIDTH_1440_HEIGHT_1080, BITRATE_5000000 },
    { WIDTH_1920_HEIGHT_1080, BITRATE_6000000 },
};
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

EncodeDataProcess::~EncodeDataProcess()
{
    if (isEncoderProcess_.load()) {
        DHLOGD("~EncodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

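// Initializes the encode node: validates the source and target configurations, bypasses encoding when the
// codec types already match, and otherwise creates and starts the underlying video encoder.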
int32_t EncodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera EncodeNode start.");
    if (!(IsInEncoderRange(sourceConfig) && IsInEncoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }
    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The EncodeNode cannot convert source VideoCodecType %d to target VideoCodecType %d.",
            sourceConfig.GetVideoCodecType(), targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable EncodeNode. The target VideoCodecType %d is the same as the source VideoCodecType %d.",
            sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isEncoderProcess_.store(true);
        return DCAMERA_OK;
    }

    int32_t err = InitEncoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video encoder failed.");
        ReleaseProcessNode();
        return err;
    }
    processedConfig = processedConfig_;
    isEncoderProcess_.store(true);
    return DCAMERA_OK;
}

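// Checks that the width, height, and frame rate of the given configuration are within the encoder's supported range.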
bool EncodeDataProcess::IsInEncoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool EncodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        sourceConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

int32_t EncodeDataProcess::InitEncoder()
{
    DHLOGD("Init video encoder.");
    int32_t ret = ConfigureVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video encoder failed. Error code %d.", ret);
        return ret;
    }

    ret = StartVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video encoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_ENCODE_ERROR,
            CreateMsg("start video encoder failed, width: %d, height: %d, format: %s", sourceConfig_.GetWidth(),
            sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }

    return DCAMERA_OK;
}

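// Fills in the encoder metadata (codec, pixel format, resolution, bitrate), creates the encoder by MIME type,
// registers the callback, configures it, and obtains the producer surface used to feed input frames.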
int32_t EncodeDataProcess::ConfigureVideoEncoder()
{
    int32_t ret = InitEncoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video encoder metadata format failed. Error code %d.", ret);
        return ret;
    }
    ret = InitEncoderBitrateFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video encoder bitrate format failed. Error code %d.", ret);
        return ret;
    }

    videoEncoder_ = Media::VideoEncoderFactory::CreateByMime(processType_);
    if (videoEncoder_ == nullptr) {
        DHLOGE("Create video encoder failed.");
        return DCAMERA_INIT_ERR;
    }
    encodeVideoCallback_ = std::make_shared<EncodeVideoCallback>(shared_from_this());
    ret = videoEncoder_->SetCallback(encodeVideoCallback_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video encoder callback failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoEncoder_->Configure(metadataFormat_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video encoder metadata format failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    encodeProducerSurface_ = videoEncoder_->CreateInputSurface();
    if (encodeProducerSurface_ == nullptr) {
        DHLOGE("Get video encoder producer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    return DCAMERA_OK;
}

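// Maps the target codec type to a MIME type and codec profile, and the source pixel format to the matching
// encoder pixel format and maximum input buffer size.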
int32_t EncodeDataProcess::InitEncoderMetadataFormat()
{
    processedConfig_ = sourceConfig_;
    switch (targetConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            metadataFormat_.PutIntValue("codec_profile", Media::AVCProfile::AVC_PROFILE_BASELINE);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H264);
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            metadataFormat_.PutIntValue("codec_profile", Media::HEVCProfile::HEVC_PROFILE_MAIN);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H265);
            break;
        case VideoCodecType::CODEC_MPEG4_ES:
            processType_ = "video/mp4v-es";
            metadataFormat_.PutIntValue("codec_profile", Media::MPEG4Profile::MPEG4_PROFILE_ADVANCED_CODING);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_MPEG4_ES);
            break;
        default:
            DHLOGE("The current codec type does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::YUVI420);
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV12:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV12);
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV21:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV21);
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::RGBA_8888:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::RGBA);
            metadataFormat_.PutLongValue("max_input_size", NORM_RGB32_BUFFER_SIZE);
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }
    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", static_cast<int32_t>(sourceConfig_.GetWidth()));
    metadataFormat_.PutIntValue("height", static_cast<int32_t>(sourceConfig_.GetHeight()));
    metadataFormat_.PutIntValue("frame_rate", MAX_FRAME_RATE);
    return DCAMERA_OK;
}

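// Sets the I-frame interval and VBR mode, then selects the bitrate whose table entry is closest to the source
// resolution; the encoder default is used when the table is empty.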
int32_t EncodeDataProcess::InitEncoderBitrateFormat()
{
    DHLOGD("Init video encoder bitrate format.");
    if (!(IsInEncoderRange(sourceConfig_) && IsInEncoderRange(targetConfig_))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }
    metadataFormat_.PutIntValue("i_frame_interval", IDR_FRAME_INTERVAL_MS);
    metadataFormat_.PutIntValue("video_encode_bitrate_mode", Media::VideoEncodeBitrateMode::VBR);

    if (ENCODER_BITRATE_TABLE.empty()) {
        DHLOGD("ENCODER_BITRATE_TABLE is empty, use the default bitrate of the encoder.");
        return DCAMERA_OK;
    }
    int64_t pixelCount = static_cast<int64_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight());
    int32_t matchedBitrate = BITRATE_6000000;
    int64_t minPixelCountDiff = WIDTH_1920_HEIGHT_1080 - pixelCount;
    for (auto it = ENCODER_BITRATE_TABLE.begin(); it != ENCODER_BITRATE_TABLE.end(); it++) {
        int64_t pixelCountDiff = std::abs(pixelCount - it->first);
        if (pixelCountDiff == 0) {
            matchedBitrate = it->second;
            break;
        }
        if (minPixelCountDiff >= pixelCountDiff) {
            minPixelCountDiff = pixelCountDiff;
            matchedBitrate = it->second;
        }
    }
    DHLOGD("Source config: width : %d, height : %d, matched bitrate %d.", sourceConfig_.GetWidth(),
        sourceConfig_.GetHeight(), matchedBitrate);
    metadataFormat_.PutIntValue("bitrate", matchedBitrate);
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::StartVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StartVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoEncoder_->Prepare();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video encoder prepare failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    ret = videoEncoder_->Start();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video encoder start failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::StopVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StopVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoEncoder_->Flush();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoEncoder flush failed. Error type: %d.", ret);
        isSuccess = false;
    }
    ret = videoEncoder_->Stop();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoEncoder stop failed. Error type: %d.", ret);
        isSuccess = false;
    }

    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

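// Stops and releases the underlying video encoder under the encoder-state lock, then drops the producer
// surface and callback references.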
void EncodeDataProcess::ReleaseVideoEncoder()
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Start release videoEncoder.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before ReleaseVideoEncoder.");
        encodeProducerSurface_ = nullptr;
        encodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("StopVideoEncoder failed.");
    }
    ret = videoEncoder_->Release();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoEncoder release failed. Error type: %d.", ret);
    }
    encodeProducerSurface_ = nullptr;
    videoEncoder_ = nullptr;
    encodeVideoCallback_ = nullptr;
    DHLOGD("Release videoEncoder success.");
}

void EncodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%d] node : EncodeNode.", nodeRank_);
    isEncoderProcess_.store(false);
    ReleaseVideoEncoder();

    waitEncoderOutputCount_ = 0;
    lastFeedEncoderInputBufferTimeUs_ = 0;
    inputTimeStampUs_ = 0;
    processType_ = "";

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%d] node : EncodeNode end.", nodeRank_);
}

int32_t EncodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in EncodeDataProcess.");
    if (inputBuffers.empty()) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return EncodeDone(inputBuffers);
    }
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before encoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffers[0]->Size() > NORM_YUV420_BUFFER_SIZE) {
        DHLOGE("EncodeNode input buffer size %d error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isEncoderProcess_.load()) {
        DHLOGE("EncodeNode encountered an error or is being released.");
        return DCAMERA_DISABLE_PROCESS;
    }
    int32_t err = FeedEncoderInputBuffer(inputBuffers[0]);
    if (err != DCAMERA_OK) {
        DHLOGE("Feed encoder input buffer failed.");
        return err;
    }
    return DCAMERA_OK;
}

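// Copies one input frame into a buffer requested from the encoder's producer surface, attaches the timestamp
// as extra data, and flushes the buffer so the encoder can consume it.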
int32_t EncodeDataProcess::FeedEncoderInputBuffer(std::shared_ptr<DataBuffer>& inputBuffer)
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Feed encoder input buffer, buffer size %d.", inputBuffer->Size());
    if (encodeProducerSurface_ == nullptr) {
        DHLOGE("Get encoder input producer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<SurfaceBuffer> surfacebuffer = GetEncoderInputSurfaceBuffer();
    if (surfacebuffer == nullptr) {
        DHLOGE("Get encoder input producer surface buffer failed.");
        return DCAMERA_BAD_OPERATE;
    }
    uint8_t *addr = static_cast<uint8_t *>(surfacebuffer->GetVirAddr());
    if (addr == nullptr) {
        DHLOGE("SurfaceBuffer address is nullptr.");
        encodeProducerSurface_->CancelBuffer(surfacebuffer);
        return DCAMERA_BAD_OPERATE;
    }
    size_t size = static_cast<size_t>(surfacebuffer->GetSize());
    errno_t err = memcpy_s(addr, size, inputBuffer->Data(), inputBuffer->Size());
    if (err != EOK) {
        DHLOGE("memcpy_s encoder input producer surface buffer failed, surBufSize %zu.", size);
        return DCAMERA_MEMORY_OPT_ERROR;
    }

    inputTimeStampUs_ = GetEncoderTimeStamp();
    DHLOGD("Encoder input buffer size %d, timeStamp %lld.", inputBuffer->Size(), (long long)inputTimeStampUs_);
    surfacebuffer->GetExtraData()->ExtraSet("timeStamp", inputTimeStampUs_);

    BufferFlushConfig flushConfig = { {0, 0, sourceConfig_.GetWidth(), sourceConfig_.GetHeight()}, 0};
    SurfaceError ret = encodeProducerSurface_->FlushBuffer(surfacebuffer, -1, flushConfig);
    if (ret != SURFACE_ERROR_OK) {
        DHLOGE("Flush encoder input producer surface buffer failed.");
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

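// Requests a writable buffer from the producer surface matching the source resolution, stride alignment,
// and pixel format.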
sptr<SurfaceBuffer> EncodeDataProcess::GetEncoderInputSurfaceBuffer()
{
    BufferRequestConfig requestConfig;
    requestConfig.width = sourceConfig_.GetWidth();
    requestConfig.height = sourceConfig_.GetHeight();
    requestConfig.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA;
    requestConfig.timeout = 0;
    requestConfig.strideAlignment = ENCODER_STRIDE_ALIGNMENT;
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_P;
            break;
        case Videoformat::NV12:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_SP;
            break;
        case Videoformat::NV21:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCRCB_420_SP;
            break;
        case Videoformat::RGBA_8888:
            requestConfig.format = PixelFormat::PIXEL_FMT_RGBA_8888;
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return nullptr;
    }
    sptr<SurfaceBuffer> surfacebuffer = nullptr;
    int32_t flushFence = -1;
    GSError err = encodeProducerSurface_->RequestBuffer(surfacebuffer, flushFence, requestConfig);
    if (err != GSERROR_OK || surfacebuffer == nullptr) {
        DHLOGE("Request encoder input producer surface buffer failed, error code: %d.", err);
    }
    return surfacebuffer;
}

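// Saves the previously fed input timestamp, then returns the current time from GetNowTimeStampUs() scaled by
// nsPerUs (if GetNowTimeStampUs() is in microseconds, the returned value is in nanoseconds despite the name).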
int64_t EncodeDataProcess::GetEncoderTimeStamp()
{
    if (inputTimeStampUs_ != 0) {
        lastFeedEncoderInputBufferTimeUs_ = inputTimeStampUs_;
    }
    const int64_t nsPerUs = 1000L;
    int64_t nowTimeUs = GetNowTimeStampUs() * nsPerUs;
    return nowTimeUs;
}

void EncodeDataProcess::IncreaseWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (lastFeedEncoderInputBufferTimeUs_ == 0) {
        waitEncoderOutputCount_ += FIRST_FRAME_OUTPUT_NUM;
    } else {
        waitEncoderOutputCount_++;
    }
    DHLOGD("Wait encoder output frames number is %d.", waitEncoderOutputCount_);
}

void EncodeDataProcess::ReduceWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitEncoderOutputCount_ <= 0) {
        DHLOGE("The waitEncoderOutputCount_ = %d.", waitEncoderOutputCount_);
    }
    waitEncoderOutputCount_--;
    DHLOGD("Wait encoder output frames number is %d.", waitEncoderOutputCount_);
}

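// Copies one encoded frame from the codec's shared memory into a DataBuffer, attaches timing metadata
// (start/finish encode time, timestamp, frame type, index), and hands it to EncodeDone.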
int32_t EncodeDataProcess::GetEncoderOutputBuffer(uint32_t index, Media::AVCodecBufferInfo info,
    Media::AVCodecBufferFlag flag)
{
    DHLOGD("Get encoder output buffer.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before outputting encoded data.");
        return DCAMERA_BAD_VALUE;
    }
    std::shared_ptr<Media::AVSharedMemory> sharedMemoryOutput = videoEncoder_->GetOutputBuffer(index);
    if (sharedMemoryOutput == nullptr) {
        DHLOGE("Failed to get the output shared memory, index : %u", index);
        return DCAMERA_BAD_OPERATE;
    }

    if (info.size <= 0 || info.size > DATABUFF_MAX_SIZE) {
        DHLOGE("AVCodecBufferInfo error, buffer size : %d", info.size);
        return DCAMERA_BAD_VALUE;
    }

    size_t outputMemoDataSize = static_cast<size_t>(info.size);
    DHLOGD("Encoder output buffer size : %d", outputMemoDataSize);
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(outputMemoDataSize);
    errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(),
        sharedMemoryOutput->GetBase(), outputMemoDataSize);
    if (err != EOK) {
        DHLOGE("memcpy_s buffer failed.");
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    int64_t timeStamp = info.presentationTimeUs;
    struct timespec time = {0, 0};
    clock_gettime(CLOCK_MONOTONIC, &time);
    int64_t timeNs = static_cast<int64_t>(time.tv_sec) * S2NS + static_cast<int64_t>(time.tv_nsec);
    int64_t encodeT = timeNs / static_cast<int64_t>(US2NS) - timeStamp;
    int64_t finishEncodeT = GetNowTimeStampUs();
    int64_t startEncodeT = finishEncodeT - encodeT;
    bufferOutput->SetInt64(START_ENCODE_TIME_US, startEncodeT);
    bufferOutput->SetInt64(FINISH_ENCODE_TIME_US, finishEncodeT);
    bufferOutput->SetInt64(TIME_STAMP_US, timeStamp);
    bufferOutput->SetInt32(FRAME_TYPE, flag);
    bufferOutput->SetInt32(INDEX, index_);
    index_++;
    std::vector<std::shared_ptr<DataBuffer>> nextInputBuffers;
    nextInputBuffers.push_back(bufferOutput);
    return EncodeDone(nextInputBuffers);
}

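// Forwards the encoded buffers to the next pipeline node when one exists; otherwise delivers the first buffer
// to the sink pipeline via OnProcessedVideoBuffer.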
int32_t EncodeDataProcess::EncodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Encoder done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the encoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the encoder failed to process data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node, output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    if (targetPipelineSink == nullptr) {
        DHLOGE("callbackPipelineSink_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSink->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void EncodeDataProcess::OnError()
{
    DHLOGD("EncodeDataProcess : OnError.");
    isEncoderProcess_.store(false);
    if (videoEncoder_ != nullptr) {
        videoEncoder_->Flush();
        videoEncoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    if (targetPipelineSink == nullptr) {
        DHLOGE("callbackPipelineSink_ is nullptr.");
        return;
    }
    targetPipelineSink->OnError(DataProcessErrorType::ERROR_PIPELINE_ENCODER);
}

void EncodeDataProcess::OnInputBufferAvailable(uint32_t index)
{
    DHLOGD("The available input buffer index : %u. No operation when using surface input.", index);
}

void EncodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video encoder output format is null.");
        return;
    }
    encodeOutputFormat_ = format;
}

void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, Media::AVCodecBufferInfo info,
    Media::AVCodecBufferFlag flag)
{
    if (!isEncoderProcess_.load()) {
        DHLOGE("EncodeNode encountered an error or is being released.");
        return;
    }
    DHLOGD("Video encode buffer info: presentation TimeUs %lld, size %d, offset %d, flag %d",
        info.presentationTimeUs, info.size, info.offset, flag);
    int32_t err = GetEncoderOutputBuffer(index, info, flag);
    if (err != DCAMERA_OK) {
        DHLOGE("Get encoder output buffer failed.");
        return;
    }
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before releasing the output buffer index.");
        return;
    }
    int32_t errRelease = videoEncoder_->ReleaseOutputBuffer(index);
    if (errRelease != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("The video encoder release output buffer failed, index : [%u].", index);
    }
}

VideoConfigParams EncodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams EncodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t EncodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    if (propertyName != surfaceStr_) {
        return DCAMERA_OK;
    }
    if (encodeProducerSurface_ == nullptr) {
        DHLOGD("EncodeDataProcess::GetProperty: get property failed, encode surface is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    return propertyCarrier.CarrySurfaceProperty(encodeProducerSurface_);
}
} // namespace DistributedHardware
} // namespace OHOS