/*
 * Copyright (c) 2022-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cmath>
#include <ctime>

#include "dcamera_hisysevent_adapter.h"
#include "dcamera_radar.h"
#include "dcamera_utils_tools.h"
#include "distributed_hardware_log.h"
#include "encode_data_process.h"
#include "encode_video_callback.h"
#include "graphic_common_c.h"

#ifndef DH_LOG_TAG
#define DH_LOG_TAG "DCDP_NODE_ENCODEC"
#endif

namespace OHOS {
namespace DistributedHardware {
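// Maps a resolution (expressed as width * height in pixels) to the encoder bitrate, in bit/s, used for that size.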
const std::map<int64_t, int32_t> EncodeDataProcess::ENCODER_BITRATE_TABLE = {
    std::map<int64_t, int32_t>::value_type(WIDTH_320_HEIGHT_240, BITRATE_500000),
    std::map<int64_t, int32_t>::value_type(WIDTH_480_HEIGHT_360, BITRATE_1110000),
    std::map<int64_t, int32_t>::value_type(WIDTH_640_HEIGHT_360, BITRATE_1500000),
    std::map<int64_t, int32_t>::value_type(WIDTH_640_HEIGHT_480, BITRATE_1800000),
    std::map<int64_t, int32_t>::value_type(WIDTH_720_HEIGHT_540, BITRATE_2100000),
    std::map<int64_t, int32_t>::value_type(WIDTH_960_HEIGHT_540, BITRATE_2300000),
    std::map<int64_t, int32_t>::value_type(WIDTH_960_HEIGHT_720, BITRATE_2800000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1280_HEIGHT_720, BITRATE_3400000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1440_HEIGHT_1080, BITRATE_5000000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1920_HEIGHT_1080, BITRATE_6000000),
};
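// Readable pixel-format names used in fault reports; indexed by the integer value of Videoformat.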
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

EncodeDataProcess::~EncodeDataProcess()
{
    if (isEncoderProcess_.load()) {
        DHLOGD("~EncodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

int32_t EncodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera EncodeNode start.");
    if (!(IsInEncoderRange(sourceConfig) && IsInEncoderRange(targetConfig))) {
        DHLOGE("Source config or target config are invalid.");
        return DCAMERA_BAD_VALUE;
    }
    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The EncodeNode cannot convert source VideoCodecType %{public}d to target VideoCodecType %{public}d.",
            sourceConfig.GetVideoCodecType(), targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable EncodeNode. The target VideoCodecType %{public}d is the same as the source VideoCodecType "
            "%{public}d.", sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        {
            std::unique_lock<std::mutex> lock(isEncoderProcessMtx_);
            isEncoderProcess_.store(true);
        }
        isEncoderProcessCond_.notify_one();
        return DCAMERA_OK;
    }

    int32_t err = InitEncoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video encoder failed.");
        ReleaseProcessNode();
        return err;
    }
    processedConfig = processedConfig_;
    {
        std::unique_lock<std::mutex> lock(isEncoderProcessMtx_);
        isEncoderProcess_.store(true);
    }
    isEncoderProcessCond_.notify_one();
    return DCAMERA_OK;
}

bool EncodeDataProcess::IsInEncoderRange(const VideoConfigParams& curConfig)
{
    bool isWidthValid = (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH);
    bool isHeightValid = (curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT);
    bool isFrameRateValid = (curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
    return isWidthValid && isHeightValid && isFrameRateValid;
}

bool EncodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        sourceConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

int32_t EncodeDataProcess::InitEncoder()
{
    DHLOGD("Init video encoder.");
    int32_t ret = ConfigureVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video encoder failed. ret %{public}d.", ret);
        return ret;
    }

    ret = StartVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video encoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_ENCODE_ERROR,
            CreateMsg("start video encoder failed, width: %d, height: %d, format: %s",
            sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }

    return DCAMERA_OK;
}

int32_t EncodeDataProcess::ConfigureVideoEncoder()
{
    int32_t ret = InitEncoderMetadataFormat();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Init video encoder metadata format failed. ret %{public}d.", ret);
    ret = InitEncoderBitrateFormat();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Init video encoder bitrate format failed. ret %{public}d.", ret);
    videoEncoder_ = MediaAVCodec::VideoEncoderFactory::CreateByMime(processType_);
    if (videoEncoder_ == nullptr) {
        DHLOGE("Create video encoder failed.");
        return DCAMERA_INIT_ERR;
    }
    encodeVideoCallback_ = std::make_shared<EncodeVideoCallback>(shared_from_this());
    ret = videoEncoder_->SetCallback(encodeVideoCallback_);
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set video encoder callback failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoEncoder_->Configure(metadataFormat_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video encoder metadata format failed. ret %{public}d.", ret);

    encodeProducerSurface_ = videoEncoder_->CreateInputSurface();
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_INIT_ERR,
        "%s", "Get video encoder producer surface failed.");

    return DCAMERA_OK;
}

int32_t EncodeDataProcess::InitEncoderMetadataFormat()
{
    processedConfig_ = sourceConfig_;
    switch (targetConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            metadataFormat_.PutIntValue("codec_profile", MediaAVCodec::AVCProfile::AVC_PROFILE_BASELINE);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H264);
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            metadataFormat_.PutIntValue("codec_profile", MediaAVCodec::HEVCProfile::HEVC_PROFILE_MAIN);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H265);
            break;
        case VideoCodecType::CODEC_MPEG4_ES:
            processType_ = "video/mp4v-es";
            metadataFormat_.PutIntValue("codec_profile",
                MediaAVCodec::MPEG4Profile::MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_MPEG4_ES);
            break;
        default:
            DHLOGE("The current codec type does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::YUVI420));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV12:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV12));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV21:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV21));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::RGBA_8888:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::RGBA));
            metadataFormat_.PutLongValue("max_input_size", NORM_RGB32_BUFFER_SIZE);
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }
    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", static_cast<int32_t>(sourceConfig_.GetWidth()));
    metadataFormat_.PutIntValue("height", static_cast<int32_t>(sourceConfig_.GetHeight()));
    metadataFormat_.PutDoubleValue("frame_rate", MAX_FRAME_RATE);
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::InitEncoderBitrateFormat()
{
    DHLOGD("Init video encoder bitrate format.");
    CHECK_AND_RETURN_RET_LOG(!(IsInEncoderRange(sourceConfig_) && IsInEncoderRange(targetConfig_)), DCAMERA_BAD_VALUE,
        "%{public}s", "Source config or target config are invalid.");
    metadataFormat_.PutIntValue("i_frame_interval", IDR_FRAME_INTERVAL_MS);
    metadataFormat_.PutIntValue("video_encode_bitrate_mode", MediaAVCodec::VideoEncodeBitrateMode::VBR);

    CHECK_AND_RETURN_RET_LOG(ENCODER_BITRATE_TABLE.empty(), DCAMERA_OK, "%{public}s",
        "ENCODER_BITRATE_TABLE is null, use the default bitrate of the encoder.");
    int64_t pixelformat = static_cast<int64_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight());
    int32_t matchedBitrate = BITRATE_6000000;
    int64_t minPixelformatDiff = WIDTH_1920_HEIGHT_1080 - pixelformat;
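    // Walk the bitrate table and keep the entry whose pixel count is closest to the source resolution.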
    for (auto it = ENCODER_BITRATE_TABLE.begin(); it != ENCODER_BITRATE_TABLE.end(); it++) {
        int64_t pixelformatDiff = std::abs(pixelformat - it->first);
        if (pixelformatDiff == 0) {
            matchedBitrate = it->second;
            break;
        }
        if (minPixelformatDiff >= pixelformatDiff) {
            minPixelformatDiff = pixelformatDiff;
            matchedBitrate = it->second;
        }
    }
    DHLOGD("Source config: width : %{public}d, height : %{public}d, matched bitrate %{public}d.",
        sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), matchedBitrate);
    metadataFormat_.PutIntValue("bitrate", matchedBitrate);
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::StartVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StartVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoEncoder_->Prepare();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video encoder prepare failed. ret %{public}d.", ret);
    ret = videoEncoder_->Start();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video encoder start failed. ret %{public}d.", ret);
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::StopVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StopVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoEncoder_->Flush();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoEncoder flush failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    ret = videoEncoder_->Stop();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoEncoder stop failed. ret %{public}d.", ret);
        isSuccess = false;
    }

    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

void EncodeDataProcess::ReleaseVideoEncoder()
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Start release videoEncoder.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before ReleaseVideoEncoder.");
        encodeProducerSurface_ = nullptr;
        encodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoEncoder();
    CHECK_AND_LOG(ret != DCAMERA_OK, "%{public}s", "StopVideoEncoder failed.");
    ret = videoEncoder_->Release();
    CHECK_AND_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "VideoEncoder release failed. ret %{public}d.", ret);
    encodeProducerSurface_ = nullptr;
    videoEncoder_ = nullptr;
    encodeVideoCallback_ = nullptr;
    DHLOGD("Release videoEncoder success.");
}

void EncodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%{public}zu] node : EncodeNode.", nodeRank_);
    isEncoderProcess_.store(false);
    ReleaseVideoEncoder();

    {
        std::lock_guard<std::mutex> lck(mtxHoldCount_);
        waitEncoderOutputCount_ = 0;
    }
    lastFeedEncoderInputBufferTimeUs_ = 0;
    inputTimeStampUs_ = 0;
    processType_ = "";

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%{public}zu] node : EncodeNode end.", nodeRank_);
}

int32_t EncodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in EncodeDataProcess.");
    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %{public}d is the same as the source VideoCodecType : %{public}d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return EncodeDone(inputBuffers);
    }
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before encoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffers[0]->Size() > NORM_YUV420_BUFFER_SIZE) {
        DHLOGE("EncodeNode input buffer size %{public}zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    CHECK_AND_RETURN_RET_LOG(!isEncoderProcess_.load(), DCAMERA_DISABLE_PROCESS, "%{public}s",
        "EncodeNode encountered an error or is being released.");
    int32_t err = FeedEncoderInputBuffer(inputBuffers[0]);
    CHECK_AND_RETURN_RET_LOG(err != DCAMERA_OK, err, "%{public}s", "Feed encoder input buffer failed.");
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::FeedEncoderInputBuffer(std::shared_ptr<DataBuffer>& inputBuffer)
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Feed encoder input buffer, buffer size %{public}zu.", inputBuffer->Size());
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Get encoder input producer surface failed.");
    sptr<SurfaceBuffer> surfacebuffer = GetEncoderInputSurfaceBuffer();
    CHECK_AND_RETURN_RET_LOG(surfacebuffer == nullptr, DCAMERA_BAD_OPERATE, "%{public}s",
        "Get encoder input producer surface buffer failed.");
    uint8_t *addr = static_cast<uint8_t *>(surfacebuffer->GetVirAddr());
    if (addr == nullptr) {
        DHLOGE("SurfaceBuffer address is nullptr");
        encodeProducerSurface_->CancelBuffer(surfacebuffer);
        return DCAMERA_BAD_OPERATE;
    }
    size_t size = static_cast<size_t>(surfacebuffer->GetSize());
    errno_t err = memcpy_s(addr, size, inputBuffer->Data(), inputBuffer->Size());
    CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR,
        "memcpy_s encoder input producer surfacebuffer failed, surBufSize %{public}zu.", size);

    inputTimeStampUs_ = GetEncoderTimeStamp();
    DHLOGD("Encoder input buffer size %{public}zu, timeStamp %{public}lld.", inputBuffer->Size(),
        (long long)inputTimeStampUs_);
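    // Attach the input timestamp to the surface buffer as extra data before flushing the frame to the
    // encoder's input surface.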
    if (surfacebuffer->GetExtraData() == nullptr) {
        DHLOGE("Surface buffer extra data is null.");
        return DCAMERA_BAD_OPERATE;
    }
    surfacebuffer->GetExtraData()->ExtraSet("timeStamp", inputTimeStampUs_);

    BufferFlushConfig flushConfig = { {0, 0, sourceConfig_.GetWidth(), sourceConfig_.GetHeight()}, 0};
    SurfaceError ret = encodeProducerSurface_->FlushBuffer(surfacebuffer, -1, flushConfig);
    CHECK_AND_RETURN_RET_LOG(ret != SURFACE_ERROR_OK, DCAMERA_BAD_OPERATE, "%s",
        "Flush encoder input producer surface buffer failed.");
    return DCAMERA_OK;
}

sptr<SurfaceBuffer> EncodeDataProcess::GetEncoderInputSurfaceBuffer()
{
    BufferRequestConfig requestConfig;
    requestConfig.width = sourceConfig_.GetWidth();
    requestConfig.height = sourceConfig_.GetHeight();
    requestConfig.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA;
    requestConfig.timeout = 0;
    requestConfig.strideAlignment = ENCODER_STRIDE_ALIGNMENT;
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_P;
            break;
        case Videoformat::NV12:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_SP;
            break;
        case Videoformat::NV21:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCRCB_420_SP;
            break;
        case Videoformat::RGBA_8888:
            requestConfig.format = PixelFormat::PIXEL_FMT_RGBA_8888;
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return nullptr;
    }
    sptr<SurfaceBuffer> surfacebuffer = nullptr;
    int32_t flushFence = -1;
    if (encodeProducerSurface_ == nullptr) {
        DHLOGE("Encode producer surface is null.");
        return nullptr;
    }
    GSError err = encodeProducerSurface_->RequestBuffer(surfacebuffer, flushFence, requestConfig);
    if (err != GSERROR_OK || surfacebuffer == nullptr) {
        DHLOGE("Request encoder input producer surface buffer failed, error code: %d.", err);
    }
    return surfacebuffer;
}

int64_t EncodeDataProcess::GetEncoderTimeStamp()
{
    if (inputTimeStampUs_ != 0) {
        lastFeedEncoderInputBufferTimeUs_ = inputTimeStampUs_;
    }
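    // GetNowTimeStampUs() reports microseconds; multiply by 1000 so the returned timestamp is in nanoseconds,
    // the unit the output path assumes when it computes the encode duration.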
    const int64_t nsPerUs = 1000L;
    int64_t nowTimeUs = GetNowTimeStampUs() * nsPerUs;
    return nowTimeUs;
}

void EncodeDataProcess::IncreaseWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (lastFeedEncoderInputBufferTimeUs_ == 0) {
        waitEncoderOutputCount_ += FIRST_FRAME_OUTPUT_NUM;
    } else {
        waitEncoderOutputCount_++;
    }
    DHLOGD("Wait encoder output frames number is %{public}d.", waitEncoderOutputCount_);
}

void EncodeDataProcess::ReduceWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitEncoderOutputCount_ <= 0) {
        DHLOGE("The waitEncoderOutputCount_ = %{public}d.", waitEncoderOutputCount_);
    }
    waitEncoderOutputCount_--;
    DHLOGD("Wait encoder output frames number is %{public}d.", waitEncoderOutputCount_);
}

int32_t EncodeDataProcess::GetEncoderOutputBuffer(uint32_t index, MediaAVCodec::AVCodecBufferInfo info,
    MediaAVCodec::AVCodecBufferFlag flag, std::shared_ptr<Media::AVSharedMemory>& buffer)
{
    DHLOGD("Get encoder output buffer.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before outputting encoded data.");
        return DCAMERA_BAD_VALUE;
    }
    if (buffer == nullptr) {
        DHLOGE("Failed to get the output shared memory, index : %{public}u", index);
        return DCAMERA_BAD_OPERATE;
    }

    CHECK_AND_RETURN_RET_LOG(info.size <= 0 || info.size > DATABUFF_MAX_SIZE, DCAMERA_BAD_VALUE,
        "AVCodecBufferInfo error, buffer size : %{public}d", info.size);
    size_t outputMemoDataSize = static_cast<size_t>(info.size);
    CHECK_AND_RETURN_RET_LOG(buffer->GetBase() == nullptr, DCAMERA_BAD_OPERATE,
        "Sink point check failed: Source buffer base is null.");
    int64_t sourceAllocatedSize = buffer->GetSize();
    if (sourceAllocatedSize < 0) {
        DHLOGE("Sink point check failed: buffer->GetSize() returned a negative error code.");
        return DCAMERA_BAD_VALUE;
    }
    if (outputMemoDataSize > static_cast<size_t>(sourceAllocatedSize)) {
        DHLOGE("Sink point check failed: outputMemoDataSize exceeds source allocated size.");
        return DCAMERA_BAD_VALUE;
    }
    DHLOGD("Encoder output buffer size : %{public}zu", outputMemoDataSize);
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(outputMemoDataSize);
    CHECK_AND_RETURN_RET_LOG(bufferOutput->Data() == nullptr, DCAMERA_MEMORY_OPT_ERROR,
        "Sink point check failed: Failed to allocate output buffer.");
    errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(),
        buffer->GetBase(), outputMemoDataSize);
    CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR, "%{public}s", "memcpy_s buffer failed.");
    int64_t timeStamp = info.presentationTimeUs;
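    // Estimate the encode latency: the frame timestamp is in nanoseconds (see GetEncoderTimeStamp), so the
    // delta to the current CLOCK_MONOTONIC time, converted to microseconds, approximates the encode duration.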
    struct timespec time = {0, 0};
    clock_gettime(CLOCK_MONOTONIC, &time);
    int64_t timeNs = static_cast<int64_t>(time.tv_sec) * S2NS + static_cast<int64_t>(time.tv_nsec);
    int64_t encodeT = (timeNs - timeStamp) / static_cast<int64_t>(US2NS);
    int64_t finishEncodeT = GetNowTimeStampUs();
    int64_t startEncodeT = finishEncodeT - encodeT;
    bufferOutput->SetInt64(START_ENCODE_TIME_US, startEncodeT);
    bufferOutput->SetInt64(FINISH_ENCODE_TIME_US, finishEncodeT);
    bufferOutput->SetInt64(TIME_STAMP_US, timeStamp);
    bufferOutput->SetInt32(FRAME_TYPE, flag);
    bufferOutput->SetInt32(INDEX, index_);
    index_++;
    std::vector<std::shared_ptr<DataBuffer>> nextInputBuffers;
    nextInputBuffers.push_back(bufferOutput);
    return EncodeDone(nextInputBuffers);
}

int32_t EncodeDataProcess::EncodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Encoder done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the encoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        CHECK_AND_LOG(err != DCAMERA_OK, "%{public}s", "A node after the encoder failed to process the data.");
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    if (targetPipelineSink == nullptr) {
        DHLOGE("callbackPipelineSink_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSink->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void EncodeDataProcess::OnError()
{
    DHLOGD("EncodeDataProcess : OnError.");
    isEncoderProcess_.store(false);
    if (videoEncoder_ != nullptr) {
        videoEncoder_->Flush();
        videoEncoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    CHECK_AND_RETURN_LOG(targetPipelineSink == nullptr, "%{public}s", "callbackPipelineSink_ is nullptr.");
    targetPipelineSink->OnError(DataProcessErrorType::ERROR_PIPELINE_ENCODER);
}

void EncodeDataProcess::OnInputBufferAvailable(uint32_t index, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    DHLOGD("The available input buffer index : %{public}u. No operation when surface input is used.", index);
}

void EncodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video encoder output format is null.");
        return;
    }
    encodeOutputFormat_ = format;
}

void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, MediaAVCodec::AVCodecBufferInfo info,
    MediaAVCodec::AVCodecBufferFlag flag, std::shared_ptr<Media::AVSharedMemory> buffer)
{
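    // Do not handle output callbacks until InitNode() has marked the node ready; callbacks that arrive while
    // the node is being torn down or after an error time out here and are dropped.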
    DHLOGI("Waiting for encoder process to become available...");
    std::unique_lock<std::mutex> lock(isEncoderProcessMtx_);
    bool timeOut = !isEncoderProcessCond_.wait_for(lock, TIMEOUT_3_SEC, [this] {
        return isEncoderProcess_.load();
    });
    if (timeOut) {
        DHLOGE("Timed out waiting for encoder process after 3 seconds.");
        return;
    }
    DHLOGD("Video encode buffer info: presentation TimeUs %{public}" PRId64", size %{public}d, offset %{public}d, "
        "flag %{public}d", info.presentationTimeUs, info.size, info.offset, flag);
    int32_t err = GetEncoderOutputBuffer(index, info, flag, buffer);
    if (err != DCAMERA_OK) {
        DHLOGE("Get encoder output buffer failed.");
        return;
    }
    CHECK_AND_RETURN_LOG(videoEncoder_ == nullptr, "%{public}s",
        "The video encoder does not exist before releasing the output buffer index.");
    int32_t errRelease = videoEncoder_->ReleaseOutputBuffer(index);
    CHECK_AND_LOG(errRelease != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "The video encoder release output buffer failed, index : [%{public}u].", index);

VideoConfigParams EncodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams EncodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

int32_t EncodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    if (propertyName != surfaceStr_) {
        return DCAMERA_OK;
    }
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_BAD_VALUE, "%{public}s",
        "EncodeDataProcess::GetProperty: encode dataProcess get property fail, encode surface is nullptr.");
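    // Clear BUFFER_USAGE_VIDEO_ENCODER from the surface's default usage before handing the producer surface
    // out through the property carrier.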
    encodeProducerSurface_->SetDefaultUsage(encodeProducerSurface_->GetDefaultUsage() & (~BUFFER_USAGE_VIDEO_ENCODER));
    return propertyCarrier.CarrySurfaceProperty(encodeProducerSurface_);
}
} // namespace DistributedHardware
} // namespace OHOS