/*
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cinttypes>
#include <cmath>
#include <cstdlib>
#include <ctime>

#include "dcamera_hisysevent_adapter.h"
#include "dcamera_radar.h"
#include "dcamera_utils_tools.h"
#include "distributed_hardware_log.h"
#include "encode_data_process.h"
#include "encode_video_callback.h"
#include "graphic_common_c.h"

#ifndef DH_LOG_TAG
#define DH_LOG_TAG "DCDP_NODE_ENCODEC"
#endif

namespace OHOS {
namespace DistributedHardware {
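// Bitrate lookup table: the key is the frame area (width * height) in pixels and the
// value is the encoder bitrate to use for that resolution.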
const std::map<int64_t, int32_t> EncodeDataProcess::ENCODER_BITRATE_TABLE = {
    std::map<int64_t, int32_t>::value_type(WIDTH_320_HEIGHT_240, BITRATE_500000),
    std::map<int64_t, int32_t>::value_type(WIDTH_480_HEIGHT_360, BITRATE_1110000),
    std::map<int64_t, int32_t>::value_type(WIDTH_640_HEIGHT_360, BITRATE_1500000),
    std::map<int64_t, int32_t>::value_type(WIDTH_640_HEIGHT_480, BITRATE_1800000),
    std::map<int64_t, int32_t>::value_type(WIDTH_720_HEIGHT_540, BITRATE_2100000),
    std::map<int64_t, int32_t>::value_type(WIDTH_960_HEIGHT_540, BITRATE_2300000),
    std::map<int64_t, int32_t>::value_type(WIDTH_960_HEIGHT_720, BITRATE_2800000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1280_HEIGHT_720, BITRATE_3400000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1440_HEIGHT_1080, BITRATE_5000000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1920_HEIGHT_1080, BITRATE_6000000),
};
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

EncodeDataProcess::~EncodeDataProcess()
{
    if (isEncoderProcess_.load()) {
        DHLOGD("~EncodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

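// Initializes the encode node: validates the source/target configs, bypasses encoding when
// both sides already use the same codec type, and otherwise creates and starts the encoder.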
int32_t EncodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera EncodeNode start.");
    if (!(IsInEncoderRange(sourceConfig) && IsInEncoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }
    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The EncodeNode cannot convert source VideoCodecType %{public}d to target VideoCodecType %{public}d.",
            sourceConfig.GetVideoCodecType(), targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable EncodeNode. The target VideoCodecType %{public}d is the same as the source VideoCodecType "
            "%{public}d.", sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isEncoderProcess_.store(true);
        return DCAMERA_OK;
    }

    int32_t err = InitEncoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video encoder failed.");
        ReleaseProcessNode();
        return err;
    }
    processedConfig = processedConfig_;
    isEncoderProcess_.store(true);
    return DCAMERA_OK;
}

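// Checks that the width, height, and frame rate of the given config all fall within the
// range supported by the encoder.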
bool EncodeDataProcess::IsInEncoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

bool EncodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        sourceConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

int32_t EncodeDataProcess::InitEncoder()
{
    DHLOGD("Init video encoder.");
    int32_t ret = ConfigureVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video encoder failed. ret %{public}d.", ret);
        return ret;
    }

    ret = StartVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video encoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_ENCODE_ERROR,
            CreateMsg("start video encoder failed, width: %d, height: %d, format: %s",
            sourceConfig_.GetWidth(), sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }

    return DCAMERA_OK;
}

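// Builds the encoder metadata and bitrate formats, creates the encoder for the selected
// MIME type, registers the callback, configures the encoder, and obtains its input surface.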
int32_t EncodeDataProcess::ConfigureVideoEncoder()
{
    int32_t ret = InitEncoderMetadataFormat();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Init video encoder metadata format failed. ret %{public}d.", ret);
    ret = InitEncoderBitrateFormat();
    CHECK_AND_RETURN_RET_LOG(ret != DCAMERA_OK, ret,
        "Init video encoder bitrate format failed. ret %{public}d.", ret);
    videoEncoder_ = MediaAVCodec::VideoEncoderFactory::CreateByMime(processType_);
    if (videoEncoder_ == nullptr) {
        DHLOGE("Create video encoder failed.");
        return DCAMERA_INIT_ERR;
    }
    encodeVideoCallback_ = std::make_shared<EncodeVideoCallback>(shared_from_this());
    ret = videoEncoder_->SetCallback(encodeVideoCallback_);
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("Set video encoder callback failed. ret %{public}d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoEncoder_->Configure(metadataFormat_);
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Set video encoder metadata format failed. ret %{public}d.", ret);

    encodeProducerSurface_ = videoEncoder_->CreateInputSurface();
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_INIT_ERR,
        "%s", "Get video encoder producer surface failed.");

    return DCAMERA_OK;
}

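// Fills metadataFormat_ according to the target codec type (MIME and profile) and the
// source pixel format (pixel_format and max_input_size), plus width, height and frame rate.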
int32_t EncodeDataProcess::InitEncoderMetadataFormat()
{
    processedConfig_ = sourceConfig_;
    switch (targetConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            metadataFormat_.PutIntValue("codec_profile", MediaAVCodec::AVCProfile::AVC_PROFILE_BASELINE);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H264);
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            metadataFormat_.PutIntValue("codec_profile", MediaAVCodec::HEVCProfile::HEVC_PROFILE_MAIN);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H265);
            break;
        case VideoCodecType::CODEC_MPEG4_ES:
            processType_ = "video/mp4v-es";
            metadataFormat_.PutIntValue("codec_profile",
                MediaAVCodec::MPEG4Profile::MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY);
            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_MPEG4_ES);
            break;
        default:
            DHLOGE("The current codec type does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::YUVI420));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV12:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV12));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::NV21:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::NV21));
            metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
            break;
        case Videoformat::RGBA_8888:
            metadataFormat_.PutIntValue("pixel_format", static_cast<int32_t>(MediaAVCodec::VideoPixelFormat::RGBA));
            metadataFormat_.PutLongValue("max_input_size", NORM_RGB32_BUFFER_SIZE);
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }
    metadataFormat_.PutStringValue("codec_mime", processType_);
    metadataFormat_.PutIntValue("width", static_cast<int32_t>(sourceConfig_.GetWidth()));
    metadataFormat_.PutIntValue("height", static_cast<int32_t>(sourceConfig_.GetHeight()));
    metadataFormat_.PutDoubleValue("frame_rate", MAX_FRAME_RATE);
    return DCAMERA_OK;
}

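// Configures the I-frame interval, VBR mode, and the bitrate: the ENCODER_BITRATE_TABLE
// entry whose pixel area is closest to the source width * height is selected; if the table
// is empty, the encoder's default bitrate is used.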
int32_t EncodeDataProcess::InitEncoderBitrateFormat()
{
    DHLOGD("Init video encoder bitrate format.");
    CHECK_AND_RETURN_RET_LOG(!(IsInEncoderRange(sourceConfig_) && IsInEncoderRange(targetConfig_)), DCAMERA_BAD_VALUE,
        "%{public}s", "Source config or target config is invalid.");
    metadataFormat_.PutIntValue("i_frame_interval", IDR_FRAME_INTERVAL_MS);
    metadataFormat_.PutIntValue("video_encode_bitrate_mode", MediaAVCodec::VideoEncodeBitrateMode::VBR);

    CHECK_AND_RETURN_RET_LOG(ENCODER_BITRATE_TABLE.empty(), DCAMERA_OK, "%{public}s",
        "ENCODER_BITRATE_TABLE is empty, use the default bitrate of the encoder.");
    int64_t pixelCount = static_cast<int64_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight());
    int32_t matchedBitrate = BITRATE_6000000;
    int64_t minPixelCountDiff = WIDTH_1920_HEIGHT_1080 - pixelCount;
    for (auto it = ENCODER_BITRATE_TABLE.begin(); it != ENCODER_BITRATE_TABLE.end(); ++it) {
        int64_t pixelCountDiff = std::abs(pixelCount - it->first);
        if (pixelCountDiff == 0) {
            matchedBitrate = it->second;
            break;
        }
        if (minPixelCountDiff >= pixelCountDiff) {
            minPixelCountDiff = pixelCountDiff;
            matchedBitrate = it->second;
        }
    }
    DHLOGD("Source config: width : %{public}d, height : %{public}d, matched bitrate %{public}d.",
        sourceConfig_.GetWidth(), sourceConfig_.GetHeight(), matchedBitrate);
    metadataFormat_.PutIntValue("bitrate", matchedBitrate);
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::StartVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StartVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoEncoder_->Prepare();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video encoder prepare failed. ret %{public}d.", ret);
    ret = videoEncoder_->Start();
    CHECK_AND_RETURN_RET_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK, DCAMERA_INIT_ERR,
        "Video encoder start failed. ret %{public}d.", ret);
    return DCAMERA_OK;
}

int32_t EncodeDataProcess::StopVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StopVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoEncoder_->Flush();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoEncoder flush failed. ret %{public}d.", ret);
        isSuccess = false;
    }
    ret = videoEncoder_->Stop();
    if (ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK) {
        DHLOGE("VideoEncoder stop failed. ret %{public}d.", ret);
        isSuccess = false;
    }

    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

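// Stops and releases the encoder instance and drops the references to its input surface
// and callback. Safe to call when the encoder was never created.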
void EncodeDataProcess::ReleaseVideoEncoder()
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Start release videoEncoder.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before ReleaseVideoEncoder.");
        encodeProducerSurface_ = nullptr;
        encodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoEncoder();
    CHECK_AND_LOG(ret != DCAMERA_OK, "%{public}s", "StopVideoEncoder failed.");
    ret = videoEncoder_->Release();
    CHECK_AND_LOG(ret != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "VideoEncoder release failed. ret %{public}d.", ret);
    encodeProducerSurface_ = nullptr;
    videoEncoder_ = nullptr;
    encodeVideoCallback_ = nullptr;
    DHLOGD("Release videoEncoder success.");
}

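// Releases this node: stops the encoder, resets the bookkeeping state, and recursively
// releases the next node in the processing pipeline.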
void EncodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%{public}zu] node : EncodeNode.", nodeRank_);
    isEncoderProcess_.store(false);
    ReleaseVideoEncoder();

    waitEncoderOutputCount_ = 0;
    lastFeedEncoderInputBufferTimeUs_ = 0;
    inputTimeStampUs_ = 0;
    processType_ = "";

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%{public}zu] node : EncodeNode end.", nodeRank_);
}

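// Entry point of the node: validates the incoming buffers, passes them straight to
// EncodeDone() when no transcoding is required, and otherwise feeds the first buffer into
// the encoder's input surface.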
int32_t EncodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in EncodeDataProcess.");
    if (inputBuffers.empty() || inputBuffers[0] == nullptr) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %{public}d is the same as the source VideoCodecType : %{public}d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return EncodeDone(inputBuffers);
    }
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before encoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffers[0]->Size() > NORM_YUV420_BUFFER_SIZE) {
        DHLOGE("EncodeNode input buffer size %{public}zu error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    CHECK_AND_RETURN_RET_LOG(!isEncoderProcess_.load(), DCAMERA_DISABLE_PROCESS, "%{public}s",
        "EncodeNode has encountered an error or started releasing.");
    int32_t err = FeedEncoderInputBuffer(inputBuffers[0]);
    CHECK_AND_RETURN_RET_LOG(err != DCAMERA_OK, err, "%{public}s", "Feed encoder input buffer failed.");
    return DCAMERA_OK;
}

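// Copies one raw frame into a buffer requested from the encoder's input producer surface,
// stamps it with the current timestamp via the surface extra data, and flushes it so the
// encoder can consume it.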
int32_t EncodeDataProcess::FeedEncoderInputBuffer(std::shared_ptr<DataBuffer>& inputBuffer)
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Feed encoder input buffer, buffer size %{public}zu.", inputBuffer->Size());
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_INIT_ERR, "%{public}s",
        "Get encoder input producer surface failed.");
    sptr<SurfaceBuffer> surfacebuffer = GetEncoderInputSurfaceBuffer();
    CHECK_AND_RETURN_RET_LOG(surfacebuffer == nullptr, DCAMERA_BAD_OPERATE, "%{public}s",
        "Get encoder input producer surface buffer failed.");
    uint8_t *addr = static_cast<uint8_t *>(surfacebuffer->GetVirAddr());
    if (addr == nullptr) {
        DHLOGE("SurfaceBuffer address is nullptr");
        encodeProducerSurface_->CancelBuffer(surfacebuffer);
        return DCAMERA_BAD_OPERATE;
    }
    size_t size = static_cast<size_t>(surfacebuffer->GetSize());
    errno_t err = memcpy_s(addr, size, inputBuffer->Data(), inputBuffer->Size());
    CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR,
        "memcpy_s encoder input producer surfacebuffer failed, surBufSize %{public}zu.", size);

    inputTimeStampUs_ = GetEncoderTimeStamp();
    DHLOGD("Encoder input buffer size %{public}zu, timeStamp %{public}lld.", inputBuffer->Size(),
        (long long)inputTimeStampUs_);
    if (surfacebuffer->GetExtraData() == nullptr) {
        DHLOGE("Surface buffer extra data is null.");
        return DCAMERA_BAD_OPERATE;
    }
    surfacebuffer->GetExtraData()->ExtraSet("timeStamp", inputTimeStampUs_);

    BufferFlushConfig flushConfig = { {0, 0, sourceConfig_.GetWidth(), sourceConfig_.GetHeight()}, 0};
    SurfaceError ret = encodeProducerSurface_->FlushBuffer(surfacebuffer, -1, flushConfig);
    CHECK_AND_RETURN_RET_LOG(ret != SURFACE_ERROR_OK, DCAMERA_BAD_OPERATE, "%s",
        "Flush encoder input producer surface buffer failed.");
    return DCAMERA_OK;
}

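// Requests a writable buffer from the encoder's input producer surface, mapping the source
// video format onto the corresponding graphic PixelFormat.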
sptr<SurfaceBuffer> EncodeDataProcess::GetEncoderInputSurfaceBuffer()
{
    BufferRequestConfig requestConfig;
    requestConfig.width = sourceConfig_.GetWidth();
    requestConfig.height = sourceConfig_.GetHeight();
    requestConfig.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA;
    requestConfig.timeout = 0;
    requestConfig.strideAlignment = ENCODER_STRIDE_ALIGNMENT;
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_P;
            break;
        case Videoformat::NV12:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_SP;
            break;
        case Videoformat::NV21:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCRCB_420_SP;
            break;
        case Videoformat::RGBA_8888:
            requestConfig.format = PixelFormat::PIXEL_FMT_RGBA_8888;
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return nullptr;
    }
    sptr<SurfaceBuffer> surfacebuffer = nullptr;
    int32_t flushFence = -1;
    if (encodeProducerSurface_ == nullptr) {
        DHLOGE("Encode producer surface is null.");
        return nullptr;
    }
    GSError err = encodeProducerSurface_->RequestBuffer(surfacebuffer, flushFence, requestConfig);
    if (err != GSERROR_OK || surfacebuffer == nullptr) {
        DHLOGE("Request encoder input producer surface buffer failed, error code: %d.", err);
    }
    return surfacebuffer;
}

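// Returns the timestamp attached to the next input frame. The current time in microseconds
// is converted to nanoseconds (via nsPerUs), matching the nanosecond-based duration
// calculation in GetEncoderOutputBuffer().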
int64_t EncodeDataProcess::GetEncoderTimeStamp()
{
    if (inputTimeStampUs_ != 0) {
        lastFeedEncoderInputBufferTimeUs_ = inputTimeStampUs_;
    }
    const int64_t nsPerUs = 1000L;
    int64_t nowTimeNs = GetNowTimeStampUs() * nsPerUs;
    return nowTimeNs;
}

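// Bookkeeping for frames that have been fed to the encoder but whose encoded output has not
// yet arrived. On the first feed (no previous feed timestamp recorded) the counter grows by
// FIRST_FRAME_OUTPUT_NUM; afterwards it grows by one per frame.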
void EncodeDataProcess::IncreaseWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (lastFeedEncoderInputBufferTimeUs_ == 0) {
        waitEncoderOutputCount_ += FIRST_FRAME_OUTPUT_NUM;
    } else {
        waitEncoderOutputCount_++;
    }
    DHLOGD("Wait encoder output frames number is %{public}d.", waitEncoderOutputCount_);
}

void EncodeDataProcess::ReduceWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitEncoderOutputCount_ <= 0) {
        DHLOGE("The waitEncoderOutputCount_ = %{public}d.", waitEncoderOutputCount_);
    }
    waitEncoderOutputCount_--;
    DHLOGD("Wait encoder output frames number is %{public}d.", waitEncoderOutputCount_);
}

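// Copies one encoded frame out of the codec's shared memory into a DataBuffer, attaches
// timing metadata (start/finish encode time, timestamp, frame type, index), and forwards it
// to EncodeDone().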
int32_t EncodeDataProcess::GetEncoderOutputBuffer(uint32_t index, MediaAVCodec::AVCodecBufferInfo info,
    MediaAVCodec::AVCodecBufferFlag flag, std::shared_ptr<Media::AVSharedMemory>& buffer)
{
    DHLOGD("Get encoder output buffer.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before outputting encoded data.");
        return DCAMERA_BAD_VALUE;
    }
    if (buffer == nullptr) {
        DHLOGE("Failed to get the output shared memory, index : %{public}u", index);
        return DCAMERA_BAD_OPERATE;
    }

    CHECK_AND_RETURN_RET_LOG(info.size <= 0 || info.size > DATABUFF_MAX_SIZE, DCAMERA_BAD_VALUE,
        "AVCodecBufferInfo error, buffer size : %{public}d", info.size);
    size_t outputMemoDataSize = static_cast<size_t>(info.size);
    DHLOGD("Encoder output buffer size : %{public}zu", outputMemoDataSize);
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(outputMemoDataSize);
    errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(),
        buffer->GetBase(), outputMemoDataSize);
    CHECK_AND_RETURN_RET_LOG(err != EOK, DCAMERA_MEMORY_OPT_ERROR, "%{public}s", "memcpy_s buffer failed.");
    int64_t timeStamp = info.presentationTimeUs;
    struct timespec time = {0, 0};
    clock_gettime(CLOCK_MONOTONIC, &time);
    int64_t timeNs = static_cast<int64_t>(time.tv_sec) * S2NS + static_cast<int64_t>(time.tv_nsec);
    int64_t encodeT = (timeNs - timeStamp) / static_cast<int64_t>(US2NS);
    int64_t finishEncodeT = GetNowTimeStampUs();
    int64_t startEncodeT = finishEncodeT - encodeT;
    bufferOutput->SetInt64(START_ENCODE_TIME_US, startEncodeT);
    bufferOutput->SetInt64(FINISH_ENCODE_TIME_US, finishEncodeT);
    bufferOutput->SetInt64(TIME_STAMP_US, timeStamp);
    bufferOutput->SetInt32(FRAME_TYPE, flag);
    bufferOutput->SetInt32(INDEX, index_);
    index_++;
    std::vector<std::shared_ptr<DataBuffer>> nextInputBuffers;
    nextInputBuffers.push_back(bufferOutput);
    return EncodeDone(nextInputBuffers);
}

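// Hands the encoded buffers to the next pipeline node if one exists; otherwise this is the
// last node and the first buffer is delivered to the sink pipeline callback.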
int32_t EncodeDataProcess::EncodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Encoder done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the encoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        CHECK_AND_LOG(err != DCAMERA_OK, "%{public}s", "A node after the encoder failed to process data.");
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    if (targetPipelineSink == nullptr) {
        DHLOGE("callbackPipelineSink_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSink->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

void EncodeDataProcess::OnError()
{
    DHLOGD("EncodeDataProcess : OnError.");
    isEncoderProcess_.store(false);
    if (videoEncoder_ != nullptr) {
        videoEncoder_->Flush();
        videoEncoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    CHECK_AND_RETURN_LOG(targetPipelineSink == nullptr, "%{public}s", "callbackPipelineSink_ is nullptr.");
    targetPipelineSink->OnError(DataProcessErrorType::ERROR_PIPELINE_ENCODER);
}

void EncodeDataProcess::OnInputBufferAvailable(uint32_t index, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    DHLOGD("The available input buffer index : %{public}u. No operation is needed when surface input is used.",
        index);
}

void EncodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The changed video encoder output format is null.");
        return;
    }
    encodeOutputFormat_ = format;
}

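// Encoder output callback: copies the encoded frame via GetEncoderOutputBuffer() and then
// returns the codec buffer with ReleaseOutputBuffer().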
void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, MediaAVCodec::AVCodecBufferInfo info,
    MediaAVCodec::AVCodecBufferFlag flag, std::shared_ptr<Media::AVSharedMemory> buffer)
{
    if (!isEncoderProcess_.load()) {
        DHLOGE("EncodeNode has encountered an error or started releasing.");
        return;
    }
    DHLOGD("Video encode buffer info: presentation TimeUs %{public}" PRId64", size %{public}d, offset %{public}d, "
        "flag %{public}d", info.presentationTimeUs, info.size, info.offset, flag);
    int32_t err = GetEncoderOutputBuffer(index, info, flag, buffer);
    if (err != DCAMERA_OK) {
        DHLOGE("Get encoder output buffer failed.");
        return;
    }
    CHECK_AND_RETURN_LOG(videoEncoder_ == nullptr, "%{public}s",
        "The video encoder does not exist before releasing the output buffer.");
    int32_t errRelease = videoEncoder_->ReleaseOutputBuffer(index);
    CHECK_AND_LOG(errRelease != MediaAVCodec::AVCodecServiceErrCode::AVCS_ERR_OK,
        "The video encoder release output buffer failed, index : [%{public}u].", index);
}

VideoConfigParams EncodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams EncodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}

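// Exposes the encoder's input surface through the property carrier when the surface
// property is requested; other property names are ignored.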
int32_t EncodeDataProcess::GetProperty(const std::string& propertyName, PropertyCarrier& propertyCarrier)
{
    if (propertyName != surfaceStr_) {
        return DCAMERA_OK;
    }
    CHECK_AND_RETURN_RET_LOG(encodeProducerSurface_ == nullptr, DCAMERA_BAD_VALUE, "%{public}s",
        "EncodeDataProcess::GetProperty: encode dataProcess get property fail, encode surface is nullptr.");
    encodeProducerSurface_->SetDefaultUsage(encodeProducerSurface_->GetDefaultUsage() & (~BUFFER_USAGE_VIDEO_ENCODER));
    return propertyCarrier.CarrySurfaceProperty(encodeProducerSurface_);
}
} // namespace DistributedHardware
} // namespace OHOS
585