/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "encode_data_process.h"

#include <cmath>
#include <cstdlib>

#include "distributed_hardware_log.h"
#include "graphic_common_c.h"

#include "dcamera_hisysevent_adapter.h"
#include "dcamera_utils_tools.h"
#include "encode_video_callback.h"

#ifndef DH_LOG_TAG
#define DH_LOG_TAG "DCDP_NODE_ENCODEC"
#endif

namespace OHOS {
namespace DistributedHardware {
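// Recommended encoder bitrates indexed by resolution (width * height in pixels).
// InitEncoderBitrateFormat() picks the entry whose pixel count is closest to the
// source resolution; an empty table falls back to the encoder's default bitrate.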
const std::map<int64_t, int32_t> EncodeDataProcess::ENCODER_BITRATE_TABLE = {
    std::map<int64_t, int32_t>::value_type(WIDTH_320_HEIGHT_240, BITRATE_500000),
    std::map<int64_t, int32_t>::value_type(WIDTH_480_HEIGHT_360, BITRATE_1110000),
    std::map<int64_t, int32_t>::value_type(WIDTH_640_HEIGHT_360, BITRATE_1500000),
    std::map<int64_t, int32_t>::value_type(WIDTH_640_HEIGHT_480, BITRATE_1800000),
    std::map<int64_t, int32_t>::value_type(WIDTH_720_HEIGHT_540, BITRATE_2100000),
    std::map<int64_t, int32_t>::value_type(WIDTH_960_HEIGHT_540, BITRATE_2300000),
    std::map<int64_t, int32_t>::value_type(WIDTH_960_HEIGHT_720, BITRATE_2800000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1280_HEIGHT_720, BITRATE_3400000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1440_HEIGHT_1080, BITRATE_5000000),
    std::map<int64_t, int32_t>::value_type(WIDTH_1920_HEIGHT_1080, BITRATE_6000000),
};
const std::string ENUM_VIDEOFORMAT_STRINGS[] = {
    "YUVI420", "NV12", "NV21", "RGBA_8888"
};

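// Releases the whole node chain if the encoder is still running when the node is destroyed.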
EncodeDataProcess::~EncodeDataProcess()
{
    if (isEncoderProcess_.load()) {
        DHLOGD("~EncodeDataProcess : ReleaseProcessNode.");
        ReleaseProcessNode();
    }
}

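/*
 * Sets up the encode node: validates the source/target configs, bypasses encoding when the
 * codec types already match, and otherwise creates and starts the underlying video encoder.
 *
 * A minimal usage sketch (assuming the caller already built the VideoConfigParams and owns the
 * node through a shared_ptr; the surrounding pipeline normally drives these calls):
 *
 *     VideoConfigParams processed;
 *     auto node = std::make_shared<EncodeDataProcess>(...); // constructor args per pipeline setup
 *     if (node->InitNode(sourceConfig, targetConfig, processed) == DCAMERA_OK) {
 *         node->ProcessData(inputBuffers);   // std::vector<std::shared_ptr<DataBuffer>> of raw frames
 *         node->ReleaseProcessNode();        // tear down when done
 *     }
 */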
int32_t EncodeDataProcess::InitNode(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig,
    VideoConfigParams& processedConfig)
{
    DHLOGD("Init DCamera EncodeNode start.");
    if (!(IsInEncoderRange(sourceConfig) && IsInEncoderRange(targetConfig))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }
    if (!IsConvertible(sourceConfig, targetConfig)) {
        DHLOGE("The EncodeNode cannot convert source VideoCodecType %d to target VideoCodecType %d.",
            sourceConfig.GetVideoCodecType(), targetConfig.GetVideoCodecType());
        return DCAMERA_BAD_TYPE;
    }

    sourceConfig_ = sourceConfig;
    targetConfig_ = targetConfig;
    if (sourceConfig_.GetVideoCodecType() == targetConfig_.GetVideoCodecType()) {
        DHLOGD("Disable EncodeNode. The target VideoCodecType %d is the same as the source VideoCodecType %d.",
            sourceConfig_.GetVideoCodecType(), targetConfig_.GetVideoCodecType());
        processedConfig_ = sourceConfig;
        processedConfig = processedConfig_;
        isEncoderProcess_.store(true);
        return DCAMERA_OK;
    }

    int32_t err = InitEncoder();
    if (err != DCAMERA_OK) {
        DHLOGE("Init video encoder failed.");
        ReleaseProcessNode();
        return err;
    }
    processedConfig = processedConfig_;
    isEncoderProcess_.store(true);
    return DCAMERA_OK;
}

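// Checks that width, height, and frame rate all fall within the encoder's supported range.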
bool EncodeDataProcess::IsInEncoderRange(const VideoConfigParams& curConfig)
{
    return (curConfig.GetWidth() >= MIN_VIDEO_WIDTH && curConfig.GetWidth() <= MAX_VIDEO_WIDTH &&
        curConfig.GetHeight() >= MIN_VIDEO_HEIGHT && curConfig.GetHeight() <= MAX_VIDEO_HEIGHT &&
        curConfig.GetFrameRate() >= MIN_FRAME_RATE && curConfig.GetFrameRate() <= MAX_FRAME_RATE);
}

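// Encoding is possible only when the codec types already match or the source is raw (NO_CODEC) data.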
bool EncodeDataProcess::IsConvertible(const VideoConfigParams& sourceConfig, const VideoConfigParams& targetConfig)
{
    return (sourceConfig.GetVideoCodecType() == targetConfig.GetVideoCodecType() ||
        sourceConfig.GetVideoCodecType() == VideoCodecType::NO_CODEC);
}

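// Configures and starts the video encoder; reports a HiSysEvent when the encoder fails to start.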
int32_t EncodeDataProcess::InitEncoder()
{
    DHLOGD("Init video encoder.");
    int32_t ret = ConfigureVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Configure video encoder failed. Error code %d.", ret);
        return ret;
    }

    ret = StartVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("Start video encoder failed.");
        ReportDcamerOptFail(DCAMERA_OPT_FAIL, DCAMERA_ENCODE_ERROR,
            CreateMsg("start video encoder failed, width: %d, height: %d, format: %s", sourceConfig_.GetWidth(),
            sourceConfig_.GetHeight(),
            ENUM_VIDEOFORMAT_STRINGS[static_cast<int32_t>(sourceConfig_.GetVideoformat())].c_str()));
        return ret;
    }

    return DCAMERA_OK;
}

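// Builds the encoder metadata and bitrate format, creates the encoder by MIME type,
// registers the callback, applies the format, and obtains the input producer surface.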
int32_t EncodeDataProcess::ConfigureVideoEncoder()
{
    int32_t ret = InitEncoderMetadataFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video encoder metadata format failed. Error code %d.", ret);
        return ret;
    }
    ret = InitEncoderBitrateFormat();
    if (ret != DCAMERA_OK) {
        DHLOGE("Init video encoder bitrate format failed. Error code %d.", ret);
        return ret;
    }

    videoEncoder_ = Media::VideoEncoderFactory::CreateByMime(processType_);
    if (videoEncoder_ == nullptr) {
        DHLOGE("Create video encoder failed.");
        return DCAMERA_INIT_ERR;
    }
    encodeVideoCallback_ = std::make_shared<EncodeVideoCallback>(shared_from_this());
    ret = videoEncoder_->SetCallback(encodeVideoCallback_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video encoder callback failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    ret = videoEncoder_->Configure(metadataFormat_);
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Set video encoder metadata format failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }

    encodeProducerSurface_ = videoEncoder_->CreateInputSurface();
    if (encodeProducerSurface_ == nullptr) {
        DHLOGE("Get video encoder producer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    return DCAMERA_OK;
}

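// Maps the target codec type to a MIME type/profile and the source pixel format to the
// encoder's pixel_format key, then fills in buffer size, resolution, and frame rate.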
int32_t EncodeDataProcess::InitEncoderMetadataFormat()
{
    DHLOGD("Init video encoder metadata format.");

    processedConfig_ = sourceConfig_;

    switch (targetConfig_.GetVideoCodecType()) {
        case VideoCodecType::CODEC_H264:
            processType_ = "video/avc";
            metadataFormat_.PutStringValue("codec_mime", processType_);
            metadataFormat_.PutIntValue("codec_profile", Media::AVCProfile::AVC_PROFILE_BASELINE);

            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H264);
            break;
        case VideoCodecType::CODEC_H265:
            processType_ = "video/hevc";
            metadataFormat_.PutStringValue("codec_mime", processType_);
            metadataFormat_.PutIntValue("codec_profile", Media::HEVCProfile::HEVC_PROFILE_MAIN);

            processedConfig_.SetVideoCodecType(VideoCodecType::CODEC_H265);
            break;
        default:
            DHLOGE("The current codec type does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }

    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::YUVI420);
            break;
        case Videoformat::NV12:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV12);
            break;
        case Videoformat::NV21:
            metadataFormat_.PutIntValue("pixel_format", Media::VideoPixelFormat::NV21);
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return DCAMERA_NOT_FOUND;
    }

    metadataFormat_.PutLongValue("max_input_size", NORM_YUV420_BUFFER_SIZE);
    metadataFormat_.PutIntValue("width", static_cast<int32_t>(sourceConfig_.GetWidth()));
    metadataFormat_.PutIntValue("height", static_cast<int32_t>(sourceConfig_.GetHeight()));
    metadataFormat_.PutIntValue("frame_rate", MAX_FRAME_RATE);
    return DCAMERA_OK;
}

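// Sets the I-frame interval and VBR mode, then selects the bitrate whose table entry
// (keyed by width * height) is closest to the source resolution.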
int32_t EncodeDataProcess::InitEncoderBitrateFormat()
{
    DHLOGD("Init video encoder bitrate format.");
    if (!(IsInEncoderRange(sourceConfig_) && IsInEncoderRange(targetConfig_))) {
        DHLOGE("Source config or target config is invalid.");
        return DCAMERA_BAD_VALUE;
    }
    metadataFormat_.PutIntValue("i_frame_interval", IDR_FRAME_INTERVAL_MS);
    metadataFormat_.PutIntValue("video_encode_bitrate_mode", Media::VideoEncodeBitrateMode::VBR);

    if (ENCODER_BITRATE_TABLE.empty()) {
        DHLOGD("ENCODER_BITRATE_TABLE is empty, use the default bitrate of the encoder.");
        return DCAMERA_OK;
    }
    int64_t pixelCount = static_cast<int64_t>(sourceConfig_.GetWidth() * sourceConfig_.GetHeight());
    int32_t matchedBitrate = BITRATE_6000000;
    int64_t minPixelCountDiff = WIDTH_1920_HEIGHT_1080 - pixelCount;
    for (auto it = ENCODER_BITRATE_TABLE.begin(); it != ENCODER_BITRATE_TABLE.end(); it++) {
        int64_t pixelCountDiff = std::abs(pixelCount - it->first);
        if (pixelCountDiff == 0) {
            matchedBitrate = it->second;
            break;
        }
        if (minPixelCountDiff >= pixelCountDiff) {
            minPixelCountDiff = pixelCountDiff;
            matchedBitrate = it->second;
        }
    }
    DHLOGD("Source config: width : %d, height : %d, matched bitrate %d.", sourceConfig_.GetWidth(),
        sourceConfig_.GetHeight(), matchedBitrate);
    metadataFormat_.PutIntValue("bitrate", matchedBitrate);
    return DCAMERA_OK;
}

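// Prepares and starts the underlying encoder; both steps must succeed.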
int32_t EncodeDataProcess::StartVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StartVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    int32_t ret = videoEncoder_->Prepare();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video encoder prepare failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    ret = videoEncoder_->Start();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("Video encoder start failed. Error code %d.", ret);
        return DCAMERA_INIT_ERR;
    }
    return DCAMERA_OK;
}

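// Flushes and stops the encoder; returns DCAMERA_BAD_OPERATE if either step fails.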
int32_t EncodeDataProcess::StopVideoEncoder()
{
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before StopVideoEncoder.");
        return DCAMERA_BAD_VALUE;
    }

    bool isSuccess = true;
    int32_t ret = videoEncoder_->Flush();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoEncoder flush failed. Error type: %d.", ret);
        isSuccess = false;
    }
    ret = videoEncoder_->Stop();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoEncoder stop failed. Error type: %d.", ret);
        isSuccess = false;
    }

    if (!isSuccess) {
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

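// Stops and releases the encoder and drops the surface/callback references under the encoder lock.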
void EncodeDataProcess::ReleaseVideoEncoder()
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Start release videoEncoder.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before ReleaseVideoEncoder.");
        encodeProducerSurface_ = nullptr;
        encodeVideoCallback_ = nullptr;
        return;
    }
    int32_t ret = StopVideoEncoder();
    if (ret != DCAMERA_OK) {
        DHLOGE("StopVideoEncoder failed.");
    }
    ret = videoEncoder_->Release();
    if (ret != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("VideoEncoder release failed. Error type: %d.", ret);
    }
    encodeProducerSurface_ = nullptr;
    videoEncoder_ = nullptr;
    encodeVideoCallback_ = nullptr;
    DHLOGD("Release videoEncoder success.");
}

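// Releases this node's encoder and counters, then releases the rest of the node chain.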
void EncodeDataProcess::ReleaseProcessNode()
{
    DHLOGD("Start release [%d] node : EncodeNode.", nodeRank_);
    isEncoderProcess_.store(false);
    ReleaseVideoEncoder();

    waitEncoderOutputCount_ = 0;
    lastFeedEncoderInputBufferTimeUs_ = 0;
    inputTimeStampUs_ = 0;
    processType_ = "";

    if (nextDataProcess_ != nullptr) {
        nextDataProcess_->ReleaseProcessNode();
        nextDataProcess_ = nullptr;
    }
    DHLOGD("Release [%d] node : EncodeNode end.", nodeRank_);
}

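// Entry point for each batch of frames: passes data straight through when no encoding is
// needed, otherwise validates the first buffer and feeds it to the encoder's input surface.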
int32_t EncodeDataProcess::ProcessData(std::vector<std::shared_ptr<DataBuffer>>& inputBuffers)
{
    DHLOGD("Process data in EncodeDataProcess.");
    if (inputBuffers.empty()) {
        DHLOGE("The input data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }
    if (sourceConfig_.GetVideoCodecType() == processedConfig_.GetVideoCodecType()) {
        DHLOGD("The target VideoCodecType : %d is the same as the source VideoCodecType : %d.",
            sourceConfig_.GetVideoCodecType(), processedConfig_.GetVideoCodecType());
        return EncodeDone(inputBuffers);
    }

    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before encoding data.");
        return DCAMERA_INIT_ERR;
    }
    if (inputBuffers[0]->Size() > NORM_YUV420_BUFFER_SIZE) {
        DHLOGE("EncodeNode input buffer size %d error.", inputBuffers[0]->Size());
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    if (!isEncoderProcess_.load()) {
        DHLOGE("EncodeNode occurred error or start release.");
        return DCAMERA_DISABLE_PROCESS;
    }
    int32_t err = FeedEncoderInputBuffer(inputBuffers[0]);
    if (err != DCAMERA_OK) {
        DHLOGE("Feed encoder input buffer failed.");
        return err;
    }

    IncreaseWaitEncodeCnt();
    return DCAMERA_OK;
}

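// Copies one raw frame into a surface buffer requested from the encoder's input surface,
// stamps it with a timestamp in the buffer's extra data, and flushes it to the encoder.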
int32_t EncodeDataProcess::FeedEncoderInputBuffer(std::shared_ptr<DataBuffer>& inputBuffer)
{
    std::lock_guard<std::mutex> lck(mtxEncoderState_);
    DHLOGD("Feed encoder input buffer, buffer size %d.", inputBuffer->Size());
    if (encodeProducerSurface_ == nullptr) {
        DHLOGE("Get encoder input producer surface failed.");
        return DCAMERA_INIT_ERR;
    }

    sptr<SurfaceBuffer> surfacebuffer = GetEncoderInputSurfaceBuffer();
    if (surfacebuffer == nullptr) {
        DHLOGE("Get encoder input producer surface buffer failed.");
        return DCAMERA_BAD_OPERATE;
    }
    uint8_t *addr = static_cast<uint8_t *>(surfacebuffer->GetVirAddr());
    if (addr == nullptr) {
        DHLOGE("SurfaceBuffer address is nullptr");
        encodeProducerSurface_->CancelBuffer(surfacebuffer);
        return DCAMERA_BAD_OPERATE;
    }
    size_t size = static_cast<size_t>(surfacebuffer->GetSize());
    errno_t err = memcpy_s(addr, size, inputBuffer->Data(), inputBuffer->Size());
    if (err != EOK) {
        DHLOGE("memcpy_s encoder input producer surface buffer failed, surBufSize %zu.", size);
        return DCAMERA_MEMORY_OPT_ERROR;
    }

    inputTimeStampUs_ = GetEncoderTimeStamp();
    DHLOGD("Encoder input buffer size %d, timeStamp %lld.", inputBuffer->Size(), (long long)inputTimeStampUs_);
    surfacebuffer->GetExtraData()->ExtraSet("timeStamp", inputTimeStampUs_);

    BufferFlushConfig flushConfig = { {0, 0, sourceConfig_.GetWidth(), sourceConfig_.GetHeight()}, 0 };
    SurfaceError ret = encodeProducerSurface_->FlushBuffer(surfacebuffer, -1, flushConfig);
    if (ret != SURFACE_ERROR_OK) {
        DHLOGE("Flush encoder input producer surface buffer failed.");
        return DCAMERA_BAD_OPERATE;
    }
    return DCAMERA_OK;
}

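// Requests a writable surface buffer from the encoder's input surface, with the stride,
// usage, and pixel format derived from the source configuration.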
sptr<SurfaceBuffer> EncodeDataProcess::GetEncoderInputSurfaceBuffer()
{
    BufferRequestConfig requestConfig;
    requestConfig.width = sourceConfig_.GetWidth();
    requestConfig.height = sourceConfig_.GetHeight();
    requestConfig.usage = BUFFER_USAGE_CPU_READ | BUFFER_USAGE_CPU_WRITE | BUFFER_USAGE_MEM_DMA;
    requestConfig.timeout = 0;
    requestConfig.strideAlignment = ENCODER_STRIDE_ALIGNMENT;
    switch (sourceConfig_.GetVideoformat()) {
        case Videoformat::YUVI420:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_P;
            break;
        case Videoformat::NV12:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCBCR_420_SP;
            break;
        case Videoformat::NV21:
            requestConfig.format = PixelFormat::PIXEL_FMT_YCRCB_420_SP;
            break;
        default:
            DHLOGE("The current pixel format does not support encoding.");
            return nullptr;
    }
    sptr<SurfaceBuffer> surfacebuffer = nullptr;
    int32_t flushFence = -1;
    GSError err = encodeProducerSurface_->RequestBuffer(surfacebuffer, flushFence, requestConfig);
    if (err != GSERROR_OK || surfacebuffer == nullptr) {
        DHLOGE("Request encoder input producer surface buffer failed, error code: %d.", err);
    }
    return surfacebuffer;
}

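// Generates the timestamp attached to the next encoder input buffer and remembers the
// previous one in lastFeedEncoderInputBufferTimeUs_. The value is GetNowTimeStampUs()
// scaled by 1000, so it appears to be a nanosecond-granularity stamp despite the Us suffix.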
int64_t EncodeDataProcess::GetEncoderTimeStamp()
{
    if (inputTimeStampUs_ != 0) {
        lastFeedEncoderInputBufferTimeUs_ = inputTimeStampUs_;
    }
    const int64_t nsPerUs = 1000L;
    int64_t nowTimeUs = GetNowTimeStampUs() * nsPerUs;
    return nowTimeUs;
}

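// Tracks how many encoder outputs are still pending; the very first input is expected to
// produce FIRST_FRAME_OUTPUT_NUM outputs, every later input exactly one.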
void EncodeDataProcess::IncreaseWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (lastFeedEncoderInputBufferTimeUs_ == 0) {
        waitEncoderOutputCount_ += FIRST_FRAME_OUTPUT_NUM;
    } else {
        waitEncoderOutputCount_++;
    }
    DHLOGD("Wait encoder output frames number is %d.", waitEncoderOutputCount_);
}

void EncodeDataProcess::ReduceWaitEncodeCnt()
{
    std::lock_guard<std::mutex> lck(mtxHoldCount_);
    if (waitEncoderOutputCount_ <= 0) {
        DHLOGE("The waitEncoderOutputCount_ = %d.", waitEncoderOutputCount_);
    }
    waitEncoderOutputCount_--;
    DHLOGD("Wait encoder output frames number is %d.", waitEncoderOutputCount_);
}

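// Copies one encoded frame out of the codec's shared memory into a DataBuffer, attaches
// the presentation timestamp, and forwards it through EncodeDone().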
int32_t EncodeDataProcess::GetEncoderOutputBuffer(uint32_t index, Media::AVCodecBufferInfo info)
{
    DHLOGD("Get encoder output buffer.");
    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before outputting encoded data.");
        return DCAMERA_BAD_VALUE;
    }
    std::shared_ptr<Media::AVSharedMemory> sharedMemoryOutput = videoEncoder_->GetOutputBuffer(index);
    if (sharedMemoryOutput == nullptr) {
        DHLOGE("Failed to get the output shared memory, index : %u", index);
        return DCAMERA_BAD_OPERATE;
    }

    if (info.size <= 0 || info.size > DATABUFF_MAX_SIZE) {
        DHLOGE("AVCodecBufferInfo error, buffer size : %d", info.size);
        return DCAMERA_BAD_VALUE;
    }

    size_t outputMemoDataSize = static_cast<size_t>(info.size);
    DHLOGD("Encoder output buffer size : %zu", outputMemoDataSize);
    std::shared_ptr<DataBuffer> bufferOutput = std::make_shared<DataBuffer>(outputMemoDataSize);
    errno_t err = memcpy_s(bufferOutput->Data(), bufferOutput->Size(),
        sharedMemoryOutput->GetBase(), outputMemoDataSize);
    if (err != EOK) {
        DHLOGE("memcpy_s buffer failed.");
        return DCAMERA_MEMORY_OPT_ERROR;
    }
    bufferOutput->SetInt64("timeUs", info.presentationTimeUs);

    std::vector<std::shared_ptr<DataBuffer>> nextInputBuffers;
    nextInputBuffers.push_back(bufferOutput);
    return EncodeDone(nextInputBuffers);
}

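// Hands the encoded buffers to the next node in the chain, or, for the last node,
// delivers the first buffer to the pipeline sink callback.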
int32_t EncodeDataProcess::EncodeDone(std::vector<std::shared_ptr<DataBuffer>>& outputBuffers)
{
    DHLOGD("Encoder done.");
    if (outputBuffers.empty()) {
        DHLOGE("The received data buffers are empty.");
        return DCAMERA_BAD_VALUE;
    }

    if (nextDataProcess_ != nullptr) {
        DHLOGD("Send to the next node of the encoder for processing.");
        int32_t err = nextDataProcess_->ProcessData(outputBuffers);
        if (err != DCAMERA_OK) {
            DHLOGE("A node after the encoder failed to process the data.");
        }
        return err;
    }
    DHLOGD("The current node is the last node; output the processed video buffer.");
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    if (targetPipelineSink == nullptr) {
        DHLOGE("callbackPipelineSink_ is nullptr.");
        return DCAMERA_BAD_VALUE;
    }
    targetPipelineSink->OnProcessedVideoBuffer(outputBuffers[0]);
    return DCAMERA_OK;
}

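// Encoder error callback: stops further processing, flushes/stops the codec, and notifies the sink.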
void EncodeDataProcess::OnError()
{
    DHLOGD("EncodeDataProcess : OnError.");
    isEncoderProcess_.store(false);
    if (videoEncoder_ != nullptr) {
        videoEncoder_->Flush();
        videoEncoder_->Stop();
    }
    std::shared_ptr<DCameraPipelineSink> targetPipelineSink = callbackPipelineSink_.lock();
    if (targetPipelineSink == nullptr) {
        DHLOGE("callbackPipelineSink_ is nullptr.");
        return;
    }
    targetPipelineSink->OnError(DataProcessErrorType::ERROR_PIPELINE_ENCODER);
}

void EncodeDataProcess::OnInputBufferAvailable(uint32_t index)
{
    DHLOGD("The available input buffer index : %u. No operation when using surface input.", index);
}

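// Records the encoder's negotiated output format; an empty format is rejected.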
void EncodeDataProcess::OnOutputFormatChanged(const Media::Format &format)
{
    if (format.GetFormatMap().empty()) {
        DHLOGE("The first changed video encoder output format is null.");
        return;
    }
    encodeOutputFormat_ = format;
}

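// Output callback: copies the encoded frame forward, updates the pending-output counter,
// and releases the codec's output buffer back to the encoder.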
void EncodeDataProcess::OnOutputBufferAvailable(uint32_t index, Media::AVCodecBufferInfo info,
    Media::AVCodecBufferFlag flag)
{
    if (!isEncoderProcess_.load()) {
        DHLOGE("EncodeNode occurred error or start release.");
        return;
    }
    DHLOGD("Video encode buffer info: presentation TimeUs %lld, size %d, offset %d, flag %d",
        info.presentationTimeUs, info.size, info.offset, flag);
    int32_t err = GetEncoderOutputBuffer(index, info);
    if (err != DCAMERA_OK) {
        DHLOGE("Get encode output buffer failed.");
        return;
    }
    ReduceWaitEncodeCnt();

    if (videoEncoder_ == nullptr) {
        DHLOGE("The video encoder does not exist before releasing the output buffer.");
        return;
    }
    int32_t errRelease = videoEncoder_->ReleaseOutputBuffer(index);
    if (errRelease != Media::MediaServiceErrCode::MSERR_OK) {
        DHLOGE("The video encoder release output buffer failed, index : [%u].", index);
    }
}

VideoConfigParams EncodeDataProcess::GetSourceConfig() const
{
    return sourceConfig_;
}

VideoConfigParams EncodeDataProcess::GetTargetConfig() const
{
    return targetConfig_;
}
} // namespace DistributedHardware
} // namespace OHOS