/*
 * Copyright (c) 2024-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "avcodec_task_manager.h"

#include <algorithm>
#include <chrono>
#include <cinttypes>
#include <cstdint>
#include <fcntl.h>
#include <memory>
#include <mutex>
#include <unistd.h>
#include <utility>
#include "datetime_ex.h"
#include "camera_util.h"
#include "audio_capturer_session.h"
#include "audio_record.h"
#include "audio_video_muxer.h"
#include "audio_deferred_process.h"
#include "utils/camera_log.h"
#include "frame_record.h"
#include "native_avbuffer.h"
#include "native_avbuffer_info.h"
#include "sample_info.h"
#include "native_mfmagic.h"
#include "sync_fence.h"

namespace {
using namespace std::string_literals;
using namespace std::chrono_literals;
} // namespace
namespace OHOS {
namespace CameraStandard {

AvcodecTaskManager::~AvcodecTaskManager()
{
    CAMERA_SYNC_TRACE;
    Release();
    ClearTaskResource();
}
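// Constructors: both overloads create the VideoEncoder for moving-photo frames with
// the given codec type and color space and, when MOVING_PHOTO_ADD_AUDIO is enabled,
// keep the audio capturer session and create an AudioEncoder. The second overload
// additionally remembers the moving-photo surface and the frame size.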
AvcodecTaskManager::AvcodecTaskManager(sptr<AudioCapturerSession> audioCaptureSession,
    VideoCodecType type, ColorSpace colorSpace) : videoCodecType_(type), colorSpace_(colorSpace)
{
    CAMERA_SYNC_TRACE;
#ifdef MOVING_PHOTO_ADD_AUDIO
    audioCapturerSession_ = audioCaptureSession;
    audioEncoder_ = make_unique<AudioEncoder>();
#endif
    // Create the video encoder
    videoEncoder_ = make_shared<VideoEncoder>(type, colorSpace);
}

AvcodecTaskManager::AvcodecTaskManager(wptr<Surface> movingSurface, shared_ptr<Size> size,
    sptr<AudioCapturerSession> audioCaptureSession, VideoCodecType type, ColorSpace colorSpace)
    : videoCodecType_(type), colorSpace_(colorSpace), movingSurface_(movingSurface), size_(size)
{
    CAMERA_SYNC_TRACE;
#ifdef MOVING_PHOTO_ADD_AUDIO
    audioCapturerSession_ = audioCaptureSession;
    audioEncoder_ = make_unique<AudioEncoder>();
#endif
    // Create the video encoder
    videoEncoder_ = make_shared<VideoEncoder>(type, colorSpace);
}
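// Configures the video codec on a detached worker thread so codec setup does not
// block the caller; the sptr copy keeps this object alive until the thread finishes.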
void AvcodecTaskManager::AsyncInitVideoCodec()
{
    MEDIA_INFO_LOG("AvcodecTaskManager AsyncInitVideoCodec enter");
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    std::thread([thisPtr]() {
        if (thisPtr->videoEncoder_) {
            thisPtr->videoEncoder_->SetVideoCodec(thisPtr->size_, 0);
        } else {
            MEDIA_ERR_LOG("init videoCodec failed");
        }
    }).detach();
}
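// Lazily creates the general-purpose task manager under taskManagerMutex_. Once
// ClearTaskResource() has set isActive_ to false, no new manager is created and the
// (possibly null) reference is returned, so callers must check it.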
shared_ptr<TaskManager>& AvcodecTaskManager::GetTaskManager()
{
    lock_guard<mutex> lock(taskManagerMutex_);
    if (taskManager_ == nullptr && isActive_.load()) {
        taskManager_ = make_unique<TaskManager>("AvcodecTaskManager", DEFAULT_THREAD_NUMBER, false);
    }
    return taskManager_;
}
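// Lazily creates the dedicated video-encoding task manager; same lifetime rules as
// GetTaskManager().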
shared_ptr<TaskManager>& AvcodecTaskManager::GetEncoderManager()
{
    lock_guard<mutex> lock(encoderManagerMutex_);
    if (videoEncoderManager_ == nullptr && isActive_.load()) {
        videoEncoderManager_ = make_unique<TaskManager>("VideoTaskManager", DEFAULT_ENCODER_THREAD_NUMBER, true);
    }
    return videoEncoderManager_;
}
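// Submits one frame to the encoder thread pool: the codec buffer is detached and
// handed back to the moving-photo surface, the surface buffer is encoded, the result
// is recorded on the FrameRecord, and cacheCallback (if provided) is invoked with
// the outcome.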
void AvcodecTaskManager::EncodeVideoBuffer(sptr<FrameRecord> frameRecord, CacheCbFunc cacheCallback)
{
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    auto encodeManager = GetEncoderManager();
    CHECK_RETURN(!encodeManager);
    encodeManager->SubmitTask([thisPtr, frameRecord, cacheCallback]() {
        CAMERA_SYNC_TRACE;
        CHECK_RETURN(thisPtr == nullptr);
        CHECK_RETURN(!thisPtr->videoEncoder_ || !frameRecord);
        sptr<Surface> movingSurface = thisPtr->movingSurface_.promote();
        if (movingSurface) {
            sptr<SurfaceBuffer> codecDetachBuf;
            thisPtr->videoEncoder_->DetachCodecBuffer(codecDetachBuf, frameRecord);
            SurfaceError surfaceRet = movingSurface->AttachBufferToQueue(codecDetachBuf);
            CHECK_PRINT_ELOG(surfaceRet != SURFACE_ERROR_OK, "movingSurface AttachBuffer failed");
            surfaceRet = movingSurface->ReleaseBuffer(codecDetachBuf, SyncFence::INVALID_FENCE);
            CHECK_PRINT_ELOG(surfaceRet != SURFACE_ERROR_OK, "movingSurface ReleaseBuffer failed");
        }
        bool isEncodeSuccess = thisPtr->videoEncoder_->EncodeSurfaceBuffer(frameRecord);
        CHECK_PRINT_ELOG(!isEncodeSuccess, "EncodeVideoBuffer failed");
        frameRecord->SetEncodedResult(isEncodeSuccess);
        frameRecord->SetFinishStatus();
        if (isEncodeSuccess) {
            MEDIA_INFO_LOG("encode image success %{public}s, refCount: %{public}d", frameRecord->GetFrameId().c_str(),
                frameRecord->GetSptrRefCount());
        } else {
            MEDIA_ERR_LOG("encode image failed %{public}s", frameRecord->GetFrameId().c_str());
        }
        CHECK_EXECUTE(cacheCallback, cacheCallback(frameRecord, isEncodeSuccess));
    });
}

void AvcodecTaskManager::SubmitTask(function<void()> task)
{
    auto taskManager = GetTaskManager();
    CHECK_EXECUTE(taskManager, taskManager->SubmitTask(task));
}
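// Records the capture timestamp (used as the shutter time when choosing frames) and
// the photo asset proxy for a capture id, then wakes any CreateAVMuxer() call waiting
// on cvEmpty_ for this entry to appear.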
void AvcodecTaskManager::SetVideoFd(
    int64_t timestamp, std::shared_ptr<PhotoAssetIntf> photoAssetProxy, int32_t captureId)
{
    lock_guard<mutex> lock(videoFdMutex_);
    MEDIA_INFO_LOG("Set timestamp: %{public}" PRId64 ", captureId: %{public}d", timestamp, captureId);
    videoFdMap_.insert(std::make_pair(captureId, std::make_pair(timestamp, photoAssetProxy)));
    MEDIA_INFO_LOG("video map size:%{public}zu", videoFdMap_.size());
    cvEmpty_.notify_all();
}

constexpr inline float MovingPhotoNanosecToMillisec(int64_t nanosec)
{
    return static_cast<float>(nanosec) / 1000000.0f;
}
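// Builds and starts an MPEG-4 muxer for one moving-photo capture: waits (bounded by
// GET_FD_EXPIREATION_TIME) for SetVideoFd() to publish the capture's timestamp and
// photo asset proxy, selects the frames to keep via ChooseVideoBuffer(), then adds
// the video, audio (when MOVING_PHOTO_ADD_AUDIO is set) and timed-metadata tracks.
// Returns nullptr if the capture id never shows up or no frames are available.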
sptr<AudioVideoMuxer> AvcodecTaskManager::CreateAVMuxer(vector<sptr<FrameRecord>> frameRecords, int32_t captureRotation,
    vector<sptr<FrameRecord>> &choosedBuffer, int32_t captureId)
{
    // LCOV_EXCL_START
    CAMERA_SYNC_TRACE;
    unique_lock<mutex> lock(videoFdMutex_);
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    if (videoFdMap_.find(captureId) == videoFdMap_.end()) {
        bool waitResult = false;
        waitResult = cvEmpty_.wait_for(lock, std::chrono::milliseconds(GET_FD_EXPIREATION_TIME),
            [thisPtr, captureId] { return thisPtr->videoFdMap_.find(captureId) != thisPtr->videoFdMap_.end(); });
        CHECK_RETURN_RET(!waitResult || videoFdMap_.find(captureId) == videoFdMap_.end(), nullptr);
    }
    sptr<AudioVideoMuxer> muxer = new AudioVideoMuxer();
    OH_AVOutputFormat format = AV_OUTPUT_FORMAT_MPEG_4;
    int64_t timestamp = videoFdMap_[captureId].first;
    auto photoAssetProxy = videoFdMap_[captureId].second;
    ChooseVideoBuffer(frameRecords, choosedBuffer, timestamp, captureId);
    muxer->Create(format, photoAssetProxy);
    muxer->SetRotation(captureRotation);
    CHECK_EXECUTE(!choosedBuffer.empty(),
        {
            muxer->SetCoverTime(MovingPhotoNanosecToMillisec(std::min(timestamp,
                choosedBuffer.back()->GetTimeStamp()) - choosedBuffer.front()->GetTimeStamp()));
            muxer->SetStartTime(MovingPhotoNanosecToMillisec(choosedBuffer.front()->GetTimeStamp()));
            CHECK_EXECUTE(videoEncoder_ != nullptr, muxer->SetSqr(videoEncoder_->GetEncoderBitrate()));
        }
    );
    auto formatVideo = make_shared<Format>();
    MEDIA_INFO_LOG("CreateAVMuxer videoCodecType_ = %{public}d", videoCodecType_);
    formatVideo->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, videoCodecType_
        == VIDEO_ENCODE_TYPE_HEVC ? OH_AVCODEC_MIMETYPE_VIDEO_HEVC : OH_AVCODEC_MIMETYPE_VIDEO_AVC);
    if (videoCodecType_ == VIDEO_ENCODE_TYPE_HEVC && videoEncoder_->IsHdr(colorSpace_)) {
        formatVideo->PutIntValue(MediaDescriptionKey::MD_KEY_VIDEO_IS_HDR_VIVID, IS_HDR_VIVID);
    }
    CHECK_RETURN_RET_ELOG(frameRecords.empty(), nullptr, "frameRecords is empty");
    formatVideo->PutIntValue(MediaDescriptionKey::MD_KEY_WIDTH, frameRecords[0]->GetFrameSize()->width);
    formatVideo->PutIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, frameRecords[0]->GetFrameSize()->height);
    formatVideo->PutDoubleValue(MediaDescriptionKey::MD_KEY_FRAME_RATE, VIDEO_FRAME_RATE);
    int videoTrackId = -1;
    muxer->AddTrack(videoTrackId, formatVideo, VIDEO_TRACK);
    int audioTrackId = -1;
#ifdef MOVING_PHOTO_ADD_AUDIO
    auto formatAudio = make_shared<Format>();
    formatAudio->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, OH_AVCODEC_MIMETYPE_AUDIO_AAC);
    formatAudio->PutIntValue(MediaDescriptionKey::MD_KEY_SAMPLE_RATE, SAMPLERATE_32000);
    formatAudio->PutIntValue(MediaDescriptionKey::MD_KEY_CHANNEL_COUNT, DEFAULT_CHANNEL_COUNT);
    formatAudio->PutIntValue(MediaDescriptionKey::MD_KEY_PROFILE, DEFAULT_PROFILE);
    muxer->AddTrack(audioTrackId, formatAudio, AUDIO_TRACK);
#endif
    int metaTrackId = -1;
    auto formatMeta = make_shared<Format>();
    formatMeta->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, TIMED_METADATA_TRACK_MIMETYPE);
    formatMeta->PutStringValue(MediaDescriptionKey::MD_KEY_TIMED_METADATA_KEY, TIMED_METADATA_KEY);
    formatMeta->PutIntValue(MediaDescriptionKey::MD_KEY_TIMED_METADATA_SRC_TRACK_ID, videoTrackId);
    muxer->AddTrack(metaTrackId, formatMeta, META_TRACK);
    MEDIA_INFO_LOG("CreateMuxer vId:%{public}d,aid:%{public}d,mid:%{public}d", videoTrackId, audioTrackId, metaTrackId);
    muxer->SetTimedMetadata();
    muxer->Start();
    return muxer;
    // LCOV_EXCL_STOP
}
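// Stops and releases the muxer, notifies the photo asset proxy that the video has
// been saved, and drops the capture id from videoFdMap_.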
void AvcodecTaskManager::FinishMuxer(sptr<AudioVideoMuxer> muxer, int32_t captureId)
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("doMuxer video is finished");
    CHECK_RETURN(!muxer);
    muxer->Stop();
    muxer->Release();
    std::shared_ptr<PhotoAssetIntf> proxy = muxer->GetPhotoAssetProxy();
    MEDIA_INFO_LOG("PhotoAssetProxy notify enter");
    CHECK_RETURN(!proxy);
    proxy->NotifyVideoSaveFinished();
    lock_guard<mutex> lock(videoFdMutex_);
    videoFdMap_.erase(captureId);
    MEDIA_INFO_LOG("finishMuxer end, videoFdMap_ size is %{public}zu", videoFdMap_.size());
}

bool AvcodecTaskManager::isEmptyVideoFdMap()
{
    lock_guard<mutex> lock(videoFdMutex_);
    return videoFdMap_.empty();
}
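// Asynchronously muxes one moving-photo clip: creates the muxer, writes the chosen
// encoded video frames (pts rebased to the first chosen frame) together with their
// per-frame timed metadata, appends the processed audio track when
// MOVING_PHOTO_ADD_AUDIO is enabled, and finally finishes the muxer.
// A typical caller sequence (a sketch only; the actual call site lives elsewhere in
// the camera service) would be:
//     taskManager->SetVideoFd(shutterTimeNs, photoAssetProxy, captureId);
//     taskManager->DoMuxerVideo(encodedFrames, taskName, rotation, captureId);
// CreateAVMuxer() also tolerates SetVideoFd() arriving slightly later, since it
// waits on cvEmpty_ for the capture id.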
void AvcodecTaskManager::DoMuxerVideo(vector<sptr<FrameRecord>> frameRecords, uint64_t taskName,
    int32_t captureRotation, int32_t captureId) __attribute__((no_sanitize("cfi")))
{
    CAMERA_SYNC_TRACE;
    CHECK_RETURN_ELOG(frameRecords.empty(), "DoMuxerVideo error of empty encoded frame");
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    auto taskManager = GetTaskManager();
    CHECK_RETURN_ELOG(taskManager == nullptr, "GetTaskManager is null");
    GetTaskManager()->SubmitTask([thisPtr, frameRecords, captureRotation, captureId]() {
        CAMERA_SYNC_TRACE;
        MEDIA_INFO_LOG("CreateAVMuxer with %{public}zu", frameRecords.size());
        vector<sptr<FrameRecord>> choosedBuffer;
        sptr<AudioVideoMuxer> muxer = thisPtr->CreateAVMuxer(frameRecords, captureRotation, choosedBuffer, captureId);
        CHECK_RETURN_ELOG(muxer == nullptr, "CreateAVMuxer failed");
        if (choosedBuffer.empty()) {
            lock_guard<mutex> lock(thisPtr->videoFdMutex_);
            thisPtr->videoFdMap_.erase(captureId);
            MEDIA_ERR_LOG("chosen buffer is empty, videoFdMap_ size is %{public}zu", thisPtr->videoFdMap_.size());
            return;
        }
        int64_t videoStartTime = choosedBuffer.front()->GetTimeStamp();
        for (size_t index = 0; index < choosedBuffer.size(); index++) {
            MEDIA_DEBUG_LOG("write sample index %{public}zu", index);
            shared_ptr<Media::AVBuffer> buffer = choosedBuffer[index]->encodedBuffer;
            {
                std::lock_guard<std::mutex> lock(choosedBuffer[index]->bufferMutex_);
                CHECK_CONTINUE_WLOG(buffer == nullptr, "video encodedBuffer is null");
                buffer->pts_ = NanosecToMicrosec(choosedBuffer[index]->GetTimeStamp() - videoStartTime);
                MEDIA_DEBUG_LOG("chosen buffer pts:%{public}" PRId64, buffer->pts_);
                muxer->WriteSampleBuffer(buffer, VIDEO_TRACK);
            }
            sptr<SurfaceBuffer> metaSurfaceBuffer = frameRecords[index]->GetMetaBuffer();
            if (metaSurfaceBuffer) {
                shared_ptr<AVBuffer> metaAvBuffer = AVBuffer::CreateAVBuffer(metaSurfaceBuffer);
                metaAvBuffer->pts_ = buffer->pts_;
                MEDIA_DEBUG_LOG("metaAvBuffer pts_ %{public}llu, avBufferSize: %{public}d",
                    (long long unsigned)(metaAvBuffer->pts_), metaAvBuffer->memory_->GetSize());
                muxer->WriteSampleBuffer(metaAvBuffer, META_TRACK);
            } else {
                MEDIA_ERR_LOG("metaSurfaceBuffer is nullptr");
            }
        }
#ifdef MOVING_PHOTO_ADD_AUDIO
        // CollectAudioBuffer
        vector<sptr<AudioRecord>> audioRecords;
        vector<sptr<AudioRecord>> processedAudioRecords;
        thisPtr->PrepareAudioBuffer(choosedBuffer, audioRecords, processedAudioRecords);
        thisPtr->CollectAudioBuffer(processedAudioRecords, muxer);
#endif
        thisPtr->FinishMuxer(muxer, captureId);
    });
}
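// Picks the IDR frame that should start the clip. The nominal start is
// shutterTime - preBufferDuration_; if a deblur start time was reported for this
// capture (mPStartTimeMap_) and the resulting span stays under the deblur threshold,
// the last IDR frame at or before that start time is preferred. Otherwise the first
// IDR frame between the start time and the shutter time is used, falling back to
// index 0.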
size_t AvcodecTaskManager::FindIdrFrameIndex(vector<sptr<FrameRecord>> frameRecords, int64_t clearVideoEndTime,
    int64_t shutterTime, int32_t captureId)
{
    // LCOV_EXCL_START
    bool isDeblurStartTime = false;
    std::unique_lock<mutex> startTimeLock(startTimeMutex_);
    int64_t clearVideoStartTime = shutterTime - preBufferDuration_;
    if (mPStartTimeMap_.count(captureId) && mPStartTimeMap_[captureId] <= shutterTime
        && mPStartTimeMap_[captureId] > clearVideoStartTime) {
        MEDIA_INFO_LOG("set deblur start time is %{public}" PRIu64, mPStartTimeMap_[captureId]);
        clearVideoStartTime = mPStartTimeMap_[captureId];
        MEDIA_INFO_LOG("clearVideoEndTime is %{public}" PRIu64, NanosecToMicrosec(clearVideoEndTime));
        int64_t absoluteValue = abs(clearVideoEndTime - clearVideoStartTime);
        int64_t deblurThreshold = 264000000L;
        isDeblurStartTime = absoluteValue < deblurThreshold;
    }
    mPStartTimeMap_.erase(captureId);
    startTimeLock.unlock();
    MEDIA_INFO_LOG("FindIdrFrameIndex captureId : %{public}d, clearVideoStartTime : %{public}" PRId64,
        captureId, clearVideoStartTime);
    size_t idrIndex = frameRecords.size();
    if (isDeblurStartTime) {
        for (size_t index = 0; index < frameRecords.size(); ++index) {
            auto frame = frameRecords[index];
            if (frame->IsIDRFrame() && frame->GetTimeStamp() <= clearVideoStartTime) {
                MEDIA_INFO_LOG("FindIdrFrameIndex before start time");
                idrIndex = index;
            }
        }
    }
    if (idrIndex == frameRecords.size()) {
        for (size_t index = 0; index < frameRecords.size(); ++index) {
            auto frame = frameRecords[index];
            if (frame->IsIDRFrame() && frame->GetTimeStamp() >= clearVideoStartTime &&
                frame->GetTimeStamp() < shutterTime) {
                MEDIA_INFO_LOG("FindIdrFrameIndex after start time");
                idrIndex = index;
                break;
            }
            idrIndex = 0;
        }
    }
    return idrIndex;
    // LCOV_EXCL_STOP
}
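// Fallback selection that ignores the deblur window: keeps everything from the first
// IDR frame onwards.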
void AvcodecTaskManager::IgnoreDeblur(vector<sptr<FrameRecord>> frameRecords,
    vector<sptr<FrameRecord>> &choosedBuffer, int64_t shutterTime)
{
    MEDIA_INFO_LOG("IgnoreDeblur enter");
    choosedBuffer.clear();
    CHECK_RETURN(frameRecords.empty());
    auto it = find_if(
        frameRecords.begin(), frameRecords.end(), [](const sptr<FrameRecord>& frame) { return frame->IsIDRFrame(); });
    while (it != frameRecords.end()) {
        choosedBuffer.emplace_back(*it);
        ++it;
    }
}
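// Selects the frames that make up the clip: everything from the IDR frame chosen by
// FindIdrFrameIndex() up to shutterTime + postBufferDuration_ (or the reported deblur
// end time), capped at MAX_FRAME_COUNT. If too few frames are selected or the chosen
// start is not an IDR frame, falls back to IgnoreDeblur().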
void AvcodecTaskManager::ChooseVideoBuffer(vector<sptr<FrameRecord>> frameRecords,
    vector<sptr<FrameRecord>> &choosedBuffer, int64_t shutterTime, int32_t captureId)
{
    CHECK_RETURN_ELOG(frameRecords.empty(), "frameRecords is empty!");
    // LCOV_EXCL_START
    choosedBuffer.clear();
    std::unique_lock<mutex> endTimeLock(endTimeMutex_);
    int64_t clearVideoEndTime = shutterTime + postBufferDuration_;
    if (mPEndTimeMap_.count(captureId) && mPEndTimeMap_[captureId] >= shutterTime
        && mPEndTimeMap_[captureId] < clearVideoEndTime) {
        MEDIA_INFO_LOG("set deblur end time is %{public}" PRIu64, mPEndTimeMap_[captureId]);
        clearVideoEndTime = mPEndTimeMap_[captureId];
    }
    mPEndTimeMap_.erase(captureId);
    endTimeLock.unlock();
    MEDIA_INFO_LOG("ChooseVideoBuffer captureId : %{public}d, shutterTime : %{public}" PRId64 ", "
        "clearVideoEndTime : %{public}" PRId64, captureId, shutterTime, clearVideoEndTime);
    size_t idrIndex = FindIdrFrameIndex(frameRecords, clearVideoEndTime, shutterTime, captureId);
    size_t frameCount = 0;
    for (size_t index = idrIndex; index < frameRecords.size(); ++index) {
        auto frame = frameRecords[index];
        int64_t timestamp = frame->GetTimeStamp();
        if (timestamp <= clearVideoEndTime && frameCount < MAX_FRAME_COUNT) {
            choosedBuffer.push_back(frame);
            ++frameCount;
        }
    }

    CHECK_EXECUTE(choosedBuffer.size() < MIN_FRAME_RECORD_BUFFER_SIZE || !frameRecords[idrIndex]->IsIDRFrame(),
        IgnoreDeblur(frameRecords, choosedBuffer, shutterTime));
    MEDIA_INFO_LOG("ChooseVideoBuffer with size %{public}zu", choosedBuffer.size());
    // LCOV_EXCL_STOP
}
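// Collects the raw audio records that overlap the chosen video frames and runs them
// through the deferred audio effect chain into processedAudioRecords. The
// AudioDeferredProcess instance is created lazily and scheduled for release via
// CameraTimer after RELEASE_WAIT_TIME of inactivity.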
void AvcodecTaskManager::PrepareAudioBuffer(vector<sptr<FrameRecord>>& choosedBuffer,
    vector<sptr<AudioRecord>>& audioRecords, vector<sptr<AudioRecord>>& processedAudioRecords)
{
    // LCOV_EXCL_START
    CAMERA_SYNC_TRACE;
    int64_t videoStartTime = choosedBuffer.front()->GetTimeStamp();
    if (audioCapturerSession_) {
        int64_t startTime = NanosecToMillisec(videoStartTime);
        int64_t endTime = NanosecToMillisec(choosedBuffer.back()->GetTimeStamp());
        audioCapturerSession_->GetAudioRecords(startTime, endTime, audioRecords);
        for (auto ptr: audioRecords) {
            processedAudioRecords.emplace_back(new AudioRecord(ptr->GetTimeStamp()));
        }
        std::lock_guard<mutex> lock(deferredProcessMutex_);
        if (audioDeferredProcess_ == nullptr) {
            audioDeferredProcess_ = std::make_shared<AudioDeferredProcess>();
            CHECK_RETURN(!audioDeferredProcess_);
            audioDeferredProcess_->StoreOptions(audioCapturerSession_->deferredInputOptions_,
                audioCapturerSession_->deferredOutputOptions_);
            CHECK_RETURN(audioDeferredProcess_->GetOfflineEffectChain() != 0);
            CHECK_RETURN(audioDeferredProcess_->ConfigOfflineAudioEffectChain() != 0);
            CHECK_RETURN(audioDeferredProcess_->PrepareOfflineAudioEffectChain() != 0);
            CHECK_RETURN(audioDeferredProcess_->GetMaxBufferSize(audioCapturerSession_->deferredInputOptions_,
                audioCapturerSession_->deferredOutputOptions_) != 0);
        }
        audioDeferredProcess_->Process(audioRecords, processedAudioRecords);
        auto weakThis = wptr<AvcodecTaskManager>(this);
        if (timerId_) {
            MEDIA_INFO_LOG("audioDP release time reset, %{public}u", timerId_);
            CameraTimer::GetInstance().Unregister(timerId_);
        }
        auto curObject = audioDeferredProcess_;
        timerId_ = CameraTimer::GetInstance().Register([weakThis, curObject]() -> void {
            auto sharedThis = weakThis.promote();
            CHECK_RETURN(sharedThis == nullptr);
            std::unique_lock<mutex> lock(sharedThis->deferredProcessMutex_, std::try_to_lock);
            CHECK_RETURN(curObject != sharedThis->audioDeferredProcess_);
            CHECK_RETURN(!lock.owns_lock());
            sharedThis->audioDeferredProcess_ = nullptr;
            sharedThis->timerId_ = 0;
        }, RELEASE_WAIT_TIME, true);
    }
    // LCOV_EXCL_STOP
}
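// Encodes the processed audio records and writes the encoded buffers to the muxer's
// audio track, spacing samples AUDIO_FRAME_INTERVAL apart and flagging the last one
// as EOS.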
void AvcodecTaskManager::CollectAudioBuffer(vector<sptr<AudioRecord>> audioRecordVec, sptr<AudioVideoMuxer> muxer)
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("CollectAudioBuffer start with size %{public}zu", audioRecordVec.size());
    bool isEncodeSuccess = false;
    CHECK_RETURN_ELOG(!audioEncoder_ || audioRecordVec.empty() || !muxer,
        "CollectAudioBuffer cannot find useful data");
    // LCOV_EXCL_START
    isEncodeSuccess = audioEncoder_->EncodeAudioBuffer(audioRecordVec);
    MEDIA_DEBUG_LOG("encode audio buffer result %{public}d", isEncodeSuccess);
    size_t maxFrameCount = std::min(audioRecordVec.size(), MAX_AUDIO_FRAME_COUNT);
    for (size_t index = 0; index < maxFrameCount; index++) {
        OH_AVCodecBufferAttr attr = { 0, 0, 0, AVCODEC_BUFFER_FLAGS_NONE };
        OH_AVBuffer* buffer = audioRecordVec[index]->encodedBuffer;
        CHECK_CONTINUE_WLOG(buffer == nullptr, "audio encodedBuffer is null");
        OH_AVBuffer_GetBufferAttr(buffer, &attr);
        attr.pts = static_cast<int64_t>(index * AUDIO_FRAME_INTERVAL);
        if (index == audioRecordVec.size() - 1) {
            attr.flags = AVCODEC_BUFFER_FLAGS_EOS;
        }
        OH_AVBuffer_SetBufferAttr(buffer, &attr);
        muxer->WriteSampleBuffer(buffer->buffer_, AUDIO_TRACK);
    }
    MEDIA_INFO_LOG("CollectAudioBuffer finished");
    // LCOV_EXCL_STOP
}
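// Releases both encoders, cancels the deferred-process release timer and clears any
// pending capture entries. Called from the destructor before ClearTaskResource().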
void AvcodecTaskManager::Release()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager release start");
    CHECK_EXECUTE(videoEncoder_ != nullptr, videoEncoder_->Release());
    CHECK_EXECUTE(audioEncoder_ != nullptr, audioEncoder_->Release());
    CHECK_EXECUTE(timerId_ != 0, CameraTimer::GetInstance().Unregister(timerId_));
    audioDeferredProcess_ = nullptr;
    unique_lock<mutex> lock(videoFdMutex_);
    MEDIA_INFO_LOG("videoFdMap_ size is %{public}zu", videoFdMap_.size());
    videoFdMap_.clear();
    MEDIA_INFO_LOG("AvcodecTaskManager release end");
}

void AvcodecTaskManager::Stop()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager Stop start");
    CHECK_EXECUTE(videoEncoder_ != nullptr, videoEncoder_->Release());
    CHECK_EXECUTE(audioEncoder_ != nullptr, audioEncoder_->Release());
    MEDIA_INFO_LOG("AvcodecTaskManager Stop end");
}
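// Marks the manager inactive so GetTaskManager()/GetEncoderManager() stop creating
// new managers, cancels all pending tasks and clears the deblur time maps.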
void AvcodecTaskManager::ClearTaskResource()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager ClearTaskResource start");
    {
        lock_guard<mutex> lock(taskManagerMutex_);
        isActive_ = false;
        if (taskManager_ != nullptr) {
            taskManager_->CancelAllTasks();
            taskManager_.reset();
        }
    }
    {
        lock_guard<mutex> lock(encoderManagerMutex_);
        isActive_ = false;
        if (videoEncoderManager_ != nullptr) {
            videoEncoderManager_->CancelAllTasks();
            videoEncoderManager_.reset();
        }
    }
    {
        lock_guard<mutex> lock(startTimeMutex_);
        mPStartTimeMap_.clear();
    }
    {
        lock_guard<mutex> lock(endTimeMutex_);
        mPEndTimeMap_.clear();
    }
    MEDIA_INFO_LOG("AvcodecTaskManager ClearTaskResource end");
}
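// Converts the requested pre/post frame counts into nanosecond durations
// (count / VIDEO_FRAME_RATE seconds) used as the frame-selection window around the
// shutter time.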
void AvcodecTaskManager::SetVideoBufferDuration(uint32_t preBufferCount, uint32_t postBufferCount)
{
    MEDIA_INFO_LOG("AvcodecTaskManager SetVideoBufferDuration enter");
    preBufferDuration_ = static_cast<int64_t>(preBufferCount) * ONE_BILLION / VIDEO_FRAME_RATE;
    postBufferDuration_ = static_cast<int64_t>(postBufferCount) * ONE_BILLION / VIDEO_FRAME_RATE;
}
} // namespace CameraStandard
} // namespace OHOS