/*
 * Copyright (c) 2024-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "avcodec_task_manager.h"

#include <algorithm>
#include <chrono>
#include <cinttypes>
#include <cstdint>
#include <fcntl.h>
#include <memory>
#include <mutex>
#include <unistd.h>
#include <utility>
#include "datetime_ex.h"
#include "camera_util.h"
#include "audio_capturer_session.h"
#include "audio_record.h"
#include "audio_video_muxer.h"
#include "audio_deferred_process.h"
#include "camera_log.h"
#include "frame_record.h"
#include "native_avbuffer.h"
#include "native_avbuffer_info.h"
#include "sample_info.h"
#include "native_mfmagic.h"

namespace {
using namespace std::string_literals;
using namespace std::chrono_literals;
} // namespace
namespace OHOS {
namespace CameraStandard {

AvcodecTaskManager::~AvcodecTaskManager()
{
    CAMERA_SYNC_TRACE;
    Release();
}

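// Constructs the task manager for one capture session. The audio capturer session and audio
// encoder are only wired up when moving-photo audio is enabled at compile time; the video
// encoder is always created here.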
AvcodecTaskManager::AvcodecTaskManager(sptr<AudioCapturerSession> audioCaptureSession,
    VideoCodecType type, ColorSpace colorSpace) : videoCodecType_(type), colorSpace_(colorSpace)
{
    CAMERA_SYNC_TRACE;
    #ifdef MOVING_PHOTO_ADD_AUDIO
    audioCapturerSession_ = audioCaptureSession;
    audioEncoder_ = make_unique<AudioEncoder>();
    #endif
    // Create the video encoder; the task managers themselves are created lazily on first use
    videoEncoder_ = make_shared<VideoEncoder>(type, colorSpace);
}

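// Lazy accessors: the shared task manager and the video encoder task manager below are created
// on first use, and only while the manager is still active. Callers must handle a null result.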
shared_ptr<TaskManager>& AvcodecTaskManager::GetTaskManager()
{
    lock_guard<mutex> lock(taskManagerMutex_);
    if (taskManager_ == nullptr && isActive_.load()) {
        taskManager_ = make_unique<TaskManager>("AvcodecTaskManager", DEFAULT_THREAD_NUMBER, false);
    }
    return taskManager_;
}

shared_ptr<TaskManager>& AvcodecTaskManager::GetEncoderManager()
{
    lock_guard<mutex> lock(encoderManagerMutex_);
    if (videoEncoderManager_ == nullptr && isActive_.load()) {
        videoEncoderManager_ = make_unique<TaskManager>("VideoTaskManager", DEFAULT_ENCODER_THREAD_NUMBER, true);
    }
    return videoEncoderManager_;
}

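// Submits one frame to the video encoder thread pool. On success the frame's surface buffer is
// released via ReleaseSurfaceBuffer(); on failure it is detached from the codec instead. The
// encode result is stored on the frame and reported through the optional cacheCallback.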
void AvcodecTaskManager::EncodeVideoBuffer(sptr<FrameRecord> frameRecord, CacheCbFunc cacheCallback)
{
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    auto encodeManager = GetEncoderManager();
    CHECK_ERROR_RETURN(!encodeManager);
    encodeManager->SubmitTask([thisPtr, frameRecord, cacheCallback]() {
        CAMERA_SYNC_TRACE;
        bool isEncodeSuccess = false;
        // Bail out if either the encoder or the frame is missing; both are dereferenced below.
        CHECK_ERROR_RETURN(!thisPtr->videoEncoder_ || !frameRecord);
        isEncodeSuccess = thisPtr->videoEncoder_->EncodeSurfaceBuffer(frameRecord);
        if (isEncodeSuccess) {
            thisPtr->videoEncoder_->ReleaseSurfaceBuffer(frameRecord);
        } else {
            sptr<SurfaceBuffer> releaseBuffer;
            thisPtr->videoEncoder_->DetachCodecBuffer(releaseBuffer, frameRecord);
        }
        frameRecord->SetEncodedResult(isEncodeSuccess);
        frameRecord->SetFinishStatus();
        if (isEncodeSuccess) {
            MEDIA_INFO_LOG("encode image success %{public}s, refCount: %{public}d", frameRecord->GetFrameId().c_str(),
                frameRecord->GetSptrRefCount());
        } else {
            MEDIA_ERR_LOG("encode image fail %{public}s", frameRecord->GetFrameId().c_str());
        }
        CHECK_EXECUTE(cacheCallback, cacheCallback(frameRecord, isEncodeSuccess));
    });
}

void AvcodecTaskManager::SubmitTask(function<void()> task)
{
    auto taskManager = GetTaskManager();
    CHECK_EXECUTE(taskManager, taskManager->SubmitTask(task));
}

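// Records the capture timestamp and photo-asset proxy for a capture, keyed by captureId, and
// wakes any muxer task waiting in CreateAVMuxer() for this entry to appear.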
void AvcodecTaskManager::SetVideoFd(
    int64_t timestamp, std::shared_ptr<PhotoAssetIntf> photoAssetProxy, int32_t captureId)
{
    lock_guard<mutex> lock(videoFdMutex_);
    MEDIA_INFO_LOG("Set timestamp: %{public}" PRId64 ", captureId: %{public}d", timestamp, captureId);
    videoFdMap_.insert(std::make_pair(captureId, std::make_pair(timestamp, photoAssetProxy)));
    MEDIA_DEBUG_LOG("video map size:%{public}zu", videoFdMap_.size());
    cvEmpty_.notify_all();
}

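// Creates and starts an MP4 muxer for one moving-photo capture. Waits (with a timeout) for
// SetVideoFd() to supply the timestamp and photo-asset proxy, selects the frames to write via
// ChooseVideoBuffer(), then adds the video, audio (when compiled in) and timed-metadata tracks.
// Returns nullptr if no fd information arrives in time.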
sptr<AudioVideoMuxer> AvcodecTaskManager::CreateAVMuxer(vector<sptr<FrameRecord>> frameRecords, int32_t captureRotation,
    vector<sptr<FrameRecord>> &choosedBuffer, int32_t captureId)
{
    CAMERA_SYNC_TRACE;
    unique_lock<mutex> lock(videoFdMutex_);
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    if (videoFdMap_.find(captureId) == videoFdMap_.end()) {
        bool waitResult = false;
        waitResult = cvEmpty_.wait_for(lock, std::chrono::milliseconds(GET_FD_EXPIREATION_TIME),
            [thisPtr, captureId] { return thisPtr->videoFdMap_.find(captureId) != thisPtr->videoFdMap_.end(); });
        CHECK_ERROR_RETURN_RET(!waitResult || videoFdMap_.find(captureId) == videoFdMap_.end(), nullptr);
    }
    sptr<AudioVideoMuxer> muxer = new AudioVideoMuxer();
    OH_AVOutputFormat format = AV_OUTPUT_FORMAT_MPEG_4;
    int64_t timestamp = videoFdMap_[captureId].first;
    auto photoAssetProxy = videoFdMap_[captureId].second;
    videoFdMap_.erase(captureId);
    ChooseVideoBuffer(frameRecords, choosedBuffer, timestamp, captureId);
    muxer->Create(format, photoAssetProxy);
    muxer->SetRotation(captureRotation);
    CHECK_EXECUTE(!choosedBuffer.empty(),
        muxer->SetCoverTime(NanosecToMillisec(std::min(timestamp, choosedBuffer.back()->GetTimeStamp())
        - choosedBuffer.front()->GetTimeStamp())));
    auto formatVideo = make_shared<Format>();
    MEDIA_INFO_LOG("CreateAVMuxer videoCodecType_ = %{public}d", videoCodecType_);
    formatVideo->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, videoCodecType_
        == VIDEO_ENCODE_TYPE_HEVC ? OH_AVCODEC_MIMETYPE_VIDEO_HEVC : OH_AVCODEC_MIMETYPE_VIDEO_AVC);
    if (videoCodecType_ == VIDEO_ENCODE_TYPE_HEVC && videoEncoder_->IsHdr(colorSpace_)) {
        formatVideo->PutIntValue(MediaDescriptionKey::MD_KEY_VIDEO_IS_HDR_VIVID, IS_HDR_VIVID);
    }
    formatVideo->PutIntValue(MediaDescriptionKey::MD_KEY_WIDTH, frameRecords[0]->GetFrameSize()->width);
    formatVideo->PutIntValue(MediaDescriptionKey::MD_KEY_HEIGHT, frameRecords[0]->GetFrameSize()->height);
    formatVideo->PutDoubleValue(MediaDescriptionKey::MD_KEY_FRAME_RATE, VIDEO_FRAME_RATE);
    int videoTrackId = -1;
    muxer->AddTrack(videoTrackId, formatVideo, VIDEO_TRACK);
    int audioTrackId = -1;
    #ifdef MOVING_PHOTO_ADD_AUDIO
    auto formatAudio = make_shared<Format>();
    formatAudio->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, OH_AVCODEC_MIMETYPE_AUDIO_AAC);
    formatAudio->PutIntValue(MediaDescriptionKey::MD_KEY_SAMPLE_RATE, SAMPLERATE_32000);
    formatAudio->PutIntValue(MediaDescriptionKey::MD_KEY_CHANNEL_COUNT, DEFAULT_CHANNEL_COUNT);
    formatAudio->PutIntValue(MediaDescriptionKey::MD_KEY_PROFILE, DEFAULT_PROFILE);
    muxer->AddTrack(audioTrackId, formatAudio, AUDIO_TRACK);
    #endif
    int metaTrackId = -1;
    auto formatMeta = make_shared<Format>();
    formatMeta->PutStringValue(MediaDescriptionKey::MD_KEY_CODEC_MIME, TIMED_METADATA_TRACK_MIMETYPE);
    formatMeta->PutStringValue(MediaDescriptionKey::MD_KEY_TIMED_METADATA_KEY, TIMED_METADATA_KEY);
    formatMeta->PutIntValue(MediaDescriptionKey::MD_KEY_TIMED_METADATA_SRC_TRACK_ID, videoTrackId);
    muxer->AddTrack(metaTrackId, formatMeta, META_TRACK);
    MEDIA_INFO_LOG("CreateMuxer vId:%{public}d,aid:%{public}d,mid:%{public}d", videoTrackId, audioTrackId, metaTrackId);
    muxer->SetTimedMetadata();
    muxer->Start();
    return muxer;
}

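// Stops and releases the muxer, then notifies the photo-asset proxy that the video part of the
// moving photo has been saved.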
void AvcodecTaskManager::FinishMuxer(sptr<AudioVideoMuxer> muxer)
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("doMuxer video is finished");
    if (muxer) {
        muxer->Stop();
        muxer->Release();
        std::shared_ptr<PhotoAssetIntf> proxy = muxer->GetPhotoAssetProxy();
        MEDIA_INFO_LOG("PhotoAssetProxy notify enter");
        CHECK_EXECUTE(proxy, proxy->NotifyVideoSaveFinished());
    }
}

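// Asynchronously muxes the encoded frames for one capture: builds the muxer, writes each chosen
// video sample (plus its timed-metadata buffer when present) with timestamps rebased to the first
// chosen frame, appends the deferred-processed audio samples when moving-photo audio is enabled,
// and finally finishes the muxer.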
void AvcodecTaskManager::DoMuxerVideo(vector<sptr<FrameRecord>> frameRecords, uint64_t taskName,
    int32_t captureRotation, int32_t captureId) __attribute__((no_sanitize("cfi")))
{
    CAMERA_SYNC_TRACE;
    CHECK_ERROR_RETURN_LOG(frameRecords.empty(), "DoMuxerVideo error of empty encoded frame");
    auto thisPtr = sptr<AvcodecTaskManager>(this);
    auto taskManager = GetTaskManager();
    CHECK_ERROR_RETURN_LOG(taskManager == nullptr, "GetTaskManager is null");
    taskManager->SubmitTask([thisPtr, frameRecords, captureRotation, captureId]() {
        CAMERA_SYNC_TRACE;
        MEDIA_INFO_LOG("CreateAVMuxer with %{public}zu", frameRecords.size());
        vector<sptr<FrameRecord>> choosedBuffer;
        sptr<AudioVideoMuxer> muxer = thisPtr->CreateAVMuxer(frameRecords, captureRotation, choosedBuffer, captureId);
        CHECK_ERROR_RETURN_LOG(muxer == nullptr, "CreateAVMuxer failed");
        CHECK_ERROR_RETURN_LOG(choosedBuffer.empty(), "choosed empty buffer!");
        int64_t videoStartTime = choosedBuffer.front()->GetTimeStamp();
        for (size_t index = 0; index < choosedBuffer.size(); index++) {
            MEDIA_DEBUG_LOG("write sample index %{public}zu", index);
            shared_ptr<Media::AVBuffer> buffer = choosedBuffer[index]->encodedBuffer;
            int32_t ret = AV_ERR_OK;
            {
                std::lock_guard<std::mutex> lock(choosedBuffer[index]->bufferMutex_);
                CHECK_WARNING_CONTINUE_LOG(buffer == nullptr, "video encodedBuffer is null");
                buffer->pts_ = NanosecToMicrosec(choosedBuffer[index]->GetTimeStamp() - videoStartTime);
                MEDIA_DEBUG_LOG("choosed buffer pts:%{public}" PRIu64, choosedBuffer[index]->GetTimeStamp());
                ret = muxer->WriteSampleBuffer(buffer, VIDEO_TRACK);
            }
            sptr<SurfaceBuffer> metaSurfaceBuffer = choosedBuffer[index]->GetMetaBuffer();
            if (metaSurfaceBuffer && ret == AV_ERR_OK) {
                shared_ptr<AVBuffer> metaAvBuffer = AVBuffer::CreateAVBuffer(metaSurfaceBuffer);
                metaAvBuffer->pts_ = buffer->pts_;
                MEDIA_DEBUG_LOG("metaAvBuffer pts_ %{public}llu, avBufferSize: %{public}d",
                    (long long unsigned)(metaAvBuffer->pts_), metaAvBuffer->memory_->GetSize());
                muxer->WriteSampleBuffer(metaAvBuffer, META_TRACK);
            } else {
                MEDIA_ERR_LOG("metaSurfaceBuffer ret %{public}d", ret);
            }
        }
        #ifdef MOVING_PHOTO_ADD_AUDIO
        // CollectAudioBuffer
        vector<sptr<AudioRecord>> audioRecords;
        vector<sptr<AudioRecord>> processedAudioRecords;
        thisPtr->PrepareAudioBuffer(choosedBuffer, audioRecords, processedAudioRecords);
        thisPtr->CollectAudioBuffer(processedAudioRecords, muxer);
        #endif
        thisPtr->FinishMuxer(muxer);
    });
}

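// Locates the IDR frame the muxed clip should start from. When a per-capture deblur start time
// has been reported inside the pre-shutter window and the resulting clip stays under the deblur
// duration threshold, the last IDR frame at or before that start time is chosen; otherwise the
// first IDR frame at or after the start time is used, falling back to index 0 when none is found.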
size_t AvcodecTaskManager::FindIdrFrameIndex(vector<sptr<FrameRecord>> frameRecords, int64_t clearVideoEndTime,
    int64_t shutterTime, int32_t captureId)
{
    bool isDeblurStartTime = false;
    std::unique_lock<mutex> startTimeLock(startTimeMutex_);
    int64_t clearVideoStartTime = shutterTime - preBufferDuration_;
    if (mPStartTimeMap_.count(captureId) && mPStartTimeMap_[captureId] <= shutterTime
        && mPStartTimeMap_[captureId] > clearVideoStartTime) {
        MEDIA_INFO_LOG("set deblur start time is %{public}" PRId64, mPStartTimeMap_[captureId]);
        clearVideoStartTime = mPStartTimeMap_[captureId];
        MEDIA_INFO_LOG("clearVideoEndTime is %{public}" PRId64, NanosecToMicrosec(clearVideoEndTime));
        int64_t absoluteValue = abs(clearVideoEndTime - clearVideoStartTime);
        int64_t deblurThreshold = 264000000L;
        isDeblurStartTime = absoluteValue < deblurThreshold;
    }
    mPStartTimeMap_.erase(captureId);
    startTimeLock.unlock();
    MEDIA_INFO_LOG("FindIdrFrameIndex captureId : %{public}d, clearVideoStartTime : %{public}" PRId64,
        captureId, clearVideoStartTime);
    size_t idrIndex = frameRecords.size();
    if (isDeblurStartTime) {
        for (size_t index = 0; index < frameRecords.size(); ++index) {
            auto frame = frameRecords[index];
            if (frame->IsIDRFrame() && frame->GetTimeStamp() <= clearVideoStartTime) {
                MEDIA_INFO_LOG("FindIdrFrameIndex before start time");
                idrIndex = index;
            }
        }
    }
    if (idrIndex == frameRecords.size()) {
        for (size_t index = 0; index < frameRecords.size(); ++index) {
            auto frame = frameRecords[index];
            if (frame->IsIDRFrame() && frame->GetTimeStamp() >= clearVideoStartTime) {
                MEDIA_INFO_LOG("FindIdrFrameIndex after start time");
                idrIndex = index;
                break;
            }
            idrIndex = 0;
        }
    }
    return idrIndex;
}

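// Fallback selection used when the deblur-based choice is unusable: keeps every frame from the
// first IDR frame onwards, ignoring the deblur time window.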
void AvcodecTaskManager::IgnoreDeblur(vector<sptr<FrameRecord>> frameRecords,
    vector<sptr<FrameRecord>> &choosedBuffer, int64_t shutterTime)
{
    MEDIA_INFO_LOG("IgnoreDeblur enter");
    choosedBuffer.clear();
    if (!frameRecords.empty()) {
        auto it = find_if(frameRecords.begin(), frameRecords.end(),
            [](const sptr<FrameRecord>& frame) { return frame->IsIDRFrame(); });
        while (it != frameRecords.end()) {
            choosedBuffer.emplace_back(*it);
            ++it;
        }
    }
}

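// Selects the frames to mux for a capture: starts at the IDR frame returned by
// FindIdrFrameIndex() and keeps frames up to the (possibly deblur-shortened) end time, capped at
// MAX_FRAME_COUNT. Falls back to IgnoreDeblur() when too few frames were chosen or the chosen
// start frame is not an IDR frame.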
void AvcodecTaskManager::ChooseVideoBuffer(vector<sptr<FrameRecord>> frameRecords,
    vector<sptr<FrameRecord>> &choosedBuffer, int64_t shutterTime, int32_t captureId)
{
    CHECK_ERROR_RETURN_LOG(frameRecords.empty(), "frameRecords is empty!");
    choosedBuffer.clear();
    std::unique_lock<mutex> endTimeLock(endTimeMutex_);
    int64_t clearVideoEndTime = shutterTime + postBufferDuration_;
    if (mPEndTimeMap_.count(captureId) && mPEndTimeMap_[captureId] >= shutterTime
        && mPEndTimeMap_[captureId] < clearVideoEndTime) {
        MEDIA_INFO_LOG("set deblur end time is %{public}" PRId64, mPEndTimeMap_[captureId]);
        clearVideoEndTime = mPEndTimeMap_[captureId];
    }
    mPEndTimeMap_.erase(captureId);
    endTimeLock.unlock();
    MEDIA_INFO_LOG("ChooseVideoBuffer captureId : %{public}d, shutterTime : %{public}" PRId64 ", "
        "clearVideoEndTime : %{public}" PRId64, captureId, shutterTime, clearVideoEndTime);
    size_t idrIndex = FindIdrFrameIndex(frameRecords, clearVideoEndTime, shutterTime, captureId);
    size_t frameCount = 0;
    for (size_t index = idrIndex; index < frameRecords.size(); ++index) {
        auto frame = frameRecords[index];
        int64_t timestamp = frame->GetTimeStamp();
        if (timestamp <= clearVideoEndTime && frameCount < MAX_FRAME_COUNT) {
            choosedBuffer.push_back(frame);
            ++frameCount;
        }
    }

    CHECK_EXECUTE(choosedBuffer.size() < MIN_FRAME_RECORD_BUFFER_SIZE || !frameRecords[idrIndex]->IsIDRFrame(),
        IgnoreDeblur(frameRecords, choosedBuffer, shutterTime));
    MEDIA_INFO_LOG("ChooseVideoBuffer with size %{public}zu", choosedBuffer.size());
}

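// Gathers the raw audio records overlapping the chosen video window and runs them through the
// offline audio-effect chain to fill processedAudioRecords. The effect chain is created on
// demand; a timer is (re)registered after each use to release it once RELEASE_WAIT_TIME passes
// without further use.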
void AvcodecTaskManager::PrepareAudioBuffer(vector<sptr<FrameRecord>>& choosedBuffer,
    vector<sptr<AudioRecord>>& audioRecords, vector<sptr<AudioRecord>>& processedAudioRecords)
{
    CAMERA_SYNC_TRACE;
    int64_t videoStartTime = choosedBuffer.front()->GetTimeStamp();
    if (audioCapturerSession_) {
        int64_t startTime = NanosecToMillisec(videoStartTime);
        int64_t endTime = NanosecToMillisec(choosedBuffer.back()->GetTimeStamp());
        audioCapturerSession_->GetAudioRecords(startTime, endTime, audioRecords);
        for (auto ptr: audioRecords) {
            processedAudioRecords.emplace_back(new AudioRecord(ptr->GetTimeStamp()));
        }
        std::lock_guard<mutex> lock(deferredProcessMutex_);
        if (audioDeferredProcess_ == nullptr) {
            audioDeferredProcess_ = std::make_shared<AudioDeferredProcess>();
            CHECK_ERROR_RETURN(!audioDeferredProcess_);
            audioDeferredProcess_->StoreOptions(audioCapturerSession_->deferredInputOptions_,
                audioCapturerSession_->deferredOutputOptions_);
            CHECK_ERROR_RETURN(audioDeferredProcess_->GetOfflineEffectChain() != 0);
            CHECK_ERROR_RETURN(audioDeferredProcess_->ConfigOfflineAudioEffectChain() != 0);
            CHECK_ERROR_RETURN(audioDeferredProcess_->PrepareOfflineAudioEffectChain() != 0);
            CHECK_ERROR_RETURN(audioDeferredProcess_->GetMaxBufferSize(audioCapturerSession_->deferredInputOptions_,
                audioCapturerSession_->deferredOutputOptions_) != 0);
        }
        audioDeferredProcess_->Process(audioRecords, processedAudioRecords);
        auto weakThis = wptr<AvcodecTaskManager>(this);
        if (timerId_) {
            MEDIA_INFO_LOG("audioDP release time reset, %{public}u", timerId_);
            CameraTimer::GetInstance().Unregister(timerId_);
        }
        auto curObject = audioDeferredProcess_;
        timerId_ = CameraTimer::GetInstance().Register([weakThis, curObject]()-> void {
            auto sharedThis = weakThis.promote();
            CHECK_ERROR_RETURN(sharedThis == nullptr);
            std::unique_lock<mutex> lock(sharedThis->deferredProcessMutex_, std::try_to_lock);
            CHECK_ERROR_RETURN(curObject != sharedThis->audioDeferredProcess_);
            CHECK_ERROR_RETURN(!lock.owns_lock());
            sharedThis->audioDeferredProcess_ = nullptr;
            sharedThis->timerId_ = 0;
        }, RELEASE_WAIT_TIME, true);
    }
}

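// Encodes the processed audio records and writes them to the muxer's audio track, assigning
// evenly spaced pts values and flagging the final sample as end-of-stream.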
void AvcodecTaskManager::CollectAudioBuffer(vector<sptr<AudioRecord>> audioRecordVec, sptr<AudioVideoMuxer> muxer)
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("CollectAudioBuffer start with size %{public}zu", audioRecordVec.size());
    bool isEncodeSuccess = false;
    CHECK_ERROR_RETURN_LOG(!audioEncoder_ || audioRecordVec.empty() || !muxer,
        "CollectAudioBuffer cannot find useful data");
    isEncodeSuccess = audioEncoder_->EncodeAudioBuffer(audioRecordVec);
    MEDIA_DEBUG_LOG("encode audio buffer result %{public}d", isEncodeSuccess);
    size_t maxFrameCount = std::min(audioRecordVec.size(), MAX_AUDIO_FRAME_COUNT);
    for (size_t index = 0; index < maxFrameCount; index++) {
        OH_AVCodecBufferAttr attr = { 0, 0, 0, AVCODEC_BUFFER_FLAGS_NONE };
        OH_AVBuffer* buffer = audioRecordVec[index]->encodedBuffer;
        CHECK_WARNING_CONTINUE_LOG(buffer == nullptr, "audio encodedBuffer is null");
        OH_AVBuffer_GetBufferAttr(buffer, &attr);
        attr.pts = static_cast<int64_t>(index * AUDIO_FRAME_INTERVAL);
        if (audioRecordVec.size() > 0) {
            if (index == audioRecordVec.size() - 1) {
                attr.flags = AVCODEC_BUFFER_FLAGS_EOS;
            }
        }
        OH_AVBuffer_SetBufferAttr(buffer, &attr);
        muxer->WriteSampleBuffer(buffer->buffer_, AUDIO_TRACK);
    }
    MEDIA_INFO_LOG("CollectAudioBuffer finished");
}

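// Releases both encoders, cancels the pending deferred-process release timer and drops the
// per-capture fd bookkeeping. Called from the destructor.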
void AvcodecTaskManager::Release()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager release start");
    CHECK_EXECUTE(videoEncoder_ != nullptr, videoEncoder_->Release());
    CHECK_EXECUTE(audioEncoder_ != nullptr, audioEncoder_->Release());
    CHECK_EXECUTE(timerId_ != 0, CameraTimer::GetInstance().Unregister(timerId_));
    audioDeferredProcess_ = nullptr;
    unique_lock<mutex> lock(videoFdMutex_);
    MEDIA_INFO_LOG("videoFdMap_ size is %{public}zu", videoFdMap_.size());
    videoFdMap_.clear();
    MEDIA_INFO_LOG("AvcodecTaskManager release end");
}

void AvcodecTaskManager::Stop()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager Stop start");
    CHECK_EXECUTE(videoEncoder_ != nullptr, videoEncoder_->Release());
    CHECK_EXECUTE(audioEncoder_ != nullptr, audioEncoder_->Release());
    MEDIA_INFO_LOG("AvcodecTaskManager Stop end");
}

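// Deactivates the manager and tears down both thread pools, cancelling any queued tasks, then
// clears the per-capture deblur start/end time maps. After this call the lazy getters no longer
// create new task managers.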
void AvcodecTaskManager::ClearTaskResource()
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("AvcodecTaskManager ClearTaskResource start");
    {
        lock_guard<mutex> lock(taskManagerMutex_);
        isActive_ = false;
        if (taskManager_ != nullptr) {
            taskManager_->CancelAllTasks();
            taskManager_.reset();
        }
    }
    {
        lock_guard<mutex> lock(encoderManagerMutex_);
        isActive_ = false;
        if (videoEncoderManager_ != nullptr) {
            videoEncoderManager_->CancelAllTasks();
            videoEncoderManager_.reset();
        }
    }
    {
        lock_guard<mutex> lock(startTimeMutex_);
        mPStartTimeMap_.clear();
    }
    {
        lock_guard<mutex> lock(endTimeMutex_);
        mPEndTimeMap_.clear();
    }
    MEDIA_INFO_LOG("AvcodecTaskManager ClearTaskResource end");
}

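// Converts the pre/post shutter frame counts into nanosecond durations (count / frame rate),
// which FindIdrFrameIndex() and ChooseVideoBuffer() use as the default clip window around the
// shutter timestamp.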
void AvcodecTaskManager::SetVideoBufferDuration(uint32_t preBufferCount, uint32_t postBufferCount)
{
    MEDIA_INFO_LOG("AvcodecTaskManager SetVideoBufferDuration enter");
    preBufferDuration_ = static_cast<int64_t>(preBufferCount) * ONE_BILLION / VIDEO_FRAME_RATE;
    postBufferDuration_ = static_cast<int64_t>(postBufferCount) * ONE_BILLION / VIDEO_FRAME_RATE;
}
} // namespace CameraStandard
} // namespace OHOS