/*
 * Copyright (C) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "video_sample.h"

#include <chrono>
#include <iostream>
#include <unistd.h>

#include "native_avformat.h"
#include "sync_fence.h"

#include "video_processing_callback_impl.h"
#include "video_processing_callback_native.h"
#include "video_processing_impl.h"
#include "video_processing_capi_capability.h"

using namespace OHOS;
using namespace OHOS::Media::VideoProcessingEngine;
using namespace std;
using OHOS::Surface;
using std::mutex;
using OHOS::SurfaceBuffer;
using OHOS::IBufferConsumerListener;
namespace {
constexpr std::chrono::seconds STOP_TIMEOUT(10);
constexpr int INPUT_FRAME_NUMBER = 10;
constexpr int QUEUE_BUFFER_SIZE = 5;
}

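// Consumer listener registered on the output surface; forwards buffer-available
// notifications back to the owning VideoSample.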
class TestConsumerListener : public IBufferConsumerListener {
public:
    explicit TestConsumerListener(VideoSample *sample) : sample_(sample) {}
    ~TestConsumerListener() {}
    void OnBufferAvailable() override
    {
        sample_->OnBufferAvailable();
    }

private:
    VideoSample *sample_ = nullptr;
};

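// Error callback: any error reported by the processor increments the sample's error counter.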
static void OnError(OH_VideoProcessing* videoProcessor, VideoProcessing_ErrorCode error, void* userData)
{
    VideoSample* sample = reinterpret_cast<VideoSample*>(userData);
    sample->UpdateErrorCount();
}

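// State callback: wakes the waiting thread once the processor reports the STOPPED state.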
static void OnState(OH_VideoProcessing* videoProcessor, VideoProcessing_State state, void* userData)
{
    VideoSample* sample = reinterpret_cast<VideoSample*>(userData);
    if (state == VIDEO_PROCESSING_STATE_STOPPED) {
        sample->NotifyCv();
    }
}

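// Output callback for the C API path: renders every new output buffer and counts failures.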
static void OnNewOutputBufferCall(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData)
{
    VideoSample* sample = reinterpret_cast<VideoSample*>(userData);
    VideoProcessing_ErrorCode ret = OH_VideoProcessing_RenderOutputBuffer(videoProcessor, index);
    if (ret != VIDEO_PROCESSING_SUCCESS) {
        sample->UpdateErrorCount();
    }
}

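// Output callback for the inner-implementation path: renders through the wrapped VideoProcessing object.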
static void OnNewOutputBufferCallImpl(OH_VideoProcessing* videoProcessor, uint32_t index, void* userData)
{
    VideoSample* sample = reinterpret_cast<VideoSample*>(userData);
    VideoProcessing_ErrorCode ret = videoProcessor->GetVideoProcessing()->RenderOutputBuffer(index);
    if (ret != VIDEO_PROCESSING_SUCCESS) {
        sample->UpdateErrorCount();
    }
}

VideoSample::VideoSample()
{
}

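// Releases the callback object, the processor instance, and any buffers still held
// on the consumer surface, then tears down the processing environment.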
VideoSample::~VideoSample()
{
    if (callback_ || callbackImpl_) {
        if (!isImpl_) {
            OH_VideoProcessingCallback_Destroy(callback_);
            callback_ = nullptr;
        } else {
            VideoProcessing_Callback::Destroy(callbackImpl_);
            callbackImpl_ = nullptr;
        }
    }
    if (rect_) {
        delete rect_;
        rect_ = nullptr;
    }
    if (!isImpl_) {
        OH_VideoProcessing_Destroy(videoProcessor_);
    } else {
        OH_VideoProcessing::Destroy(videoProcessorImpl_);
    }
    if (cs_) {
        for (; !inputBufferAvilQue_.empty(); inputBufferAvilQue_.pop()) {
            cs_->ReleaseBuffer(inputBufferAvilQue_.front(), -1);
        }
        cs_->UnregisterConsumerListener();
    }
    cs_ = nullptr;
    OH_VideoProcessing_DeinitializeEnvironment();
}

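// Sets up the C API pipeline: creates the detail-enhancer processor, wires the
// consumer/producer surfaces, registers callbacks, and applies the quality level parameter.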
int32_t VideoSample::InitVideoSample(VideoProcessParam param)
{
    OH_VideoProcessing_InitializeEnvironment();
    param_ = param;
    OH_VideoProcessing_Create(&videoProcessor_, VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER);
    cs_ = Surface::CreateSurfaceAsConsumer();
    sptr<IBufferConsumerListener> listener = new TestConsumerListener(this);
    cs_->RegisterConsumerListener(listener);
    auto p = cs_->GetProducer();
    sptr<Surface> ps = Surface::CreateSurfaceAsProducer(p);
    outWindow_ = CreateNativeWindowFromSurface(&ps);
    cs_->SetQueueSize(QUEUE_BUFFER_SIZE);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow_, SET_BUFFER_GEOMETRY, param_.outWidth, param_.outHeight);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow_, SET_USAGE,
        NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE |
        NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow_, SET_FORMAT, param_.outFmt);
    OH_VideoProcessing_SetSurface(videoProcessor_, outWindow_);
    OH_VideoProcessing_GetSurface(videoProcessor_, &inWindow_);
    SetInputWindowParam();
    OH_VideoProcessingCallback_Create(&callback_);
    OH_VideoProcessingCallback_BindOnError(callback_, OnError);
    OH_VideoProcessingCallback_BindOnState(callback_, OnState);
    OH_VideoProcessingCallback_BindOnNewOutputBuffer(callback_, OnNewOutputBufferCall);
    OH_VideoProcessing_RegisterCallback(videoProcessor_, callback_, this);
    OH_AVFormat* parameter = OH_AVFormat_Create();
    OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, qualityLevel_);
    OH_VideoProcessing_SetParameter(videoProcessor_, parameter);
    OH_AVFormat_Destroy(parameter); // release the temporary format once the parameter has been applied
    return VIDEO_PROCESSING_SUCCESS;
}

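// Same setup as InitVideoSample(), but drives the inner VideoProcessing implementation
// directly instead of going through the C API entry points.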
int32_t VideoSample::InitVideoSampleImpl(VideoProcessParam param)
{
    param_ = param;
    OH_VideoProcessing::Create(&videoProcessorImpl_, VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER,
        VideoProcessingCapiCapability::GetOpenGLContext());
    cs_ = Surface::CreateSurfaceAsConsumer();
    sptr<IBufferConsumerListener> listener = new TestConsumerListener(this);
    cs_->RegisterConsumerListener(listener);
    auto p = cs_->GetProducer();
    sptr<Surface> ps = Surface::CreateSurfaceAsProducer(p);
    outWindow_ = CreateNativeWindowFromSurface(&ps);
    cs_->SetQueueSize(QUEUE_BUFFER_SIZE);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow_, SET_BUFFER_GEOMETRY, param_.outWidth, param_.outHeight);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow_, SET_USAGE,
        NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE |
        NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER);
    (void)OH_NativeWindow_NativeWindowHandleOpt(outWindow_, SET_FORMAT, param_.outFmt);
    videoProcessorImpl_->GetVideoProcessing()->SetSurface(outWindow_);
    videoProcessorImpl_->GetVideoProcessing()->GetSurface(&inWindow_);
    SetInputWindowParam();
    VideoProcessing_Callback::Create(&callbackImpl_);
    callbackImpl_->GetInnerCallback()->BindOnError(OnError);
    callbackImpl_->GetInnerCallback()->BindOnState(OnState);
    callbackImpl_->GetInnerCallback()->BindOnNewOutputBuffer(OnNewOutputBufferCallImpl);
    videoProcessorImpl_->GetVideoProcessing()->RegisterCallback(callbackImpl_, this);
    OH_AVFormat* parameter = OH_AVFormat_Create();
    OH_AVFormat_SetIntValue(parameter, VIDEO_DETAIL_ENHANCER_PARAMETER_KEY_QUALITY_LEVEL, qualityLevel_);
    videoProcessorImpl_->GetVideoProcessing()->SetParameter(parameter);
    OH_AVFormat_Destroy(parameter); // release the temporary format once the parameter has been applied
    return VIDEO_PROCESSING_SUCCESS;
}

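// Configures geometry, usage, and format on the input window and prepares the flush
// region covering the full input frame.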
void VideoSample::SetInputWindowParam()
{
    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow_, SET_BUFFER_GEOMETRY, param_.inWidth, param_.inHeight);
    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow_, SET_USAGE,
        NATIVEBUFFER_USAGE_CPU_READ | NATIVEBUFFER_USAGE_CPU_WRITE |
        NATIVEBUFFER_USAGE_MEM_DMA | NATIVEBUFFER_USAGE_HW_RENDER);
    (void)OH_NativeWindow_NativeWindowHandleOpt(inWindow_, SET_FORMAT, param_.inFmt);

    rect_ = new Region::Rect();
    rect_->x = 0;
    rect_->y = 0;
    rect_->w = param_.inWidth;
    rect_->h = param_.inHeight;
    region_.rects = rect_;
}

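// Producer loop: requests and immediately flushes INPUT_FRAME_NUMBER buffers on the input window.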
int32_t VideoSample::InputFunc()
{
    for (int i = 0; i < INPUT_FRAME_NUMBER; i++) {
        int fenceFd = -1;
        OHNativeWindowBuffer *ohNativeWindowBuffer = nullptr;
        OH_NativeWindow_NativeWindowRequestBuffer(inWindow_, &ohNativeWindowBuffer, &fenceFd);
        if (fenceFd > 0) {
            close(fenceFd);
        }
        OH_NativeBuffer *nativeBuffer = nullptr;
        OH_NativeBuffer_FromNativeWindowBuffer(ohNativeWindowBuffer, &nativeBuffer);
        OH_NativeWindow_NativeWindowFlushBuffer(inWindow_, ohNativeWindowBuffer, -1, region_);
    }
    return 0;
}

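// Starts the C API processor and launches the input-producer thread.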
int32_t VideoSample::StartProcess()
{
    OH_VideoProcessing_Start(videoProcessor_);
    inputLoop_ = make_unique<thread>(&VideoSample::InputFunc, this);
    return VIDEO_PROCESSING_SUCCESS;
}

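// Starts the inner implementation and launches the input-producer thread.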
int32_t VideoSample::StartProcessImpl()
{
    videoProcessorImpl_->GetVideoProcessing()->Start();
    inputLoop_ = make_unique<thread>(&VideoSample::InputFunc, this);
    return VIDEO_PROCESSING_SUCCESS;
}

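// Joins the producer thread, stops the processor, and waits up to STOP_TIMEOUT for the
// STOPPED state callback before returning the stop result.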
int32_t VideoSample::WaitAndStopSample()
{
    inputLoop_->join();
    int32_t ret = OH_VideoProcessing_Stop(videoProcessor_);
    unique_lock<mutex> lock(mutex_);
    if (cv_.wait_for(lock, STOP_TIMEOUT) == std::cv_status::timeout) {
        std::cout << "waiting stop state timeout" << std::endl;
    }
    return ret;
}

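// Same stop-and-wait sequence for the inner-implementation path.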
int32_t VideoSample::WaitAndStopSampleImpl()
{
    inputLoop_->join();
    int32_t ret = videoProcessorImpl_->GetVideoProcessing()->Stop();
    unique_lock<mutex> lock(mutex_);
    if (cv_.wait_for(lock, STOP_TIMEOUT) == std::cv_status::timeout) {
        std::cout << "waiting stop state timeout" << std::endl;
    }
    return ret;
}

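// Creates a second processor to obtain another input window, then switches the running
// processor's output surface to that window and returns the result.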
int32_t VideoSample::SetSurfaceOnRunningImpl()
{
    OH_VideoProcessing* videoProcessing2 = nullptr;
    OH_VideoProcessing::Create(&videoProcessing2, VIDEO_PROCESSING_TYPE_DETAIL_ENHANCER,
        VideoProcessingCapiCapability::GetOpenGLContext());
    OHNativeWindow* window2 = nullptr;
    videoProcessing2->GetVideoProcessing()->GetSurface(&window2);
    int32_t ret = videoProcessorImpl_->GetVideoProcessing()->SetSurface(window2);
    return ret;
}

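// Consumer side: acquires each processed buffer from the output surface and queues it;
// queued buffers are released back to the surface in the destructor.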
void VideoSample::OnBufferAvailable()
{
    unique_lock<mutex> lock(mutexListener_);
    sptr<SurfaceBuffer> buffer;
    Rect damage = {};
    int32_t fence = -1;
    int64_t timestamp = 0;
    cs_->AcquireBuffer(buffer, fence, timestamp, damage);
    inputBufferAvilQue_.push(buffer);
    lock.unlock();
}

void VideoSample::UpdateErrorCount()
{
    errCount_++;
}

void VideoSample::SetQualityLevel(VideoDetailEnhancer_QualityLevel level)
{
    qualityLevel_ = level;
}

void VideoSample::SetImplLoader(bool isImpl)
{
    isImpl_ = isImpl;
}

void VideoSample::NotifyCv()
{
    cv_.notify_all();
}