/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "output/video_output_napi.h"
#include <uv.h>
#include "hilog/log.h"

namespace OHOS {
namespace CameraStandard {
using OHOS::HiviewDFX::HiLog;
using OHOS::HiviewDFX::HiLogLabel;

namespace {
    constexpr HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN, "VideoOutputNapi"};
}

thread_local napi_ref VideoOutputNapi::sConstructor_ = nullptr;
thread_local sptr<VideoOutput> VideoOutputNapi::sVideoOutput_ = nullptr;
thread_local uint32_t VideoOutputNapi::videoOutputTaskId = CAMERA_VIDEO_OUTPUT_TASKID;

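// VideoCallbackListener receives frame-start, frame-end and error notifications from the native
// VideoOutput and forwards them to the JS callbacks registered through VideoOutputNapi::On().
// Native callbacks may arrive on a non-JS thread, so each event is marshalled onto the napi
// environment's libuv event loop before the JS callback is invoked.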
VideoCallbackListener::VideoCallbackListener(napi_env env) : env_(env) {}

void VideoCallbackListener::UpdateJSCallbackAsync(std::string propName, const int32_t value) const
{
    uv_loop_s* loop = nullptr;
    napi_get_uv_event_loop(env_, &loop);
    if (!loop) {
        MEDIA_ERR_LOG("VideoCallbackListener:UpdateJSCallbackAsync() failed to get event loop");
        return;
    }
    uv_work_t* work = new(std::nothrow) uv_work_t;
    if (!work) {
        MEDIA_ERR_LOG("VideoCallbackListener:UpdateJSCallbackAsync() failed to allocate work");
        return;
    }
    std::unique_ptr<VideoOutputCallbackInfo> callbackInfo =
        std::make_unique<VideoOutputCallbackInfo>(propName, value, this);
    work->data = callbackInfo.get();
    int ret = uv_queue_work(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) {
        VideoOutputCallbackInfo* callbackInfo = reinterpret_cast<VideoOutputCallbackInfo *>(work->data);
        if (callbackInfo) {
            callbackInfo->listener_->UpdateJSCallback(callbackInfo->eventName_, callbackInfo->value_);
            delete callbackInfo;
        }
        delete work;
    });
    if (ret) {
        MEDIA_ERR_LOG("VideoCallbackListener:UpdateJSCallbackAsync() failed to execute work");
        delete work;
    } else {
        // Ownership of callbackInfo passes to the queued work item; it is deleted in the after-work callback.
        callbackInfo.release();
    }
}

void VideoCallbackListener::OnFrameStarted() const
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("VideoCallbackListener::OnFrameStarted");
    UpdateJSCallbackAsync("OnFrameStarted", -1);
}

void VideoCallbackListener::OnFrameEnded(const int32_t frameCount) const
{
    CAMERA_SYNC_TRACE;
    MEDIA_INFO_LOG("VideoCallbackListener::OnFrameEnded frameCount: %{public}d", frameCount);
    UpdateJSCallbackAsync("OnFrameEnded", frameCount);
}

void VideoCallbackListener::OnError(const int32_t errorCode) const
{
    MEDIA_INFO_LOG("VideoCallbackListener::OnError errorCode: %{public}d", errorCode);
    UpdateJSCallbackAsync("OnError", errorCode);
}

void VideoCallbackListener::SetCallbackRef(const std::string &eventType, const napi_ref &callbackRef)
{
    if (eventType.compare("frameStart") == 0) {
        frameStartCallbackRef_ = callbackRef;
    } else if (eventType.compare("frameEnd") == 0) {
        frameEndCallbackRef_ = callbackRef;
    } else if (eventType.compare("error") == 0) {
        errorCallbackRef_ = callbackRef;
    } else {
        MEDIA_ERR_LOG("Incorrect video callback event type received from JS");
    }
}

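// UpdateJSCallback runs on the JS thread. It builds the callback argument and invokes the JS
// callback registered for the given event: frame-start and frame-end callbacks receive undefined,
// while the error callback receives an object with a "code" property. The stored error callback
// reference is deleted after its value has been retrieved, so it is delivered at most once.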
void VideoCallbackListener::UpdateJSCallback(std::string propName, const int32_t value) const
{
    napi_value result[ARGS_ONE];
    napi_value callback = nullptr;
    napi_value retVal;
    napi_value propValue;
    int32_t jsErrorCodeUnknown = -1;

    if (propName.compare("OnFrameStarted") == 0) {
        CAMERA_NAPI_CHECK_NULL_PTR_RETURN_VOID(frameStartCallbackRef_,
            "OnFrameStart callback is not registered by JS");
        napi_get_undefined(env_, &result[PARAM0]);
        napi_get_reference_value(env_, frameStartCallbackRef_, &callback);
    } else if (propName.compare("OnFrameEnded") == 0) {
        CAMERA_NAPI_CHECK_NULL_PTR_RETURN_VOID(frameEndCallbackRef_,
            "OnFrameEnd callback is not registered by JS");
        napi_get_undefined(env_, &result[PARAM0]);
        napi_get_reference_value(env_, frameEndCallbackRef_, &callback);
    } else {
        CAMERA_NAPI_CHECK_NULL_PTR_RETURN_VOID(errorCallbackRef_,
            "OnError callback is not registered by JS");
        napi_create_object(env_, &result[PARAM0]);
        // The error object's "code" is reported as jsErrorCodeUnknown (-1) rather than the native error code.
        napi_create_int32(env_, jsErrorCodeUnknown, &propValue);
        napi_set_named_property(env_, result[PARAM0], "code", propValue);
        napi_get_reference_value(env_, errorCallbackRef_, &callback);
        if (errorCallbackRef_ != nullptr) {
            napi_delete_reference(env_, errorCallbackRef_);
        }
    }

    napi_call_function(env_, nullptr, callback, ARGS_ONE, result, &retVal);
}

VideoOutputNapi::VideoOutputNapi() : env_(nullptr), wrapper_(nullptr)
{
}

VideoOutputNapi::~VideoOutputNapi()
{
    if (wrapper_ != nullptr) {
        napi_delete_reference(env_, wrapper_);
    }
    if (videoOutput_) {
        videoOutput_ = nullptr;
    }
    if (videoCallback_) {
        videoCallback_ = nullptr;
    }
}

void VideoOutputNapi::VideoOutputNapiDestructor(napi_env env, void* nativeObject, void* finalize_hint)
{
    MEDIA_DEBUG_LOG("VideoOutputNapiDestructor enter");
    VideoOutputNapi* videoOutput = reinterpret_cast<VideoOutputNapi*>(nativeObject);
    if (videoOutput != nullptr) {
        videoOutput->~VideoOutputNapi();
    }
}

napi_value VideoOutputNapi::Init(napi_env env, napi_value exports)
{
    napi_status status;
    napi_value ctorObj;
    int32_t refCount = 1;

    napi_property_descriptor video_output_props[] = {
        DECLARE_NAPI_FUNCTION("start", Start),
        DECLARE_NAPI_FUNCTION("stop", Stop),
        DECLARE_NAPI_FUNCTION("getFrameRateRange", GetFrameRateRange),
        DECLARE_NAPI_FUNCTION("setFrameRateRange", SetFrameRateRange),
        DECLARE_NAPI_FUNCTION("release", Release),
        DECLARE_NAPI_FUNCTION("on", On)
    };

    status = napi_define_class(env, CAMERA_VIDEO_OUTPUT_NAPI_CLASS_NAME, NAPI_AUTO_LENGTH,
                               VideoOutputNapiConstructor, nullptr,
                               sizeof(video_output_props) / sizeof(video_output_props[PARAM0]),
                               video_output_props, &ctorObj);
    if (status == napi_ok) {
        status = napi_create_reference(env, ctorObj, refCount, &sConstructor_);
        if (status == napi_ok) {
            status = napi_set_named_property(env, exports, CAMERA_VIDEO_OUTPUT_NAPI_CLASS_NAME, ctorObj);
            if (status == napi_ok) {
                return exports;
            }
        }
    }

    return nullptr;
}

// Constructor callback
napi_value VideoOutputNapi::VideoOutputNapiConstructor(napi_env env, napi_callback_info info)
{
    napi_status status;
    napi_value result = nullptr;
    napi_value thisVar = nullptr;

    napi_get_undefined(env, &result);
    CAMERA_NAPI_GET_JS_OBJ_WITH_ZERO_ARGS(env, info, status, thisVar);

    if (status == napi_ok && thisVar != nullptr) {
        std::unique_ptr<VideoOutputNapi> obj = std::make_unique<VideoOutputNapi>();
        if (obj != nullptr) {
            obj->env_ = env;
            obj->videoOutput_ = sVideoOutput_;

            std::shared_ptr<VideoCallbackListener> callback = std::make_shared<VideoCallbackListener>(env);
            ((sptr<VideoOutput> &)(obj->videoOutput_))->SetCallback(callback);
            obj->videoCallback_ = callback;

            status = napi_wrap(env, thisVar, reinterpret_cast<void*>(obj.get()),
                               VideoOutputNapi::VideoOutputNapiDestructor, nullptr, nullptr);
            if (status == napi_ok) {
                obj.release();
                return thisVar;
            } else {
                MEDIA_ERR_LOG("Failure wrapping js to native napi");
            }
        }
    }

    return result;
}

static napi_value ConvertJSArgsToNative(napi_env env, size_t argc, const napi_value argv[],
    VideoOutputAsyncContext &asyncContext)
{
    std::string str = "";
    std::vector<std::string> strArr;
    std::string order = "";
    const int32_t refCount = 1;
    napi_value result;
    auto context = &asyncContext;

    NAPI_ASSERT(env, argv != nullptr, "Argument list is empty");

    for (size_t i = PARAM0; i < argc; i++) {
        napi_valuetype valueType = napi_undefined;
        napi_typeof(env, argv[i], &valueType);

        if (i == PARAM0 && valueType == napi_number) {
            napi_get_value_int32(env, argv[i], &context->minFrameRate);
        } else if (i == PARAM1 && valueType == napi_number) {
            napi_get_value_int32(env, argv[i], &context->maxFrameRate);
        } else if (i == PARAM2 && valueType == napi_function) {
            napi_create_reference(env, argv[i], refCount, &context->callbackRef);
        } else {
            NAPI_ASSERT(env, false, "type mismatch");
        }
    }
    // Return true napi_value if params are successfully obtained
    napi_get_boolean(env, true, &result);
    return result;
}

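// CommonCompleteCallback is the shared napi_async_complete_callback used by Start, Stop,
// SetFrameRateRange and Release. It converts the VideoOutputAsyncContext into either a JS error
// object or a result value, finishes the async trace, resolves the promise or invokes the JS
// callback via CameraNapiUtils::InvokeJSAsyncMethod, and finally frees the context.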
static void CommonCompleteCallback(napi_env env, napi_status status, void* data)
{
    auto context = static_cast<VideoOutputAsyncContext*>(data);

    if (context == nullptr) {
        MEDIA_ERR_LOG("Async context is null");
        return;
    }

    std::unique_ptr<JSAsyncContextOutput> jsContext = std::make_unique<JSAsyncContextOutput>();

    if (!context->status) {
        CameraNapiUtils::CreateNapiErrorObject(env, context->errorCode, context->errorMsg.c_str(), jsContext);
    } else {
        jsContext->status = true;
        napi_get_undefined(env, &jsContext->error);
        if (context->bRetBool) {
            napi_get_boolean(env, context->status, &jsContext->data);
        } else {
            napi_get_undefined(env, &jsContext->data);
        }
    }

    if (!context->funcName.empty() && context->taskId > 0) {
        // Finish async trace
        CAMERA_FINISH_ASYNC_TRACE(context->funcName, context->taskId);
        jsContext->funcName = context->funcName;
    }

    if (context->work != nullptr) {
        CameraNapiUtils::InvokeJSAsyncMethod(env, context->deferred, context->callbackRef,
                                             context->work, *jsContext);
    }
    delete context;
}

sptr<VideoOutput> VideoOutputNapi::GetVideoOutput()
{
    return videoOutput_;
}

bool VideoOutputNapi::IsVideoOutput(napi_env env, napi_value obj)
{
    bool result = false;
    napi_status status;
    napi_value constructor = nullptr;

    status = napi_get_reference_value(env, sConstructor_, &constructor);
    if (status == napi_ok) {
        status = napi_instanceof(env, obj, constructor, &result);
        if (status != napi_ok) {
            result = false;
        }
    }

    return result;
}

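// CreateVideoOutput parses the surface ID string, looks up the corresponding Surface, creates the
// native VideoOutput for the given VideoProfile and wraps it in a new JS VideoOutput instance.
// The native object is handed to the constructor through the thread_local sVideoOutput_, which is
// cleared again once the JS instance has been created.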
napi_value VideoOutputNapi::CreateVideoOutput(napi_env env, VideoProfile &profile, std::string surfaceId)
{
    CAMERA_SYNC_TRACE;
    napi_status status;
    napi_value result = nullptr;
    napi_value constructor;

    status = napi_get_reference_value(env, sConstructor_, &constructor);
    if (status == napi_ok) {
        uint64_t iSurfaceId;
        std::istringstream iss(surfaceId);
        iss >> iSurfaceId;
        sptr<Surface> surface = SurfaceUtils::GetInstance()->GetSurface(iSurfaceId);
        if (surface == nullptr) {
            MEDIA_ERR_LOG("failed to get surface from SurfaceUtils");
            return result;
        }
        surface->SetUserData(CameraManager::surfaceFormat, std::to_string(profile.GetCameraFormat()));
        int retCode = CameraManager::GetInstance()->CreateVideoOutput(profile, surface, &sVideoOutput_);
        if (!CameraNapiUtils::CheckError(env, retCode)) {
            return nullptr;
        }
        if (sVideoOutput_ == nullptr) {
            MEDIA_ERR_LOG("failed to create VideoOutput");
            return result;
        }
        status = napi_new_instance(env, constructor, 0, nullptr, &result);
        sVideoOutput_ = nullptr;
        if (status == napi_ok && result != nullptr) {
            return result;
        } else {
            MEDIA_ERR_LOG("Failed to create video output instance");
        }
    }

    napi_get_undefined(env, &result);
    return result;
}

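// Start, Stop and Release share the same asynchronous pattern: an optional JS callback may be
// passed as the last argument, otherwise a promise is created. The native call runs in a napi
// async work item and the result is delivered by CommonCompleteCallback. A minimal usage sketch
// from the JS side (illustrative only, not part of this file):
//   videoOutput.start().then(() => { /* recording started */ });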
napi_value VideoOutputNapi::Start(napi_env env, napi_callback_info info)
{
    napi_status status;
    napi_value result = nullptr;
    const int32_t refCount = 1;
    napi_value resource = nullptr;
    size_t argc = ARGS_ONE;
    napi_value argv[ARGS_ONE] = {0};
    napi_value thisVar = nullptr;

    CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
    NAPI_ASSERT(env, argc <= ARGS_ONE, "requires 1 parameter maximum");

    napi_get_undefined(env, &result);
    std::unique_ptr<VideoOutputAsyncContext> asyncContext = std::make_unique<VideoOutputAsyncContext>();
    status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&asyncContext->objectInfo));
    if (status == napi_ok && asyncContext->objectInfo != nullptr) {
        if (argc == ARGS_ONE) {
            CAMERA_NAPI_GET_JS_ASYNC_CB_REF(env, argv[PARAM0], refCount, asyncContext->callbackRef);
        }
        CAMERA_NAPI_CREATE_PROMISE(env, asyncContext->callbackRef, asyncContext->deferred, result);
        CAMERA_NAPI_CREATE_RESOURCE_NAME(env, resource, "Start");
        status = napi_create_async_work(env, nullptr, resource,
            [](napi_env env, void* data) {
                auto context = static_cast<VideoOutputAsyncContext*>(data);
                context->status = false;
                // Start async trace
                context->funcName = "VideoOutputNapi::Start";
                context->taskId = CameraNapiUtils::IncreamentAndGet(videoOutputTaskId);
                CAMERA_START_ASYNC_TRACE(context->funcName, context->taskId);
                if (context->objectInfo != nullptr) {
                    context->bRetBool = false;
                    context->errorCode = ((sptr<VideoOutput> &)(context->objectInfo->videoOutput_))->Start();
                    context->status = context->errorCode == 0;
                }
            },
            CommonCompleteCallback, static_cast<void*>(asyncContext.get()), &asyncContext->work);
        if (status != napi_ok) {
            MEDIA_ERR_LOG("Failed to create napi_create_async_work for VideoOutputNapi::Start");
            napi_get_undefined(env, &result);
        } else {
            napi_queue_async_work(env, asyncContext->work);
            asyncContext.release();
        }
    }

    return result;
}

napi_value VideoOutputNapi::Stop(napi_env env, napi_callback_info info)
{
    napi_status status;
    napi_value result = nullptr;
    const int32_t refCount = 1;
    napi_value resource = nullptr;
    size_t argc = ARGS_ONE;
    napi_value argv[ARGS_ONE] = {0};
    napi_value thisVar = nullptr;

    CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
    NAPI_ASSERT(env, argc <= 1, "requires 1 parameter maximum");

    napi_get_undefined(env, &result);
    std::unique_ptr<VideoOutputAsyncContext> asyncContext = std::make_unique<VideoOutputAsyncContext>();
    status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&asyncContext->objectInfo));
    if (status == napi_ok && asyncContext->objectInfo != nullptr) {
        if (argc == ARGS_ONE) {
            CAMERA_NAPI_GET_JS_ASYNC_CB_REF(env, argv[PARAM0], refCount, asyncContext->callbackRef);
        }
        CAMERA_NAPI_CREATE_PROMISE(env, asyncContext->callbackRef, asyncContext->deferred, result);
        CAMERA_NAPI_CREATE_RESOURCE_NAME(env, resource, "Stop");
        status = napi_create_async_work(env, nullptr, resource,
            [](napi_env env, void* data) {
                auto context = static_cast<VideoOutputAsyncContext*>(data);
                context->status = false;
                // Start async trace
                context->funcName = "VideoOutputNapi::Stop";
                context->taskId = CameraNapiUtils::IncreamentAndGet(videoOutputTaskId);
                CAMERA_START_ASYNC_TRACE(context->funcName, context->taskId);
                if (context->objectInfo != nullptr) {
                    context->bRetBool = false;
                    context->errorCode = ((sptr<VideoOutput> &)(context->objectInfo->videoOutput_))->Stop();
                    context->status = context->errorCode == 0;
                }
            },
            CommonCompleteCallback, static_cast<void*>(asyncContext.get()), &asyncContext->work);
        if (status != napi_ok) {
            MEDIA_ERR_LOG("Failed to create napi_create_async_work for VideoOutputNapi::Stop");
            napi_get_undefined(env, &result);
        } else {
            napi_queue_async_work(env, asyncContext->work);
            asyncContext.release();
        }
    }

    return result;
}

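// GetFrameRateRangeAsyncCallbackComplete turns the vecFrameRateRangeList collected in the async
// context into a JS array of int32 values; if the list is empty or the array cannot be created,
// a JS error object is returned instead.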
void GetFrameRateRangeAsyncCallbackComplete(napi_env env, napi_status status, void* data)
{
    auto context = static_cast<VideoOutputAsyncContext*>(data);
    napi_value frameRateRange = nullptr;

    CAMERA_NAPI_CHECK_NULL_PTR_RETURN_VOID(context, "Async context is null");

    std::unique_ptr<JSAsyncContextOutput> jsContext = std::make_unique<JSAsyncContextOutput>();
    jsContext->status = true;
    napi_get_undefined(env, &jsContext->error);
    if ((!context->vecFrameRateRangeList.empty()) && (napi_create_array(env, &frameRateRange) == napi_ok)) {
        int32_t j = 0;
        for (size_t i = 0; i < context->vecFrameRateRangeList.size(); i++) {
            int32_t frameRate = context->vecFrameRateRangeList[i];
            napi_value value;
            if (napi_create_int32(env, frameRate, &value) == napi_ok) {
                napi_set_element(env, frameRateRange, j, value);
                j++;
            }
        }
        jsContext->data = frameRateRange;
    } else {
        MEDIA_ERR_LOG("vecFrameRateRangeList is empty or failed to create array!");
        CameraNapiUtils::CreateNapiErrorObject(env, context->errorCode,
            "vecFrameRateRangeList is empty or failed to create array!", jsContext);
    }

    if (!context->funcName.empty() && context->taskId > 0) {
        // Finish async trace
        CAMERA_FINISH_ASYNC_TRACE(context->funcName, context->taskId);
        jsContext->funcName = context->funcName;
    }

    if (context->work != nullptr) {
        CameraNapiUtils::InvokeJSAsyncMethod(env, context->deferred, context->callbackRef,
                                             context->work, *jsContext);
    }
    delete context;
}

napi_value VideoOutputNapi::GetFrameRateRange(napi_env env, napi_callback_info info)
{
    CAMERA_SYNC_TRACE;
    napi_status status;
    napi_value result = nullptr;
    const int32_t refCount = 1;
    napi_value resource = nullptr;
    size_t argc = ARGS_ONE;
    napi_value argv[ARGS_ONE] = {0};
    napi_value thisVar = nullptr;

    CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
    NAPI_ASSERT(env, (argc <= ARGS_ONE), "requires 1 parameter maximum");

    napi_get_undefined(env, &result);
    auto asyncContext = std::make_unique<VideoOutputAsyncContext>();
    status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&asyncContext->objectInfo));
    if (status == napi_ok && asyncContext->objectInfo != nullptr) {
        if (argc == ARGS_ONE) {
            CAMERA_NAPI_GET_JS_ASYNC_CB_REF(env, argv[PARAM0], refCount, asyncContext->callbackRef);
        }
        CAMERA_NAPI_CREATE_PROMISE(env, asyncContext->callbackRef, asyncContext->deferred, result);
        CAMERA_NAPI_CREATE_RESOURCE_NAME(env, resource, "GetFrameRateRange");
        status = napi_create_async_work(
            env, nullptr, resource, [](napi_env env, void* data) {
                auto context = static_cast<VideoOutputAsyncContext*>(data);
                context->status = false;
                if (context->objectInfo != nullptr) {
                    if (!context->vecFrameRateRangeList.empty()) {
                        context->status = true;
                    } else {
                        context->status = false;
                        MEDIA_ERR_LOG("GetFrameRateRange vecFrameRateRangeList is empty!");
                    }
                }
            },
            GetFrameRateRangeAsyncCallbackComplete, static_cast<void*>(asyncContext.get()), &asyncContext->work);
        if (status != napi_ok) {
            MEDIA_ERR_LOG("Failed to create napi_create_async_work for GetFrameRateRange");
            napi_get_undefined(env, &result);
        } else {
            napi_queue_async_work(env, asyncContext->work);
            asyncContext.release();
        }
    }

    return result;
}

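// isFrameRateRangeAvailable checks the requested (minFrameRate, maxFrameRate) pair against the
// supported ranges stored as consecutive min/max pairs in vecFrameRateRangeList. Note the inverted
// return value: it returns true when the requested range is NOT available (or the list is empty),
// and false when a matching supported range is found.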
bool isFrameRateRangeAvailable(napi_env env, void* data)
{
    bool invalidFrameRate = true;
    const int32_t FRAME_RATE_RANGE_STEP = 2;
    auto context = static_cast<VideoOutputAsyncContext*>(data);
    if (context == nullptr) {
        MEDIA_ERR_LOG("Async context is null");
        return invalidFrameRate;
    }

    if (!context->vecFrameRateRangeList.empty()) {
        for (size_t i = 0; i < (context->vecFrameRateRangeList.size() - 1); i += FRAME_RATE_RANGE_STEP) {
            int32_t minVal = context->vecFrameRateRangeList[i];
            int32_t maxVal = context->vecFrameRateRangeList[i + 1];
            if ((context->minFrameRate == minVal) && (context->maxFrameRate == maxVal)) {
                invalidFrameRate = false;
                break;
            }
        }
    } else {
        MEDIA_ERR_LOG("isFrameRateRangeAvailable: vecFrameRateRangeList is empty!");
    }
    return invalidFrameRate;
}

napi_value VideoOutputNapi::SetFrameRateRange(napi_env env, napi_callback_info info)
{
    CAMERA_SYNC_TRACE;
    napi_status status;
    napi_value result = nullptr;
    napi_value resource = nullptr;
    size_t argc = ARGS_THREE;
    napi_value argv[ARGS_THREE] = {0};
    napi_value thisVar = nullptr;

    CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
    NAPI_ASSERT(env, (argc == ARGS_TWO || argc == ARGS_THREE), "requires 2 or 3 parameters");

    napi_get_undefined(env, &result);
    auto asyncContext = std::make_unique<VideoOutputAsyncContext>();
    status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&asyncContext->objectInfo));
    if (status == napi_ok && asyncContext->objectInfo != nullptr) {
        result = ConvertJSArgsToNative(env, argc, argv, *asyncContext);
        CAMERA_NAPI_CHECK_NULL_PTR_RETURN_UNDEFINED(env, result, result, "Failed to obtain arguments");
        CAMERA_NAPI_CREATE_PROMISE(env, asyncContext->callbackRef, asyncContext->deferred, result);
        CAMERA_NAPI_CREATE_RESOURCE_NAME(env, resource, "SetFrameRateRange");
        status = napi_create_async_work(
            env, nullptr, resource,
            [](napi_env env, void* data) {
                auto context = static_cast<VideoOutputAsyncContext*>(data);
                context->status = false;
                // Start async trace
                context->funcName = "VideoOutputNapi::SetFrameRateRange";
                context->taskId = CameraNapiUtils::IncreamentAndGet(videoOutputTaskId);
                CAMERA_START_ASYNC_TRACE(context->funcName, context->taskId);
                if (context->objectInfo != nullptr) {
                    context->bRetBool = false;
                    // isFrameRateRangeAvailable() returns true when the requested range is invalid.
                    bool invalidFrameRate = isFrameRateRangeAvailable(env, data);
                    if (!invalidFrameRate) {
                        context->status = true;
                    } else {
                        MEDIA_ERR_LOG("Requested frame rate range is not supported in SetFrameRateRange");
                        context->errorMsg = "Requested frame rate range is not supported in SetFrameRateRange";
                    }
                }
            },
            CommonCompleteCallback, static_cast<void*>(asyncContext.get()), &asyncContext->work);
        if (status != napi_ok) {
            MEDIA_ERR_LOG("Failed to create napi_create_async_work for SetFrameRateRange");
            napi_get_undefined(env, &result);
        } else {
            napi_queue_async_work(env, asyncContext->work);
            asyncContext.release();
        }
    }

    return result;
}

napi_value VideoOutputNapi::Release(napi_env env, napi_callback_info info)
{
    napi_status status;
    napi_value result = nullptr;
    const int32_t refCount = 1;
    napi_value resource = nullptr;
    size_t argc = ARGS_ONE;
    napi_value argv[ARGS_ONE] = {0};
    napi_value thisVar = nullptr;

    CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
    NAPI_ASSERT(env, argc <= 1, "requires 1 parameter maximum");

    napi_get_undefined(env, &result);
    std::unique_ptr<VideoOutputAsyncContext> asyncContext = std::make_unique<VideoOutputAsyncContext>();
    status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&asyncContext->objectInfo));
    if (status == napi_ok && asyncContext->objectInfo != nullptr) {
        if (argc == ARGS_ONE) {
            CAMERA_NAPI_GET_JS_ASYNC_CB_REF(env, argv[PARAM0], refCount, asyncContext->callbackRef);
        }

        CAMERA_NAPI_CREATE_PROMISE(env, asyncContext->callbackRef, asyncContext->deferred, result);
        CAMERA_NAPI_CREATE_RESOURCE_NAME(env, resource, "Release");

        status = napi_create_async_work(
            env, nullptr, resource, [](napi_env env, void* data) {
                auto context = static_cast<VideoOutputAsyncContext*>(data);
                context->status = false;
                // Start async trace
                context->funcName = "VideoOutputNapi::Release";
                context->taskId = CameraNapiUtils::IncreamentAndGet(videoOutputTaskId);
                CAMERA_START_ASYNC_TRACE(context->funcName, context->taskId);
                if (context->objectInfo != nullptr) {
                    context->bRetBool = false;
                    context->status = true;
                    ((sptr<VideoOutput> &)(context->objectInfo->videoOutput_))->Release();
                }
            },
            CommonCompleteCallback, static_cast<void*>(asyncContext.get()), &asyncContext->work);
        if (status != napi_ok) {
            MEDIA_ERR_LOG("Failed to create napi_create_async_work for VideoOutputNapi::Release");
            napi_get_undefined(env, &result);
        } else {
            napi_queue_async_work(env, asyncContext->work);
            asyncContext.release();
        }
    }

    return result;
}

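// On registers a JS callback for a given event type; the supported types (see SetCallbackRef) are
// "frameStart", "frameEnd" and "error". Illustrative ArkTS usage (sketch only, not part of this file):
//   videoOutput.on('frameStart', () => { console.info('frame started'); });
//   videoOutput.on('error', (err) => { console.error('video output error: ' + err.code); });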
napi_value VideoOutputNapi::On(napi_env env, napi_callback_info info)
{
    CAMERA_SYNC_TRACE;
    napi_value undefinedResult = nullptr;
    size_t argCount = ARGS_TWO;
    napi_value argv[ARGS_TWO] = {nullptr};
    napi_value thisVar = nullptr;
    size_t res = 0;
    char buffer[SIZE];
    const int32_t refCount = 1;
    VideoOutputNapi* obj = nullptr;
    napi_status status;

    napi_get_undefined(env, &undefinedResult);

    CAMERA_NAPI_GET_JS_ARGS(env, info, argCount, argv, thisVar);
    NAPI_ASSERT(env, argCount == ARGS_TWO, "requires 2 parameters");

    if (thisVar == nullptr || argv[PARAM0] == nullptr || argv[PARAM1] == nullptr) {
        MEDIA_ERR_LOG("Failed to retrieve details about the callback");
        return undefinedResult;
    }

    status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&obj));
    if (status == napi_ok && obj != nullptr) {
        napi_valuetype valueType = napi_undefined;
        if (napi_typeof(env, argv[PARAM0], &valueType) != napi_ok || valueType != napi_string
            || napi_typeof(env, argv[PARAM1], &valueType) != napi_ok || valueType != napi_function) {
            return undefinedResult;
        }

        napi_get_value_string_utf8(env, argv[PARAM0], buffer, SIZE, &res);
        std::string eventType = std::string(buffer);

        napi_ref callbackRef;
        napi_create_reference(env, argv[PARAM1], refCount, &callbackRef);

        if (!eventType.empty()) {
            obj->videoCallback_->SetCallbackRef(eventType, callbackRef);
        } else {
            MEDIA_ERR_LOG("Failed to Register Callback: event type is empty!");
        }
    }

    return undefinedResult;
}
} // namespace CameraStandard
} // namespace OHOS