1 /*
2 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "session/camera_session_napi.h"
17
18 #include <cstdint>
19 #include <mutex>
20 #include <string>
21 #include <unordered_map>
22 #include <uv.h>
23 #include <vector>
24
25 #include "camera_error_code.h"
26 #include "camera_napi_const.h"
27 #include "camera_napi_object_types.h"
28 #include "camera_napi_param_parser.h"
29 #include "camera_napi_security_utils.h"
30 #include "camera_napi_template_utils.h"
31 #include "camera_napi_utils.h"
32 #include "camera_output_capability.h"
33 #include "capture_scene_const.h"
34 #include "capture_session.h"
35 #include "js_native_api.h"
36 #include "js_native_api_types.h"
37 #include "listener_base.h"
38 #include "napi/native_api.h"
39 #include "napi/native_common.h"
40 #include "output/photo_output_napi.h"
41
42 namespace OHOS {
43 namespace CameraStandard {
44 namespace {
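// Shared completion handler for the async works queued below: it converts the async
// context into a JSAsyncContextOutput, resolves the promise or invokes the callback via
// InvokeJSAsyncMethod, then releases the held napi values and deletes the context.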
45 void AsyncCompleteCallback(napi_env env, napi_status status, void* data)
46 {
47 auto context = static_cast<CameraSessionAsyncContext*>(data);
48 CHECK_ERROR_RETURN_LOG(context == nullptr, "CameraSessionNapi AsyncCompleteCallback context is null");
49 MEDIA_INFO_LOG("CameraSessionNapi AsyncCompleteCallback %{public}s, status = %{public}d", context->funcName.c_str(),
50 context->status);
51 std::unique_ptr<JSAsyncContextOutput> jsContext = std::make_unique<JSAsyncContextOutput>();
52 jsContext->status = context->status;
53 if (!context->status) {
54 CameraNapiUtils::CreateNapiErrorObject(env, context->errorCode, context->errorMsg.c_str(), jsContext);
55 } else {
56 napi_get_undefined(env, &jsContext->data);
57 }
58 if (!context->funcName.empty() && context->taskId > 0) {
59 // Finish async trace
60 CAMERA_FINISH_ASYNC_TRACE(context->funcName, context->taskId);
61 jsContext->funcName = context->funcName;
62 }
63 if (context->work != nullptr) {
64 CameraNapiUtils::InvokeJSAsyncMethod(env, context->deferred, context->callbackRef, context->work, *jsContext);
65 }
66 context->FreeHeldNapiValue(env);
67 delete context;
68 }
69 } // namespace
70
71 using namespace std;
72 thread_local napi_ref CameraSessionNapi::sConstructor_ = nullptr;
73 thread_local sptr<CaptureSession> CameraSessionNapi::sCameraSession_ = nullptr;
74 thread_local uint32_t CameraSessionNapi::cameraSessionTaskId = CAMERA_SESSION_TASKID;
75
76 const std::map<SceneMode, FunctionsType> CameraSessionNapi::modeToFunctionTypeMap_ = {
77 {SceneMode::CAPTURE, FunctionsType::PHOTO_FUNCTIONS},
78 {SceneMode::VIDEO, FunctionsType::VIDEO_FUNCTIONS},
79 {SceneMode::PORTRAIT, FunctionsType::PORTRAIT_PHOTO_FUNCTIONS}
80 };
81
82 const std::map<SceneMode, FunctionsType> CameraSessionNapi::modeToConflictFunctionTypeMap_ = {
83 {SceneMode::CAPTURE, FunctionsType::PHOTO_CONFLICT_FUNCTIONS},
84 {SceneMode::VIDEO, FunctionsType::VIDEO_CONFLICT_FUNCTIONS},
85 {SceneMode::PORTRAIT, FunctionsType::PORTRAIT_PHOTO_CONFLICT_FUNCTIONS}
86 };
87
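// The napi_property_descriptor tables below declare the JS methods exposed on camera
// sessions. Init() merges a subset of them into the base CameraSession class; the
// remaining tables are presumably consumed by the mode-specific session wrappers.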
88 const std::vector<napi_property_descriptor> CameraSessionNapi::camera_process_props = {
89 DECLARE_NAPI_FUNCTION("beginConfig", CameraSessionNapi::BeginConfig),
90 DECLARE_NAPI_FUNCTION("commitConfig", CameraSessionNapi::CommitConfig),
91
92 DECLARE_NAPI_FUNCTION("canAddInput", CameraSessionNapi::CanAddInput),
93 DECLARE_NAPI_FUNCTION("addInput", CameraSessionNapi::AddInput),
94 DECLARE_NAPI_FUNCTION("removeInput", CameraSessionNapi::RemoveInput),
95
96 DECLARE_NAPI_FUNCTION("canAddOutput", CameraSessionNapi::CanAddOutput),
97 DECLARE_NAPI_FUNCTION("addOutput", CameraSessionNapi::AddOutput),
98 DECLARE_NAPI_FUNCTION("removeOutput", CameraSessionNapi::RemoveOutput),
99
100 DECLARE_NAPI_FUNCTION("start", CameraSessionNapi::Start),
101 DECLARE_NAPI_FUNCTION("stop", CameraSessionNapi::Stop),
102 DECLARE_NAPI_FUNCTION("release", CameraSessionNapi::Release),
103
104 DECLARE_NAPI_FUNCTION("lockForControl", CameraSessionNapi::LockForControl),
105 DECLARE_NAPI_FUNCTION("unlockForControl", CameraSessionNapi::UnlockForControl),
106
107 DECLARE_NAPI_FUNCTION("on", CameraSessionNapi::On),
108 DECLARE_NAPI_FUNCTION("once", CameraSessionNapi::Once),
109 DECLARE_NAPI_FUNCTION("off", CameraSessionNapi::Off),
110 DECLARE_NAPI_FUNCTION("setUsage", CameraSessionNapi::SetUsage)
111 };
112
113 const std::vector<napi_property_descriptor> CameraSessionNapi::stabilization_props = {
114 DECLARE_NAPI_FUNCTION("isVideoStabilizationModeSupported", CameraSessionNapi::IsVideoStabilizationModeSupported),
115 DECLARE_NAPI_FUNCTION("getActiveVideoStabilizationMode", CameraSessionNapi::GetActiveVideoStabilizationMode),
116 DECLARE_NAPI_FUNCTION("setVideoStabilizationMode", CameraSessionNapi::SetVideoStabilizationMode)
117 };
118
119 const std::vector<napi_property_descriptor> CameraSessionNapi::flash_props = {
120 DECLARE_NAPI_FUNCTION("hasFlash", CameraSessionNapi::HasFlash),
121 DECLARE_NAPI_FUNCTION("isFlashModeSupported", CameraSessionNapi::IsFlashModeSupported),
122 DECLARE_NAPI_FUNCTION("getFlashMode", CameraSessionNapi::GetFlashMode),
123 DECLARE_NAPI_FUNCTION("setFlashMode", CameraSessionNapi::SetFlashMode),
124 DECLARE_NAPI_FUNCTION("isLcdFlashSupported", CameraSessionNapi::IsLcdFlashSupported),
125 DECLARE_NAPI_FUNCTION("enableLcdFlash", CameraSessionNapi::EnableLcdFlash)
126 };
127
128 const std::vector<napi_property_descriptor> CameraSessionNapi::auto_exposure_props = {
129 DECLARE_NAPI_FUNCTION("isExposureModeSupported", CameraSessionNapi::IsExposureModeSupported),
130 DECLARE_NAPI_FUNCTION("getExposureMode", CameraSessionNapi::GetExposureMode),
131 DECLARE_NAPI_FUNCTION("setExposureMode", CameraSessionNapi::SetExposureMode),
132 DECLARE_NAPI_FUNCTION("getExposureBiasRange", CameraSessionNapi::GetExposureBiasRange),
133 DECLARE_NAPI_FUNCTION("setExposureBias", CameraSessionNapi::SetExposureBias),
134 DECLARE_NAPI_FUNCTION("getExposureValue", CameraSessionNapi::GetExposureValue),
135 DECLARE_NAPI_FUNCTION("getMeteringPoint", CameraSessionNapi::GetMeteringPoint),
136 DECLARE_NAPI_FUNCTION("setMeteringPoint", CameraSessionNapi::SetMeteringPoint)
137 };
138
139 const std::vector<napi_property_descriptor> CameraSessionNapi::focus_props = {
140 DECLARE_NAPI_FUNCTION("isFocusModeSupported", CameraSessionNapi::IsFocusModeSupported),
141 DECLARE_NAPI_FUNCTION("getFocusMode", CameraSessionNapi::GetFocusMode),
142 DECLARE_NAPI_FUNCTION("setFocusMode", CameraSessionNapi::SetFocusMode),
143 DECLARE_NAPI_FUNCTION("getFocusPoint", CameraSessionNapi::GetFocusPoint),
144 DECLARE_NAPI_FUNCTION("setFocusPoint", CameraSessionNapi::SetFocusPoint),
145 DECLARE_NAPI_FUNCTION("getFocalLength", CameraSessionNapi::GetFocalLength),
146 DECLARE_NAPI_FUNCTION("isFocusRangeTypeSupported", CameraSessionNapi::IsFocusRangeTypeSupported),
147 DECLARE_NAPI_FUNCTION("getFocusRange", CameraSessionNapi::GetFocusRange),
148 DECLARE_NAPI_FUNCTION("setFocusRange", CameraSessionNapi::SetFocusRange),
149 DECLARE_NAPI_FUNCTION("isFocusDrivenTypeSupported", CameraSessionNapi::IsFocusDrivenTypeSupported),
150 DECLARE_NAPI_FUNCTION("getFocusDriven", CameraSessionNapi::GetFocusDriven),
151 DECLARE_NAPI_FUNCTION("setFocusDriven", CameraSessionNapi::SetFocusDriven)
152 };
153
154 const std::vector<napi_property_descriptor> CameraSessionNapi::color_reservation_props = {
155 DECLARE_NAPI_FUNCTION("getSupportedColorReservationTypes", CameraSessionNapi::GetSupportedColorReservationTypes),
156 DECLARE_NAPI_FUNCTION("getColorReservation", CameraSessionNapi::GetColorReservation),
157 DECLARE_NAPI_FUNCTION("setColorReservation", CameraSessionNapi::SetColorReservation)
158 };
159
160 const std::vector<napi_property_descriptor> CameraSessionNapi::quality_prioritization_props = {
161 DECLARE_NAPI_FUNCTION("setQualityPrioritization", CameraSessionNapi::SetQualityPrioritization),
162 };
163
164 const std::vector<napi_property_descriptor> CameraSessionNapi::manual_focus_props = {
165 DECLARE_NAPI_FUNCTION("getFocusDistance", CameraSessionNapi::GetFocusDistance),
166 DECLARE_NAPI_FUNCTION("setFocusDistance", CameraSessionNapi::SetFocusDistance),
167 };
168
169 const std::vector<napi_property_descriptor> CameraSessionNapi::zoom_props = {
170 DECLARE_NAPI_FUNCTION("getZoomRatioRange", CameraSessionNapi::GetZoomRatioRange),
171 DECLARE_NAPI_FUNCTION("getZoomRatio", CameraSessionNapi::GetZoomRatio),
172 DECLARE_NAPI_FUNCTION("setZoomRatio", CameraSessionNapi::SetZoomRatio),
173 DECLARE_NAPI_FUNCTION("prepareZoom", PrepareZoom),
174 DECLARE_NAPI_FUNCTION("unprepareZoom", UnPrepareZoom),
175 DECLARE_NAPI_FUNCTION("setSmoothZoom", SetSmoothZoom),
176 DECLARE_NAPI_FUNCTION("getZoomPointInfos", CameraSessionNapi::GetZoomPointInfos)
177 };
178
179 const std::vector<napi_property_descriptor> CameraSessionNapi::filter_props = {
180 DECLARE_NAPI_FUNCTION("getSupportedFilters", CameraSessionNapi::GetSupportedFilters),
181 DECLARE_NAPI_FUNCTION("getFilter", CameraSessionNapi::GetFilter),
182 DECLARE_NAPI_FUNCTION("setFilter", CameraSessionNapi::SetFilter)
183 };
184
185 const std::vector<napi_property_descriptor> CameraSessionNapi::beauty_props = {
186 DECLARE_NAPI_FUNCTION("getSupportedBeautyTypes", CameraSessionNapi::GetSupportedBeautyTypes),
187 DECLARE_NAPI_FUNCTION("getSupportedBeautyRange", CameraSessionNapi::GetSupportedBeautyRange),
188 DECLARE_NAPI_FUNCTION("getBeauty", CameraSessionNapi::GetBeauty),
189 DECLARE_NAPI_FUNCTION("setBeauty", CameraSessionNapi::SetBeauty),
190 DECLARE_NAPI_FUNCTION("getSupportedPortraitThemeTypes", GetSupportedPortraitThemeTypes),
191 DECLARE_NAPI_FUNCTION("isPortraitThemeSupported", IsPortraitThemeSupported),
192 DECLARE_NAPI_FUNCTION("setPortraitThemeType", SetPortraitThemeType)
193 };
194
195 const std::vector<napi_property_descriptor> CameraSessionNapi::color_effect_props = {
196 DECLARE_NAPI_FUNCTION("getSupportedColorEffects", CameraSessionNapi::GetSupportedColorEffects),
197 DECLARE_NAPI_FUNCTION("getColorEffect", CameraSessionNapi::GetColorEffect),
198 DECLARE_NAPI_FUNCTION("setColorEffect", CameraSessionNapi::SetColorEffect)
199 };
200
201 const std::vector<napi_property_descriptor> CameraSessionNapi::macro_props = {
202 DECLARE_NAPI_FUNCTION("isMacroSupported", CameraSessionNapi::IsMacroSupported),
203 DECLARE_NAPI_FUNCTION("enableMacro", CameraSessionNapi::EnableMacro)
204 };
205
206 const std::vector<napi_property_descriptor> CameraSessionNapi::depth_fusion_props = {
207 DECLARE_NAPI_FUNCTION("isDepthFusionSupported", CameraSessionNapi::IsDepthFusionSupported),
208 DECLARE_NAPI_FUNCTION("getDepthFusionThreshold", CameraSessionNapi::GetDepthFusionThreshold),
209 DECLARE_NAPI_FUNCTION("isDepthFusionEnabled", CameraSessionNapi::IsDepthFusionEnabled),
210 DECLARE_NAPI_FUNCTION("enableDepthFusion", CameraSessionNapi::EnableDepthFusion)
211 };
212
213 const std::vector<napi_property_descriptor> CameraSessionNapi::moon_capture_boost_props = {
214 DECLARE_NAPI_FUNCTION("isMoonCaptureBoostSupported", CameraSessionNapi::IsMoonCaptureBoostSupported),
215 DECLARE_NAPI_FUNCTION("enableMoonCaptureBoost", CameraSessionNapi::EnableMoonCaptureBoost)
216 };
217
218 const std::vector<napi_property_descriptor> CameraSessionNapi::features_props = {
219 DECLARE_NAPI_FUNCTION("isSceneFeatureSupported", CameraSessionNapi::IsFeatureSupported),
220 DECLARE_NAPI_FUNCTION("enableSceneFeature", CameraSessionNapi::EnableFeature)
221 };
222
223 const std::vector<napi_property_descriptor> CameraSessionNapi::color_management_props = {
224 DECLARE_NAPI_FUNCTION("getSupportedColorSpaces", CameraSessionNapi::GetSupportedColorSpaces),
225 DECLARE_NAPI_FUNCTION("getActiveColorSpace", CameraSessionNapi::GetActiveColorSpace),
226 DECLARE_NAPI_FUNCTION("setColorSpace", CameraSessionNapi::SetColorSpace)
227 };
228
229 const std::vector<napi_property_descriptor> CameraSessionNapi::preconfig_props = {
230 DECLARE_NAPI_FUNCTION("canPreconfig", CameraSessionNapi::CanPreconfig),
231 DECLARE_NAPI_FUNCTION("preconfig", CameraSessionNapi::Preconfig)
232 };
233
234 const std::vector<napi_property_descriptor> CameraSessionNapi::camera_output_capability_props = {
235 DECLARE_NAPI_FUNCTION("getCameraOutputCapabilities", CameraSessionNapi::GetCameraOutputCapabilities)
236 };
237
238 const std::vector<napi_property_descriptor> CameraSessionNapi::camera_ability_props = {
239 DECLARE_NAPI_FUNCTION("getSessionFunctions", CameraSessionNapi::GetSessionFunctions),
240 DECLARE_NAPI_FUNCTION("getSessionConflictFunctions", CameraSessionNapi::GetSessionConflictFunctions)
241 };
242
243 const std::vector<napi_property_descriptor> CameraSessionNapi::effect_suggestion_props = {
244 DECLARE_NAPI_FUNCTION("isEffectSuggestionSupported", CameraSessionNapi::IsEffectSuggestionSupported),
245 DECLARE_NAPI_FUNCTION("enableEffectSuggestion", CameraSessionNapi::EnableEffectSuggestion),
246 DECLARE_NAPI_FUNCTION("getSupportedEffectSuggestionType", CameraSessionNapi::GetSupportedEffectSuggestionType),
247 DECLARE_NAPI_FUNCTION("getSupportedEffectSuggestionTypes", CameraSessionNapi::GetSupportedEffectSuggestionType),
248 DECLARE_NAPI_FUNCTION("setEffectSuggestionStatus", CameraSessionNapi::SetEffectSuggestionStatus),
249 DECLARE_NAPI_FUNCTION("updateEffectSuggestion", CameraSessionNapi::UpdateEffectSuggestion)
250 };
251
252 const std::vector<napi_property_descriptor> CameraSessionNapi::auto_wb_props = {
253 DECLARE_NAPI_FUNCTION("getSupportedWhiteBalanceModes", CameraSessionNapi::GetSupportedWhiteBalanceModes),
254 DECLARE_NAPI_FUNCTION("isWhiteBalanceModeSupported", CameraSessionNapi::IsWhiteBalanceModeSupported),
255 DECLARE_NAPI_FUNCTION("getWhiteBalanceMode", CameraSessionNapi::GetWhiteBalanceMode),
256 DECLARE_NAPI_FUNCTION("setWhiteBalanceMode", CameraSessionNapi::SetWhiteBalanceMode),
257 };
258
259 const std::vector<napi_property_descriptor> CameraSessionNapi::manual_wb_props = {
260 DECLARE_NAPI_FUNCTION("getWhiteBalanceRange", CameraSessionNapi::GetManualWhiteBalanceRange),
261 DECLARE_NAPI_FUNCTION("isManualWhiteBalanceSupported", CameraSessionNapi::IsManualWhiteBalanceSupported),
262 DECLARE_NAPI_FUNCTION("getWhiteBalance", CameraSessionNapi::GetManualWhiteBalance),
263 DECLARE_NAPI_FUNCTION("setWhiteBalance", CameraSessionNapi::SetManualWhiteBalance),
264 };
265
266 const std::vector<napi_property_descriptor> CameraSessionNapi::aperture_props = {
267 DECLARE_NAPI_FUNCTION("getSupportedVirtualApertures", CameraSessionNapi::GetSupportedVirtualApertures),
268 DECLARE_NAPI_FUNCTION("getVirtualAperture", CameraSessionNapi::GetVirtualAperture),
269 DECLARE_NAPI_FUNCTION("setVirtualAperture", CameraSessionNapi::SetVirtualAperture),
270
271 DECLARE_NAPI_FUNCTION("getSupportedPhysicalApertures", CameraSessionNapi::GetSupportedPhysicalApertures),
272 DECLARE_NAPI_FUNCTION("getPhysicalAperture", CameraSessionNapi::GetPhysicalAperture),
273 DECLARE_NAPI_FUNCTION("setPhysicalAperture", CameraSessionNapi::SetPhysicalAperture)
274 };
275
276 const std::vector<napi_property_descriptor> CameraSessionNapi::auto_switch_props = {
277 DECLARE_NAPI_FUNCTION("isAutoDeviceSwitchSupported", CameraSessionNapi::IsAutoDeviceSwitchSupported),
278 DECLARE_NAPI_FUNCTION("enableAutoDeviceSwitch", CameraSessionNapi::EnableAutoDeviceSwitch)
279 };
280
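// The listener classes below all follow the same pattern to forward native callbacks to
// JS: allocate a uv_work_t plus a heap callback-info object, queue it onto the napi event
// loop with uv_queue_work_with_qos, and hand ownership of the info object to the work
// callback via release(); if queueing fails, the work item is deleted immediately.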
281 void ExposureCallbackListener::OnExposureStateCallbackAsync(ExposureState state) const
282 {
283 MEDIA_DEBUG_LOG("OnExposureStateCallbackAsync is called");
284 uv_loop_s* loop = nullptr;
285 napi_get_uv_event_loop(env_, &loop);
286 if (!loop) {
287 MEDIA_ERR_LOG("failed to get event loop");
288 return;
289 }
290 uv_work_t* work = new(std::nothrow) uv_work_t;
291 if (!work) {
292 MEDIA_ERR_LOG("failed to allocate work");
293 return;
294 }
295 std::unique_ptr<ExposureCallbackInfo> callbackInfo = std::make_unique<ExposureCallbackInfo>(state, this);
296 work->data = callbackInfo.get();
297 int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) {
298 ExposureCallbackInfo* callbackInfo = reinterpret_cast<ExposureCallbackInfo *>(work->data);
299 if (callbackInfo) {
300 callbackInfo->listener_->OnExposureStateCallback(callbackInfo->state_);
301 delete callbackInfo;
302 }
303 delete work;
304 }, uv_qos_user_initiated);
305 if (ret) {
306 MEDIA_ERR_LOG("failed to execute work");
307 delete work;
308 } else {
309 callbackInfo.release();
310 }
311 }
312
313 void ExposureCallbackListener::OnExposureStateCallback(ExposureState state) const
314 {
315 MEDIA_DEBUG_LOG("OnExposureStateCallback is called");
316 napi_value result[ARGS_TWO] = {nullptr, nullptr};
317 napi_value retVal;
318
319 napi_get_undefined(env_, &result[PARAM0]);
320 napi_create_int32(env_, state, &result[PARAM1]);
321 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
322 ExecuteCallback("exposureStateChange", callbackNapiPara);
323 }
324
325 void ExposureCallbackListener::OnExposureState(const ExposureState state)
326 {
327 MEDIA_DEBUG_LOG("OnExposureState is called, state: %{public}d", state);
328 OnExposureStateCallbackAsync(state);
329 }
330
331 void FocusCallbackListener::OnFocusStateCallbackAsync(FocusState state) const
332 {
333 MEDIA_DEBUG_LOG("OnFocusStateCallbackAsync is called");
334 uv_loop_s* loop = nullptr;
335 napi_get_uv_event_loop(env_, &loop);
336 if (!loop) {
337 MEDIA_ERR_LOG("failed to get event loop");
338 return;
339 }
340 uv_work_t* work = new(std::nothrow) uv_work_t;
341 if (!work) {
342 MEDIA_ERR_LOG("failed to allocate work");
343 return;
344 }
345 std::unique_ptr<FocusCallbackInfo> callbackInfo = std::make_unique<FocusCallbackInfo>(state, this);
346 work->data = callbackInfo.get();
347 int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) {
348 FocusCallbackInfo* callbackInfo = reinterpret_cast<FocusCallbackInfo *>(work->data);
349 if (callbackInfo) {
350 callbackInfo->listener_->OnFocusStateCallback(callbackInfo->state_);
351 delete callbackInfo;
352 }
353 delete work;
354 }, uv_qos_user_initiated);
355 if (ret) {
356 MEDIA_ERR_LOG("failed to execute work");
357 delete work;
358 } else {
359 callbackInfo.release();
360 }
361 }
362
363 void FocusCallbackListener::OnFocusStateCallback(FocusState state) const
364 {
365 MEDIA_DEBUG_LOG("OnFocusStateCallback is called");
366 napi_value result[ARGS_TWO] = {nullptr, nullptr};
367 napi_value retVal;
368 napi_get_undefined(env_, &result[PARAM0]);
369 napi_create_int32(env_, state, &result[PARAM1]);
370 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
371 ExecuteCallback("focusStateChange", callbackNapiPara);
372 }
373
374 void FocusCallbackListener::OnFocusState(FocusState state)
375 {
376 MEDIA_DEBUG_LOG("OnFocusState is called, state: %{public}d", state);
377 OnFocusStateCallbackAsync(state);
378 }
379
380 void MacroStatusCallbackListener::OnMacroStatusCallbackAsync(MacroStatus status) const
381 {
382 MEDIA_DEBUG_LOG("OnMacroStatusCallbackAsync is called");
383 uv_loop_s* loop = nullptr;
384 napi_get_uv_event_loop(env_, &loop);
385 if (!loop) {
386 MEDIA_ERR_LOG("failed to get event loop");
387 return;
388 }
389 uv_work_t* work = new (std::nothrow) uv_work_t;
390 if (!work) {
391 MEDIA_ERR_LOG("failed to allocate work");
392 return;
393 }
394 auto callbackInfo = std::make_unique<MacroStatusCallbackInfo>(status, this);
395 work->data = callbackInfo.get();
396 int ret = uv_queue_work_with_qos(
397 loop, work, [](uv_work_t* work) {},
398 [](uv_work_t* work, int status) {
399 auto callbackInfo = reinterpret_cast<MacroStatusCallbackInfo*>(work->data);
400 if (callbackInfo) {
401 callbackInfo->listener_->OnMacroStatusCallback(callbackInfo->status_);
402 delete callbackInfo;
403 }
404 delete work;
405 },
406 uv_qos_user_initiated);
407 if (ret) {
408 MEDIA_ERR_LOG("failed to execute work");
409 delete work;
410 } else {
411 callbackInfo.release();
412 }
413 }
414
415 void MacroStatusCallbackListener::OnMacroStatusCallback(MacroStatus status) const
416 {
417 MEDIA_DEBUG_LOG("OnMacroStatusCallback is called");
418 napi_value result[ARGS_TWO] = { nullptr, nullptr };
419 napi_value retVal;
420 napi_get_undefined(env_, &result[PARAM0]);
421 napi_get_boolean(env_, status == MacroStatus::ACTIVE, &result[PARAM1]);
422 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
423 ExecuteCallback("macroStatusChanged", callbackNapiPara);
424 }
425
426 void MacroStatusCallbackListener::OnMacroStatusChanged(MacroStatus status)
427 {
428 MEDIA_DEBUG_LOG("OnMacroStatusChanged is called, status: %{public}d", status);
429 OnMacroStatusCallbackAsync(status);
430 }
431
432 void MoonCaptureBoostCallbackListener::OnMoonCaptureBoostStatusCallbackAsync(MoonCaptureBoostStatus status) const
433 {
434 MEDIA_DEBUG_LOG("OnMoonCaptureBoostStatusCallbackAsync is called");
435 uv_loop_s* loop = nullptr;
436 napi_get_uv_event_loop(env_, &loop);
437 if (!loop) {
438 MEDIA_ERR_LOG("failed to get event loop");
439 return;
440 }
441 uv_work_t* work = new (std::nothrow) uv_work_t;
442 if (!work) {
443 MEDIA_ERR_LOG("failed to allocate work");
444 return;
445 }
446 auto callbackInfo = std::make_unique<MoonCaptureBoostStatusCallbackInfo>(status, this);
447 work->data = callbackInfo.get();
448 int ret = uv_queue_work_with_qos(
449 loop, work, [](uv_work_t* work) {},
450 [](uv_work_t* work, int status) {
451 auto callbackInfo = reinterpret_cast<MoonCaptureBoostStatusCallbackInfo*>(work->data);
452 if (callbackInfo) {
453 callbackInfo->listener_->OnMoonCaptureBoostStatusCallback(callbackInfo->status_);
454 delete callbackInfo;
455 }
456 delete work;
457 },
458 uv_qos_user_initiated);
459 if (ret) {
460 MEDIA_ERR_LOG("failed to execute work");
461 delete work;
462 } else {
463 callbackInfo.release();
464 }
465 }
466
467 void MoonCaptureBoostCallbackListener::OnMoonCaptureBoostStatusCallback(MoonCaptureBoostStatus status) const
468 {
469 MEDIA_DEBUG_LOG("OnMoonCaptureBoostStatusCallback is called");
470 napi_value result[ARGS_TWO] = { nullptr, nullptr };
471 napi_value retVal;
472 napi_get_undefined(env_, &result[PARAM0]);
473 napi_get_boolean(env_, status == MoonCaptureBoostStatus::ACTIVE, &result[PARAM1]);
474 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
475 ExecuteCallback("moonCaptureBoostStatus", callbackNapiPara);
476 }
477
478 void MoonCaptureBoostCallbackListener::OnMoonCaptureBoostStatusChanged(MoonCaptureBoostStatus status)
479 {
480 MEDIA_DEBUG_LOG("OnMoonCaptureBoostStatusChanged is called, status: %{public}d", status);
481 OnMoonCaptureBoostStatusCallbackAsync(status);
482 }
483
484 void FeatureDetectionStatusCallbackListener::OnFeatureDetectionStatusChangedCallbackAsync(
485 SceneFeature feature, FeatureDetectionStatus status) const
486 {
487 MEDIA_DEBUG_LOG("OnFeatureDetectionStatusChangedCallbackAsync is called");
488 uv_loop_s* loop = nullptr;
489 napi_get_uv_event_loop(env_, &loop);
490 if (!loop) {
491 MEDIA_ERR_LOG("failed to get event loop");
492 return;
493 }
494 uv_work_t* work = new (std::nothrow) uv_work_t;
495 if (!work) {
496 MEDIA_ERR_LOG("failed to allocate work");
497 return;
498 }
499 auto callbackInfo = std::make_unique<FeatureDetectionStatusCallbackInfo>(feature, status, this);
500 work->data = callbackInfo.get();
501 int ret = uv_queue_work_with_qos(
502 loop, work, [](uv_work_t* work) {},
503 [](uv_work_t* work, int status) {
504 auto callbackInfo = reinterpret_cast<FeatureDetectionStatusCallbackInfo*>(work->data);
505 if (callbackInfo) {
506 callbackInfo->listener_->OnFeatureDetectionStatusChangedCallback(
507 callbackInfo->feature_, callbackInfo->status_);
508 delete callbackInfo;
509 }
510 delete work;
511 },
512 uv_qos_user_initiated);
513 if (ret) {
514 MEDIA_ERR_LOG("failed to execute work");
515 delete work;
516 } else {
517 callbackInfo.release();
518 }
519 }
520
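// Both the current "featureDetection<feature>" and the legacy "featureDetectionStatus<feature>"
// event names are emitted here (and checked in IsFeatureSubscribed), presumably so that
// subscribers to either name keep receiving notifications.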
521 void FeatureDetectionStatusCallbackListener::OnFeatureDetectionStatusChangedCallback(
522 SceneFeature feature, FeatureDetectionStatus status) const
523 {
524 MEDIA_DEBUG_LOG("OnFeatureDetectionStatusChangedCallback is called");
525 std::string eventName = "featureDetection" + std::to_string(static_cast<int32_t>(feature));
526 std::string eventNameOld = "featureDetectionStatus" + std::to_string(static_cast<int32_t>(feature));
527
528 napi_value result[ARGS_TWO] = { nullptr, nullptr };
529 napi_value retVal;
530 napi_get_undefined(env_, &result[PARAM0]);
531 napi_create_object(env_, &result[PARAM1]);
532
533 napi_value featureNapiValue;
534 napi_create_int32(env_, feature, &featureNapiValue);
535 napi_set_named_property(env_, result[PARAM1], "featureType", featureNapiValue);
536
537 napi_value statusValue;
538 napi_get_boolean(env_, status == FeatureDetectionStatus::ACTIVE, &statusValue);
539 napi_set_named_property(env_, result[PARAM1], "detected", statusValue);
540
541 if (feature == SceneFeature::FEATURE_TRIPOD_DETECTION) {
542 napi_value tripodStatusValue;
543 auto fwkTripodStatus = GetFeatureStatus();
544 napi_create_int32(env_, fwkTripodStatus, &tripodStatusValue);
545 napi_set_named_property(env_, result[PARAM1], "tripodStatus", tripodStatusValue);
546 }
547 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
548 ExecuteCallback(eventName, callbackNapiPara);
549 ExecuteCallback(eventNameOld, callbackNapiPara);
550 }
551
552 void FeatureDetectionStatusCallbackListener::OnFeatureDetectionStatusChanged(
553 SceneFeature feature, FeatureDetectionStatus status)
554 {
555 MEDIA_DEBUG_LOG(
556 "OnFeatureDetectionStatusChanged is called, feature: %{public}d, status: %{public}d", feature, status);
557 OnFeatureDetectionStatusChangedCallbackAsync(feature, status);
558 }
559
560 bool FeatureDetectionStatusCallbackListener::IsFeatureSubscribed(SceneFeature feature)
561 {
562 std::string eventName = "featureDetection" + std::to_string(static_cast<int32_t>(feature));
563 std::string eventNameOld = "featureDetectionStatus" + std::to_string(static_cast<int32_t>(feature));
564
565 return !IsEmpty(eventName) || !IsEmpty(eventNameOld);
566 }
567
568 void SessionCallbackListener::OnErrorCallbackAsync(int32_t errorCode) const
569 {
570 MEDIA_DEBUG_LOG("OnErrorCallbackAsync is called");
571 uv_loop_s* loop = nullptr;
572 napi_get_uv_event_loop(env_, &loop);
573 if (!loop) {
574 MEDIA_ERR_LOG("failed to get event loop");
575 return;
576 }
577 uv_work_t* work = new(std::nothrow) uv_work_t;
578 if (!work) {
579 MEDIA_ERR_LOG("failed to allocate work");
580 return;
581 }
582 std::unique_ptr<SessionCallbackInfo> callbackInfo = std::make_unique<SessionCallbackInfo>(errorCode, this);
583 work->data = callbackInfo.get();
584 int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) {
585 SessionCallbackInfo* callbackInfo = reinterpret_cast<SessionCallbackInfo *>(work->data);
586 if (callbackInfo) {
587 callbackInfo->listener_->OnErrorCallback(callbackInfo->errorCode_);
588 delete callbackInfo;
589 }
590 delete work;
591 }, uv_qos_user_initiated);
592 if (ret) {
593 MEDIA_ERR_LOG("failed to execute work");
594 delete work;
595 } else {
596 callbackInfo.release();
597 }
598 }
599
600 void SessionCallbackListener::OnErrorCallback(int32_t errorCode) const
601 {
602 MEDIA_DEBUG_LOG("OnErrorCallback is called");
603 napi_value result[ARGS_ONE] = {nullptr};
604 napi_value retVal;
605 napi_value propValue;
606
607 napi_create_object(env_, &result[PARAM0]);
608 napi_create_int32(env_, errorCode, &propValue);
609 napi_set_named_property(env_, result[PARAM0], "code", propValue);
610 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_ONE, .argv = result, .result = &retVal };
611 ExecuteCallback("error", callbackNapiPara);
612 }
613
614 void SessionCallbackListener::OnError(int32_t errorCode)
615 {
616 MEDIA_DEBUG_LOG("OnError is called, errorCode: %{public}d", errorCode);
617 OnErrorCallbackAsync(errorCode);
618 }
619
620 void SmoothZoomCallbackListener::OnSmoothZoomCallbackAsync(int32_t duration) const
621 {
622 MEDIA_DEBUG_LOG("OnSmoothZoomCallbackAsync is called");
623 uv_loop_s* loop = nullptr;
624 napi_get_uv_event_loop(env_, &loop);
625 if (!loop) {
626 MEDIA_ERR_LOG("failed to get event loop");
627 return;
628 }
629 uv_work_t* work = new(std::nothrow) uv_work_t;
630 if (!work) {
631 MEDIA_ERR_LOG("failed to allocate work");
632 return;
633 }
634 std::unique_ptr<SmoothZoomCallbackInfo> callbackInfo = std::make_unique<SmoothZoomCallbackInfo>(duration, this);
635 work->data = callbackInfo.get();
636 int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) {
637 SmoothZoomCallbackInfo* callbackInfo = reinterpret_cast<SmoothZoomCallbackInfo *>(work->data);
638 if (callbackInfo) {
639 callbackInfo->listener_->OnSmoothZoomCallback(callbackInfo->duration_);
640 delete callbackInfo;
641 }
642 delete work;
643 }, uv_qos_user_initiated);
644 if (ret) {
645 MEDIA_ERR_LOG("failed to execute work");
646 delete work;
647 } else {
648 callbackInfo.release();
649 }
650 }
651
652 void SmoothZoomCallbackListener::OnSmoothZoomCallback(int32_t duration) const
653 {
654 MEDIA_DEBUG_LOG("OnSmoothZoomCallback is called");
655 napi_value result[ARGS_TWO];
656 napi_value retVal;
657 napi_value propValue;
658
659 napi_get_undefined(env_, &result[PARAM0]);
660 napi_create_object(env_, &result[PARAM1]);
661 napi_create_int32(env_, duration, &propValue);
662 napi_set_named_property(env_, result[PARAM1], "duration", propValue);
663
664 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
665 ExecuteCallback("smoothZoomInfoAvailable", callbackNapiPara);
666 }
667
668 void SmoothZoomCallbackListener::OnSmoothZoom(int32_t duration)
669 {
670 MEDIA_DEBUG_LOG("OnSmoothZoom is called, duration: %{public}d", duration);
671 OnSmoothZoomCallbackAsync(duration);
672 }
673
674 void AbilityCallbackListener::OnAbilityChangeCallbackAsync() const
675 {
676 MEDIA_DEBUG_LOG("OnAbilityChangeCallbackAsync is called");
677 uv_loop_s* loop = nullptr;
678 napi_get_uv_event_loop(env_, &loop);
679 if (!loop) {
680 MEDIA_ERR_LOG("failed to get event loop");
681 return;
682 }
683 uv_work_t* work = new(std::nothrow) uv_work_t;
684 if (!work) {
685 MEDIA_ERR_LOG("failed to allocate work");
686 return;
687 }
688 std::unique_ptr<AbilityCallbackInfo> callbackInfo = std::make_unique<AbilityCallbackInfo>(this);
689 work->data = callbackInfo.get();
690 int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) {
691 AbilityCallbackInfo* callbackInfo = reinterpret_cast<AbilityCallbackInfo *>(work->data);
692 if (callbackInfo) {
693 callbackInfo->listener_->OnAbilityChangeCallback();
694 delete callbackInfo;
695 }
696 delete work;
697 }, uv_qos_user_initiated);
698 if (ret) {
699 MEDIA_ERR_LOG("failed to execute work");
700 delete work;
701 } else {
702 callbackInfo.release();
703 }
704 }
705
706 void AbilityCallbackListener::OnAbilityChangeCallback() const
707 {
708 MEDIA_DEBUG_LOG("OnAbilityChangeCallback is called");
709 napi_value result[ARGS_TWO];
710 napi_value retVal;
711 napi_get_undefined(env_, &result[PARAM0]);
712 napi_get_undefined(env_, &result[PARAM1]);
713
714 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
715 ExecuteCallback("abilityChange", callbackNapiPara);
716 }
717
718 void AbilityCallbackListener::OnAbilityChange()
719 {
720 MEDIA_DEBUG_LOG("OnAbilityChange is called");
721 OnAbilityChangeCallbackAsync();
722 }
723
724 void EffectSuggestionCallbackListener::OnEffectSuggestionCallbackAsync(EffectSuggestionType effectSuggestionType) const
725 {
726 MEDIA_DEBUG_LOG("OnEffectSuggestionCallbackAsync is called");
727 uv_loop_s* loop = nullptr;
728 napi_get_uv_event_loop(env_, &loop);
729 if (!loop) {
730 MEDIA_ERR_LOG("failed to get event loop");
731 return;
732 }
733 uv_work_t* work = new(std::nothrow) uv_work_t;
734 if (!work) {
735 MEDIA_ERR_LOG("failed to allocate work");
736 return;
737 }
738 std::unique_ptr<EffectSuggestionCallbackInfo> callbackInfo =
739 std::make_unique<EffectSuggestionCallbackInfo>(effectSuggestionType, this);
740 work->data = callbackInfo.get();
741 int ret = uv_queue_work_with_qos(loop, work, [] (uv_work_t* work) {}, [] (uv_work_t* work, int status) {
742 EffectSuggestionCallbackInfo* callbackInfo = reinterpret_cast<EffectSuggestionCallbackInfo *>(work->data);
743 if (callbackInfo) {
744 callbackInfo->listener_->OnEffectSuggestionCallback(callbackInfo->effectSuggestionType_);
745 delete callbackInfo;
746 }
747 delete work;
748 }, uv_qos_user_initiated);
749 if (ret) {
750 MEDIA_ERR_LOG("failed to execute work");
751 delete work;
752 } else {
753 callbackInfo.release();
754 }
755 }
756
757 void EffectSuggestionCallbackListener::OnEffectSuggestionCallback(EffectSuggestionType effectSuggestionType) const
758 {
759 MEDIA_DEBUG_LOG("OnEffectSuggestionCallback is called");
760 napi_value result[ARGS_TWO] = {nullptr, nullptr};
761 napi_value retVal;
762 napi_get_undefined(env_, &result[PARAM0]);
763 napi_create_int32(env_, effectSuggestionType, &result[PARAM1]);
764 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
765 ExecuteCallback("effectSuggestionChange", callbackNapiPara);
766 }
767
768 void EffectSuggestionCallbackListener::OnEffectSuggestionChange(EffectSuggestionType effectSuggestionType)
769 {
770 MEDIA_DEBUG_LOG("OnEffectSuggestionChange is called, effectSuggestionType: %{public}d", effectSuggestionType);
771 OnEffectSuggestionCallbackAsync(effectSuggestionType);
772 }
773
774 void LcdFlashStatusCallbackListener::OnLcdFlashStatusCallbackAsync(LcdFlashStatusInfo lcdFlashStatusInfo) const
775 {
776 MEDIA_DEBUG_LOG("OnLcdFlashStatusCallbackAsync is called");
777 uv_loop_s* loop = nullptr;
778 napi_get_uv_event_loop(env_, &loop);
779 if (!loop) {
780 MEDIA_ERR_LOG("failed to get event loop");
781 return;
782 }
783 uv_work_t* work = new (std::nothrow) uv_work_t;
784 if (!work) {
785 MEDIA_ERR_LOG("failed to allocate work");
786 return;
787 }
788 auto callbackInfo = std::make_unique<LcdFlashStatusStatusCallbackInfo>(lcdFlashStatusInfo, this);
789 work->data = callbackInfo.get();
790 int ret = uv_queue_work_with_qos(
791 loop, work, [](uv_work_t* work) {},
792 [](uv_work_t* work, int status) {
793 auto callbackInfo = reinterpret_cast<LcdFlashStatusStatusCallbackInfo*>(work->data);
794 if (callbackInfo) {
795 callbackInfo->listener_->OnLcdFlashStatusCallback(callbackInfo->lcdFlashStatusInfo_);
796 delete callbackInfo;
797 }
798 delete work;
799 },
800 uv_qos_user_initiated);
801 if (ret) {
802 MEDIA_ERR_LOG("failed to execute work");
803 delete work;
804 } else {
805 callbackInfo.release();
806 }
807 }
808
809 void LcdFlashStatusCallbackListener::OnLcdFlashStatusCallback(LcdFlashStatusInfo lcdFlashStatusInfo) const
810 {
811 MEDIA_DEBUG_LOG("OnLcdFlashStatusCallback is called");
812 napi_value result[ARGS_TWO] = { nullptr, nullptr };
813 napi_get_undefined(env_, &result[PARAM0]);
814 napi_value retVal;
815 napi_value propValue;
816 napi_create_object(env_, &result[PARAM1]);
817 napi_get_boolean(env_, lcdFlashStatusInfo.isLcdFlashNeeded, &propValue);
818 napi_set_named_property(env_, result[PARAM1], "isLcdFlashNeeded", propValue);
819 napi_create_int32(env_, lcdFlashStatusInfo.lcdCompensation, &propValue);
820 napi_set_named_property(env_, result[PARAM1], "lcdCompensation", propValue);
821 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
822 ExecuteCallback("lcdFlashStatus", callbackNapiPara);
823 }
824
825 void LcdFlashStatusCallbackListener::OnLcdFlashStatusChanged(LcdFlashStatusInfo lcdFlashStatusInfo)
826 {
827 MEDIA_DEBUG_LOG("OnLcdFlashStatusChanged is called, isLcdFlashNeeded: %{public}d, lcdCompensation: %{public}d",
828 lcdFlashStatusInfo.isLcdFlashNeeded, lcdFlashStatusInfo.lcdCompensation);
829 OnLcdFlashStatusCallbackAsync(lcdFlashStatusInfo);
830 }
831
832 void AutoDeviceSwitchCallbackListener::OnAutoDeviceSwitchCallbackAsync(
833 bool isDeviceSwitched, bool isDeviceCapabilityChanged) const
834 {
835 MEDIA_DEBUG_LOG("OnAutoDeviceSwitchCallbackAsync is called");
836 uv_loop_s* loop = nullptr;
837 napi_get_uv_event_loop(env_, &loop);
838 if (!loop) {
839 MEDIA_ERR_LOG("failed to get event loop");
840 return;
841 }
842 uv_work_t* work = new (std::nothrow) uv_work_t;
843 if (!work) {
844 MEDIA_ERR_LOG("failed to allocate work");
845 return;
846 }
847 auto callbackInfo = std::make_unique<AutoDeviceSwitchCallbackListenerInfo>(
848 isDeviceSwitched, isDeviceCapabilityChanged, this);
849 work->data = callbackInfo.get();
850 int ret = uv_queue_work_with_qos(
851 loop, work, [](uv_work_t* work) {},
852 [](uv_work_t* work, int status) {
853 auto callbackInfo = reinterpret_cast<AutoDeviceSwitchCallbackListenerInfo*>(work->data);
854 if (callbackInfo) {
855 callbackInfo->listener_->OnAutoDeviceSwitchCallback(
856 callbackInfo->isDeviceSwitched_, callbackInfo->isDeviceCapabilityChanged_);
857 delete callbackInfo;
858 }
859 delete work;
860 },
861 uv_qos_user_initiated);
862 if (ret) {
863 MEDIA_ERR_LOG("failed to execute work");
864 delete work;
865 } else {
866 callbackInfo.release();
867 }
868 }
869
870 void AutoDeviceSwitchCallbackListener::OnAutoDeviceSwitchCallback(
871 bool isDeviceSwitched, bool isDeviceCapabilityChanged) const
872 {
873 MEDIA_INFO_LOG("OnAutoDeviceSwitchCallback is called");
874 napi_value result[ARGS_TWO] = { nullptr, nullptr };
875 napi_get_undefined(env_, &result[PARAM0]);
876 napi_value retVal;
877 napi_value propValue;
878 napi_create_object(env_, &result[PARAM1]);
879 napi_get_boolean(env_, isDeviceSwitched, &propValue);
880 napi_set_named_property(env_, result[PARAM1], "isDeviceSwitched", propValue);
881 napi_get_boolean(env_, isDeviceCapabilityChanged, &propValue);
882 napi_set_named_property(env_, result[PARAM1], "isDeviceCapabilityChanged", propValue);
883 ExecuteCallbackNapiPara callbackNapiPara { .recv = nullptr, .argc = ARGS_TWO, .argv = result, .result = &retVal };
884 ExecuteCallback("autoDeviceSwitchStatusChange", callbackNapiPara);
885 }
886
887 void AutoDeviceSwitchCallbackListener::OnAutoDeviceSwitchStatusChange(
888 bool isDeviceSwitched, bool isDeviceCapabilityChanged) const
889 {
890 MEDIA_INFO_LOG("isDeviceSwitched: %{public}d, isDeviceCapabilityChanged: %{public}d",
891 isDeviceSwitched, isDeviceCapabilityChanged);
892 OnAutoDeviceSwitchCallbackAsync(isDeviceSwitched, isDeviceCapabilityChanged);
893 }
894
895 CameraSessionNapi::CameraSessionNapi() : env_(nullptr) {}
896
897 CameraSessionNapi::~CameraSessionNapi()
898 {
899 MEDIA_DEBUG_LOG("~CameraSessionNapi is called");
900 }
901
902 void CameraSessionNapi::CameraSessionNapiDestructor(napi_env env, void* nativeObject, void* finalize_hint)
903 {
904 MEDIA_DEBUG_LOG("CameraSessionNapiDestructor is called");
905 }
906
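// Registers the CameraSession JS class: merges the base property tables, defines the class
// with CameraSessionNapiConstructor, and stores the constructor in the thread_local sConstructor_.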
907 napi_value CameraSessionNapi::Init(napi_env env, napi_value exports)
908 {
909 MEDIA_DEBUG_LOG("Init is called");
910 napi_status status;
911 napi_value ctorObj;
912 int32_t refCount = 1;
913 std::vector<std::vector<napi_property_descriptor>> descriptors = { camera_process_props, stabilization_props,
914 flash_props, auto_exposure_props, focus_props, zoom_props, filter_props, beauty_props, color_effect_props,
915 macro_props, depth_fusion_props, moon_capture_boost_props, features_props, color_management_props,
916 manual_focus_props, preconfig_props, camera_output_capability_props };
917 std::vector<napi_property_descriptor> camera_session_props = CameraNapiUtils::GetPropertyDescriptor(descriptors);
918 status = napi_define_class(env, CAMERA_SESSION_NAPI_CLASS_NAME, NAPI_AUTO_LENGTH,
919 CameraSessionNapiConstructor, nullptr,
920 camera_session_props.size(),
921 camera_session_props.data(), &ctorObj);
922 if (status == napi_ok) {
923 status = napi_create_reference(env, ctorObj, refCount, &sConstructor_);
924 if (status == napi_ok) {
925 status = napi_set_named_property(env, exports, CAMERA_SESSION_NAPI_CLASS_NAME, ctorObj);
926 if (status == napi_ok) {
927 return exports;
928 }
929 }
930 }
931 MEDIA_ERR_LOG("Init call Failed!");
932 return nullptr;
933 }
934
935 // Constructor callback
936 napi_value CameraSessionNapi::CameraSessionNapiConstructor(napi_env env, napi_callback_info info)
937 {
938 MEDIA_DEBUG_LOG("CameraSessionNapiConstructor is called");
939 napi_status status;
940 napi_value result = nullptr;
941 napi_value thisVar = nullptr;
942
943 napi_get_undefined(env, &result);
944 CAMERA_NAPI_GET_JS_OBJ_WITH_ZERO_ARGS(env, info, status, thisVar);
945
946 if (status == napi_ok && thisVar != nullptr) {
947 std::unique_ptr<CameraSessionNapi> obj = std::make_unique<CameraSessionNapi>();
948 if (obj != nullptr) {
949 obj->env_ = env;
950 if (sCameraSession_ == nullptr) {
951 MEDIA_ERR_LOG("sCameraSession_ is null");
952 return result;
953 }
954 obj->cameraSession_ = sCameraSession_;
955 status = napi_wrap(env, thisVar, reinterpret_cast<void*>(obj.get()),
956 CameraSessionNapi::CameraSessionNapiDestructor, nullptr, nullptr);
957 if (status == napi_ok) {
958 obj.release();
959 return thisVar;
960 } else {
961 MEDIA_ERR_LOG("CameraSessionNapi Failure wrapping js to native napi");
962 }
963 }
964 }
965 MEDIA_ERR_LOG("CameraSessionNapiConstructor call Failed!");
966 return result;
967 }
968
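// Point conversion helpers: QueryAndGetInputProperty and GetPointProperties read an {x, y}
// object from JS (returning 0 on success, -1 on failure), and GetPointNapiValue builds the
// corresponding JS object from a native Point.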
969 int32_t QueryAndGetInputProperty(napi_env env, napi_value arg, const string &propertyName, napi_value &property)
970 {
971 MEDIA_DEBUG_LOG("QueryAndGetInputProperty is called");
972 bool present = false;
973 int32_t retval = 0;
974 if ((napi_has_named_property(env, arg, propertyName.c_str(), &present) != napi_ok)
975 || (!present) || (napi_get_named_property(env, arg, propertyName.c_str(), &property) != napi_ok)) {
976 MEDIA_ERR_LOG("Failed to obtain property: %{public}s", propertyName.c_str());
977 retval = -1;
978 }
979
980 return retval;
981 }
982
983 int32_t GetPointProperties(napi_env env, napi_value pointObj, Point &point)
984 {
985 MEDIA_DEBUG_LOG("GetPointProperties is called");
986 napi_value propertyX = nullptr;
987 napi_value propertyY = nullptr;
988 double pointX = -1.0;
989 double pointY = -1.0;
990
991 if ((QueryAndGetInputProperty(env, pointObj, "x", propertyX) == 0) &&
992 (QueryAndGetInputProperty(env, pointObj, "y", propertyY) == 0)) {
993 if ((napi_get_value_double(env, propertyX, &pointX) != napi_ok) ||
994 (napi_get_value_double(env, propertyY, &pointY) != napi_ok)) {
995 MEDIA_ERR_LOG("GetPointProperties: get property for x & y failed");
996 return -1;
997 } else {
998 point.x = pointX;
999 point.y = pointY;
1000 }
1001 } else {
1002 return -1;
1003 }
1004
1005 // Return 0 after focus point properties are successfully obtained
1006 return 0;
1007 }
1008
1009 napi_value GetPointNapiValue(napi_env env, Point &point)
1010 {
1011 MEDIA_DEBUG_LOG("GetPointNapiValue is called");
1012 napi_value result;
1013 napi_value propValue;
1014 napi_create_object(env, &result);
1015 napi_create_double(env, CameraNapiUtils::FloatToDouble(point.x), &propValue);
1016 napi_set_named_property(env, result, "x", propValue);
1017 napi_create_double(env, CameraNapiUtils::FloatToDouble(point.y), &propValue);
1018 napi_set_named_property(env, result, "y", propValue);
1019 return result;
1020 }
1021
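// Creates the native CaptureSession, stashes it in the thread_local sCameraSession_ so the
// constructor can pick it up during napi_new_instance, then clears the static afterwards.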
1022 napi_value CameraSessionNapi::CreateCameraSession(napi_env env)
1023 {
1024 MEDIA_DEBUG_LOG("CreateCameraSession is called");
1025 CAMERA_SYNC_TRACE;
1026 napi_status status;
1027 napi_value result = nullptr;
1028 napi_value constructor;
1029
1030 status = napi_get_reference_value(env, sConstructor_, &constructor);
1031 if (status == napi_ok) {
1032 int retCode = CameraManager::GetInstance()->CreateCaptureSession(&sCameraSession_);
1033 if (!CameraNapiUtils::CheckError(env, retCode)) {
1034 return nullptr;
1035 }
1036 if (sCameraSession_ == nullptr) {
1037 MEDIA_ERR_LOG("Failed to create Camera session instance");
1038 napi_get_undefined(env, &result);
1039 return result;
1040 }
1041 status = napi_new_instance(env, constructor, 0, nullptr, &result);
1042 sCameraSession_ = nullptr;
1043 if (status == napi_ok && result != nullptr) {
1044 MEDIA_DEBUG_LOG("success to create Camera session napi instance");
1045 return result;
1046 } else {
1047 MEDIA_ERR_LOG("Failed to create Camera session napi instance");
1048 }
1049 }
1050 MEDIA_ERR_LOG("Failed to create Camera session napi instance last");
1051 napi_get_undefined(env, &result);
1052 return result;
1053 }
1054
1055 napi_value CameraSessionNapi::BeginConfig(napi_env env, napi_callback_info info)
1056 {
1057 MEDIA_INFO_LOG("BeginConfig is called");
1058 napi_status status;
1059 napi_value result = nullptr;
1060 size_t argc = ARGS_ZERO;
1061 napi_value argv[ARGS_ZERO];
1062 napi_value thisVar = nullptr;
1063 napi_get_undefined(env, &result);
1064 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1065
1066 CameraSessionNapi* cameraSessionNapi = nullptr;
1067 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1068 if (status == napi_ok && cameraSessionNapi != nullptr) {
1069 int32_t ret = cameraSessionNapi->cameraSession_->BeginConfig();
1070 if (!CameraNapiUtils::CheckError(env, ret)) {
1071 return nullptr;
1072 }
1073 } else {
1074 MEDIA_ERR_LOG("BeginConfig call Failed!");
1075 }
1076 return result;
1077 }
1078
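// CommitConfig runs asynchronously: the work item commits the session configuration on a
// worker-queue task, and AsyncCompleteCallback (above) settles the returned promise or callback.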
1079 napi_value CameraSessionNapi::CommitConfig(napi_env env, napi_callback_info info)
1080 {
1081 MEDIA_INFO_LOG("CommitConfig is called");
1082 std::unique_ptr<CameraSessionAsyncContext> asyncContext = std::make_unique<CameraSessionAsyncContext>(
1083 "CameraSessionNapi::CommitConfig", CameraNapiUtils::IncrementAndGet(cameraSessionTaskId));
1084 auto asyncFunction = std::make_shared<CameraNapiAsyncFunction>(
1085 env, "CommitConfig", asyncContext->callbackRef, asyncContext->deferred);
1086 CameraNapiParamParser jsParamParser(env, info, asyncContext->objectInfo, asyncFunction);
1087 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "invalid argument")) {
1088 MEDIA_ERR_LOG("CameraSessionNapi::CommitConfig invalid argument");
1089 return nullptr;
1090 }
1091 asyncContext->HoldNapiValue(env, jsParamParser.GetThisVar());
1092 napi_status status = napi_create_async_work(
1093 env, nullptr, asyncFunction->GetResourceName(),
1094 [](napi_env env, void* data) {
1095 MEDIA_INFO_LOG("CameraSessionNapi::CommitConfig running on worker");
1096 auto context = static_cast<CameraSessionAsyncContext*>(data);
1097 CHECK_ERROR_RETURN_LOG(
1098 context->objectInfo == nullptr, "CameraSessionNapi::CommitConfig async info is nullptr");
1099 CAMERA_START_ASYNC_TRACE(context->funcName, context->taskId);
1100 CameraNapiWorkerQueueKeeper::GetInstance()->ConsumeWorkerQueueTask(context->queueTask, [&context]() {
1101 context->errorCode = context->objectInfo->cameraSession_->CommitConfig();
1102 context->status = context->errorCode == CameraErrorCode::SUCCESS;
1103 MEDIA_INFO_LOG("CameraSessionNapi::CommitConfig errorCode:%{public}d", context->errorCode);
1104 });
1105 },
1106 AsyncCompleteCallback, static_cast<void*>(asyncContext.get()), &asyncContext->work);
1107 if (status != napi_ok) {
1108 MEDIA_ERR_LOG("Failed to create napi_create_async_work for CameraSessionNapi::CommitConfig");
1109 asyncFunction->Reset();
1110 } else {
1111 asyncContext->queueTask =
1112 CameraNapiWorkerQueueKeeper::GetInstance()->AcquireWorkerQueueTask("CameraSessionNapi::CommitConfig");
1113 napi_queue_async_work_with_qos(env, asyncContext->work, napi_qos_user_initiated);
1114 asyncContext.release();
1115 }
1116 if (asyncFunction->GetAsyncFunctionType() == ASYNC_FUN_TYPE_PROMISE) {
1117 return asyncFunction->GetPromise();
1118 }
1119 return CameraNapiUtils::GetUndefinedValue(env);
1120 }
1121
1122 napi_value CameraSessionNapi::LockForControl(napi_env env, napi_callback_info info)
1123 {
1124 MEDIA_DEBUG_LOG("LockForControl is called");
1125 napi_status status;
1126 napi_value result = nullptr;
1127 size_t argc = ARGS_ZERO;
1128 napi_value argv[ARGS_ZERO];
1129 napi_value thisVar = nullptr;
1130
1131 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1132
1133 napi_get_undefined(env, &result);
1134 CameraSessionNapi* cameraSessionNapi = nullptr;
1135 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1136 if (status == napi_ok && cameraSessionNapi != nullptr) {
1137 cameraSessionNapi->cameraSession_->LockForControl();
1138 } else {
1139 MEDIA_ERR_LOG("LockForControl call Failed!");
1140 }
1141 return result;
1142 }
1143
1144 napi_value CameraSessionNapi::UnlockForControl(napi_env env, napi_callback_info info)
1145 {
1146 MEDIA_DEBUG_LOG("UnlockForControl is called");
1147 napi_status status;
1148 napi_value result = nullptr;
1149 size_t argc = ARGS_ZERO;
1150 napi_value argv[ARGS_ZERO];
1151 napi_value thisVar = nullptr;
1152
1153 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1154
1155 napi_get_undefined(env, &result);
1156 CameraSessionNapi* cameraSessionNapi = nullptr;
1157 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1158 if (status == napi_ok && cameraSessionNapi != nullptr) {
1159 cameraSessionNapi->cameraSession_->UnlockForControl();
1160 } else {
1161 MEDIA_ERR_LOG("UnlockForControl call Failed!");
1162 }
1163 return result;
1164 }
1165
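// Unwraps the CameraInputNapi argument and returns its native CaptureInput; asserts with
// "type mismatch" on any argument that is not a CameraInput object.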
1166 napi_value GetJSArgsForCameraInput(napi_env env, size_t argc, const napi_value argv[],
1167 sptr<CaptureInput> &cameraInput)
1168 {
1169 MEDIA_DEBUG_LOG("GetJSArgsForCameraInput is called");
1170 napi_value result = nullptr;
1171 CameraInputNapi* cameraInputNapiObj = nullptr;
1172
1173 NAPI_ASSERT(env, argv != nullptr, "Argument list is empty");
1174
1175 for (size_t i = PARAM0; i < argc; i++) {
1176 napi_valuetype valueType = napi_undefined;
1177 napi_typeof(env, argv[i], &valueType);
1178 if (i == PARAM0 && valueType == napi_object) {
1179 napi_unwrap(env, argv[i], reinterpret_cast<void**>(&cameraInputNapiObj));
1180 if (cameraInputNapiObj != nullptr) {
1181 cameraInput = cameraInputNapiObj->GetCameraInput();
1182 } else {
1183 NAPI_ASSERT(env, false, "type mismatch");
1184 }
1185 } else {
1186 NAPI_ASSERT(env, false, "type mismatch");
1187 }
1188 }
1189 napi_get_boolean(env, true, &result);
1190 return result;
1191 }
1192
1193 napi_value CameraSessionNapi::AddInput(napi_env env, napi_callback_info info)
1194 {
1195 MEDIA_INFO_LOG("AddInput is called");
1196 napi_status status;
1197 napi_value result = nullptr;
1198 size_t argc = ARGS_ONE;
1199 napi_value argv[ARGS_ONE] = {0};
1200 napi_value thisVar = nullptr;
1201
1202 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1203 if (!CameraNapiUtils::CheckInvalidArgument(env, argc, ARGS_ONE, argv, ADD_INPUT)) {
1204 return result;
1205 }
1206
1207 napi_get_undefined(env, &result);
1208 CameraSessionNapi* cameraSessionNapi = nullptr;
1209 sptr<CaptureInput> cameraInput = nullptr;
1210 GetJSArgsForCameraInput(env, argc, argv, cameraInput);
1211 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1212 if (status == napi_ok && cameraSessionNapi != nullptr) {
1213 int32_t ret = cameraSessionNapi->cameraSession_->AddInput(cameraInput);
1214 if (!CameraNapiUtils::CheckError(env, ret)) {
1215 return nullptr;
1216 }
1217 } else {
1218 MEDIA_ERR_LOG("AddInput call Failed!");
1219 }
1220 return result;
1221 }
1222
1223 napi_value CameraSessionNapi::CanAddInput(napi_env env, napi_callback_info info)
1224 {
1225 MEDIA_DEBUG_LOG("CanAddInput is called");
1226 napi_status status;
1227 napi_value result = nullptr;
1228 size_t argc = ARGS_ONE;
1229 napi_value argv[ARGS_ONE] = {0};
1230 napi_value thisVar = nullptr;
1231
1232 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1233
1234 napi_get_undefined(env, &result);
1235 CameraSessionNapi* cameraSessionNapi = nullptr;
1236 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1237 if (status == napi_ok && cameraSessionNapi != nullptr) {
1238 sptr<CaptureInput> cameraInput = nullptr;
1239 GetJSArgsForCameraInput(env, argc, argv, cameraInput);
1240 bool isSupported = cameraSessionNapi->cameraSession_->CanAddInput(cameraInput);
1241 napi_get_boolean(env, isSupported, &result);
1242 } else {
1243 MEDIA_ERR_LOG("CanAddInput call Failed!");
1244 }
1245 return result;
1246 }
1247
1248 napi_value CameraSessionNapi::RemoveInput(napi_env env, napi_callback_info info)
1249 {
1250 MEDIA_DEBUG_LOG("RemoveInput is called");
1251 napi_status status;
1252 napi_value result = nullptr;
1253 size_t argc = ARGS_ONE;
1254 napi_value argv[ARGS_ONE] = {0};
1255 napi_value thisVar = nullptr;
1256
1257 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1258 if (!CameraNapiUtils::CheckInvalidArgument(env, argc, ARGS_ONE, argv, REMOVE_INPUT)) {
1259 return result;
1260 }
1261
1262 napi_get_undefined(env, &result);
1263 CameraSessionNapi* cameraSessionNapi = nullptr;
1264 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1265 if (status == napi_ok && cameraSessionNapi != nullptr) {
1266 sptr<CaptureInput> cameraInput = nullptr;
1267 GetJSArgsForCameraInput(env, argc, argv, cameraInput);
1268 int32_t ret = cameraSessionNapi->cameraSession_->RemoveInput(cameraInput);
1269 if (!CameraNapiUtils::CheckError(env, ret)) {
1270 return nullptr;
1271 }
1272 return result;
1273 } else {
1274 MEDIA_ERR_LOG("RemoveInput call Failed!");
1275 }
1276 return result;
1277 }
1278
1279 napi_value CameraSessionNapi::GetJSArgsForCameraOutput(napi_env env, size_t argc, const napi_value argv[],
1280 sptr<CaptureOutput> &cameraOutput)
1281 {
1282 MEDIA_DEBUG_LOG("GetJSArgsForCameraOutput is called");
1283 napi_value result = nullptr;
1284 PreviewOutputNapi* previewOutputNapiObj = nullptr;
1285 PhotoOutputNapi* photoOutputNapiObj = nullptr;
1286 VideoOutputNapi* videoOutputNapiObj = nullptr;
1287 MetadataOutputNapi* metadataOutputNapiObj = nullptr;
1288 DepthDataOutputNapi* depthDataOutputNapiObj = nullptr;
1289
1290 NAPI_ASSERT(env, argv != nullptr, "Argument list is empty");
1291
1292 for (size_t i = PARAM0; i < argc; i++) {
1293 napi_valuetype valueType = napi_undefined;
1294 napi_typeof(env, argv[i], &valueType);
1295
1296 if (i == PARAM0 && valueType == napi_object) {
1297 if (PreviewOutputNapi::IsPreviewOutput(env, argv[i])) {
1298 MEDIA_INFO_LOG("preview output adding..");
1299 napi_unwrap(env, argv[i], reinterpret_cast<void**>(&previewOutputNapiObj));
1300 cameraOutput = previewOutputNapiObj->GetPreviewOutput();
1301 } else if (PhotoOutputNapi::IsPhotoOutput(env, argv[i])) {
1302 MEDIA_INFO_LOG("photo output adding..");
1303 napi_unwrap(env, argv[i], reinterpret_cast<void**>(&photoOutputNapiObj));
1304 cameraOutput = photoOutputNapiObj->GetPhotoOutput();
1305 } else if (VideoOutputNapi::IsVideoOutput(env, argv[i])) {
1306 MEDIA_INFO_LOG("video output adding..");
1307 napi_unwrap(env, argv[i], reinterpret_cast<void**>(&videoOutputNapiObj));
1308 cameraOutput = videoOutputNapiObj->GetVideoOutput();
1309 } else if (MetadataOutputNapi::IsMetadataOutput(env, argv[i])) {
1310 MEDIA_INFO_LOG("metadata output adding..");
1311 napi_unwrap(env, argv[i], reinterpret_cast<void**>(&metadataOutputNapiObj));
1312 cameraOutput = metadataOutputNapiObj->GetMetadataOutput();
1313 } else if (DepthDataOutputNapi::IsDepthDataOutput(env, argv[i])) {
1314 MEDIA_INFO_LOG("depth data output adding..");
1315 napi_unwrap(env, argv[i], reinterpret_cast<void**>(&depthDataOutputNapiObj));
1316 cameraOutput = depthDataOutputNapiObj->GetDepthDataOutput();
1317 } else {
1318 MEDIA_INFO_LOG("invalid output ..");
1319 NAPI_ASSERT(env, false, "type mismatch");
1320 }
1321 } else {
1322 NAPI_ASSERT(env, false, "type mismatch");
1323 }
1324 }
1325 // Return true napi_value if params are successfully obtained
1326 napi_get_boolean(env, true, &result);
1327 return result;
1328 }
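// Note: GetJSArgsForCameraOutput follows the usual NAPI "type check, then unwrap" dispatch: each
// XxxOutputNapi::IsXxxOutput() probe is followed by napi_unwrap() on the same JS object. A minimal
// stand-alone version of one such branch -- a hypothetical sketch for illustration only, not
// referenced anywhere in this file -- could look like:
static sptr<CaptureOutput> UnwrapPreviewOutputSketch(napi_env env, napi_value jsOutput)
{
    if (!PreviewOutputNapi::IsPreviewOutput(env, jsOutput)) {
        return nullptr; // not a preview output wrapper
    }
    PreviewOutputNapi* previewOutputNapi = nullptr;
    napi_unwrap(env, jsOutput, reinterpret_cast<void**>(&previewOutputNapi));
    return previewOutputNapi != nullptr ? previewOutputNapi->GetPreviewOutput() : nullptr;
}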
1329
1330 napi_value CameraSessionNapi::AddOutput(napi_env env, napi_callback_info info)
1331 {
1332 MEDIA_INFO_LOG("AddOutput is called");
1333 napi_status status;
1334 napi_value result = nullptr;
1335 size_t argc = ARGS_ONE;
1336 napi_value argv[ARGS_ONE] = {0};
1337 napi_value thisVar = nullptr;
1338
1339 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1340 if (!CameraNapiUtils::CheckInvalidArgument(env, argc, ARGS_ONE, argv, ADD_OUTPUT)) {
1341 return result;
1342 }
1343
1344 napi_get_undefined(env, &result);
1345 CameraSessionNapi* cameraSessionNapi = nullptr;
1346 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1347 if (status == napi_ok && cameraSessionNapi != nullptr) {
1348 sptr<CaptureOutput> cameraOutput = nullptr;
1349 result = GetJSArgsForCameraOutput(env, argc, argv, cameraOutput);
1350 int32_t ret = cameraSessionNapi->cameraSession_->AddOutput(cameraOutput);
1351 if (!CameraNapiUtils::CheckError(env, ret)) {
1352 return nullptr;
1353 }
1354 } else {
1355 MEDIA_ERR_LOG("AddOutput call Failed!");
1356 }
1357 return result;
1358 }
1359
1360 napi_value CameraSessionNapi::CanAddOutput(napi_env env, napi_callback_info info)
1361 {
1362 MEDIA_DEBUG_LOG("CanAddOutput is called");
1363 napi_status status;
1364 napi_value result = nullptr;
1365 size_t argc = ARGS_ONE;
1366 napi_value argv[ARGS_ONE] = {0};
1367 napi_value thisVar = nullptr;
1368
1369 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1370
1371 napi_get_undefined(env, &result);
1372 CameraSessionNapi* cameraSessionNapi = nullptr;
1373 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1374 if (status == napi_ok && cameraSessionNapi != nullptr) {
1375 sptr<CaptureOutput> cameraOutput = nullptr;
1376 result = GetJSArgsForCameraOutput(env, argc, argv, cameraOutput);
1377 bool isSupported = cameraSessionNapi->cameraSession_->CanAddOutput(cameraOutput);
1378 napi_get_boolean(env, isSupported, &result);
1379 } else {
1380 MEDIA_ERR_LOG("CanAddOutput call Failed!");
1381 }
1382 return result;
1383 }
1384
1385 napi_value CameraSessionNapi::RemoveOutput(napi_env env, napi_callback_info info)
1386 {
1387 MEDIA_INFO_LOG("RemoveOutput is called");
1388 napi_status status;
1389 napi_value result = nullptr;
1390 size_t argc = ARGS_ONE;
1391 napi_value argv[ARGS_ONE] = {0};
1392 napi_value thisVar = nullptr;
1393
1394 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1395 if (!CameraNapiUtils::CheckInvalidArgument(env, argc, ARGS_ONE, argv, REMOVE_OUTPUT)) {
1396 return result;
1397 }
1398
1399 napi_get_undefined(env, &result);
1400 CameraSessionNapi* cameraSessionNapi = nullptr;
1401 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1402 if (status == napi_ok && cameraSessionNapi != nullptr) {
1403 sptr<CaptureOutput> cameraOutput = nullptr;
1404 result = GetJSArgsForCameraOutput(env, argc, argv, cameraOutput);
1405 int32_t ret = cameraSessionNapi->cameraSession_->RemoveOutput(cameraOutput);
1406 if (!CameraNapiUtils::CheckError(env, ret)) {
1407 return nullptr;
1408 }
1409 } else {
1410 MEDIA_ERR_LOG("RemoveOutput call Failed!");
1411 }
1412 return result;
1413 }
1414
1415 napi_value CameraSessionNapi::Start(napi_env env, napi_callback_info info)
1416 {
1417 MEDIA_INFO_LOG("Start is called");
1418 std::unique_ptr<CameraSessionAsyncContext> asyncContext = std::make_unique<CameraSessionAsyncContext>(
1419 "CameraSessionNapi::Start", CameraNapiUtils::IncrementAndGet(cameraSessionTaskId));
1420 auto asyncFunction =
1421 std::make_shared<CameraNapiAsyncFunction>(env, "Start", asyncContext->callbackRef, asyncContext->deferred);
1422 CameraNapiParamParser jsParamParser(env, info, asyncContext->objectInfo, asyncFunction);
1423 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "invalid argument")) {
1424 MEDIA_ERR_LOG("CameraSessionNapi::Start invalid argument");
1425 return nullptr;
1426 }
1427 asyncContext->HoldNapiValue(env, jsParamParser.GetThisVar());
1428 napi_status status = napi_create_async_work(
1429 env, nullptr, asyncFunction->GetResourceName(),
1430 [](napi_env env, void* data) {
1431 MEDIA_INFO_LOG("CameraSessionNapi::Start running on worker");
1432 auto context = static_cast<CameraSessionAsyncContext*>(data);
1433 CHECK_ERROR_RETURN_LOG(context->objectInfo == nullptr, "CameraSessionNapi::Start async info is nullptr");
1434 CAMERA_START_ASYNC_TRACE(context->funcName, context->taskId);
1435 CameraNapiWorkerQueueKeeper::GetInstance()->ConsumeWorkerQueueTask(context->queueTask, [&context]() {
1436 context->errorCode = context->objectInfo->cameraSession_->Start();
1437 context->status = context->errorCode == CameraErrorCode::SUCCESS;
1438 MEDIA_INFO_LOG("CameraSessionNapi::Start errorCode:%{public}d", context->errorCode);
1439 });
1440 },
1441 AsyncCompleteCallback, static_cast<void*>(asyncContext.get()), &asyncContext->work);
1442 if (status != napi_ok) {
1443 MEDIA_ERR_LOG("Failed to create napi_create_async_work for CameraSessionNapi::Start");
1444 asyncFunction->Reset();
1445 } else {
1446 asyncContext->queueTask =
1447 CameraNapiWorkerQueueKeeper::GetInstance()->AcquireWorkerQueueTask("CameraSessionNapi::Start");
1448 napi_queue_async_work_with_qos(env, asyncContext->work, napi_qos_user_initiated);
1449 asyncContext.release();
1450 }
1451 if (asyncFunction->GetAsyncFunctionType() == ASYNC_FUN_TYPE_PROMISE) {
1452 return asyncFunction->GetPromise();
1453 }
1454 return CameraNapiUtils::GetUndefinedValue(env);
1455 }
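// Note on the asynchronous pattern shared by Start, and by Stop and Release below: the native call
// runs in the napi_create_async_work execute callback on a worker thread, ordered through the
// CameraNapiWorkerQueueKeeper task acquired before queuing. Once napi_queue_async_work_with_qos()
// has accepted the work, asyncContext.release() hands ownership of the context over to the queued
// work, so it is intentionally not freed here; cleanup is deferred to the completion path
// (AsyncCompleteCallback). Whether the JS caller receives a promise or a callback invocation is
// decided by CameraNapiAsyncFunction's detected function type.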
1456
1457 napi_value CameraSessionNapi::Stop(napi_env env, napi_callback_info info)
1458 {
1459 MEDIA_INFO_LOG("Stop is called");
1460 std::unique_ptr<CameraSessionAsyncContext> asyncContext = std::make_unique<CameraSessionAsyncContext>(
1461 "CameraSessionNapi::Stop", CameraNapiUtils::IncrementAndGet(cameraSessionTaskId));
1462 auto asyncFunction =
1463 std::make_shared<CameraNapiAsyncFunction>(env, "Stop", asyncContext->callbackRef, asyncContext->deferred);
1464 CameraNapiParamParser jsParamParser(env, info, asyncContext->objectInfo, asyncFunction);
1465 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "invalid argument")) {
1466 MEDIA_ERR_LOG("CameraSessionNapi::Stop invalid argument");
1467 return nullptr;
1468 }
1469 asyncContext->HoldNapiValue(env, jsParamParser.GetThisVar());
1470 napi_status status = napi_create_async_work(
1471 env, nullptr, asyncFunction->GetResourceName(),
1472 [](napi_env env, void* data) {
1473 MEDIA_INFO_LOG("CameraSessionNapi::Stop running on worker");
1474 auto context = static_cast<CameraSessionAsyncContext*>(data);
1475 CHECK_ERROR_RETURN_LOG(context->objectInfo == nullptr, "CameraSessionNapi::Stop async info is nullptr");
1476 CAMERA_START_ASYNC_TRACE(context->funcName, context->taskId);
1477 CameraNapiWorkerQueueKeeper::GetInstance()->ConsumeWorkerQueueTask(context->queueTask, [&context]() {
1478 context->errorCode = context->objectInfo->cameraSession_->Stop();
1479 context->status = context->errorCode == CameraErrorCode::SUCCESS;
1480 MEDIA_INFO_LOG("CameraSessionNapi::Stop errorCode:%{public}d", context->errorCode);
1481 });
1482 },
1483 AsyncCompleteCallback, static_cast<void*>(asyncContext.get()), &asyncContext->work);
1484 if (status != napi_ok) {
1485 MEDIA_ERR_LOG("Failed to create napi_create_async_work for CameraSessionNapi::Stop");
1486 asyncFunction->Reset();
1487 } else {
1488 asyncContext->queueTask =
1489 CameraNapiWorkerQueueKeeper::GetInstance()->AcquireWorkerQueueTask("CameraSessionNapi::Stop");
1490 napi_queue_async_work_with_qos(env, asyncContext->work, napi_qos_user_initiated);
1491 asyncContext.release();
1492 }
1493 if (asyncFunction->GetAsyncFunctionType() == ASYNC_FUN_TYPE_PROMISE) {
1494 return asyncFunction->GetPromise();
1495 }
1496 return CameraNapiUtils::GetUndefinedValue(env);
1497 }
1498
1499 napi_value CameraSessionNapi::Release(napi_env env, napi_callback_info info)
1500 {
1501 MEDIA_INFO_LOG("Release is called");
1502 std::unique_ptr<CameraSessionAsyncContext> asyncContext = std::make_unique<CameraSessionAsyncContext>(
1503 "CameraSessionNapi::Release", CameraNapiUtils::IncrementAndGet(cameraSessionTaskId));
1504 auto asyncFunction =
1505 std::make_shared<CameraNapiAsyncFunction>(env, "Release", asyncContext->callbackRef, asyncContext->deferred);
1506 CameraNapiParamParser jsParamParser(env, info, asyncContext->objectInfo, asyncFunction);
1507 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "invalid argument")) {
1508 MEDIA_ERR_LOG("CameraSessionNapi::Release invalid argument");
1509 return nullptr;
1510 }
1511 asyncContext->HoldNapiValue(env, jsParamParser.GetThisVar());
1512 napi_status status = napi_create_async_work(
1513 env, nullptr, asyncFunction->GetResourceName(),
1514 [](napi_env env, void* data) {
1515 MEDIA_INFO_LOG("CameraSessionNapi::Release running on worker");
1516 auto context = static_cast<CameraSessionAsyncContext*>(data);
1517 CHECK_ERROR_RETURN_LOG(context->objectInfo == nullptr, "CameraSessionNapi::Release async info is nullptr");
1518 CAMERA_START_ASYNC_TRACE(context->funcName, context->taskId);
1519 CameraNapiWorkerQueueKeeper::GetInstance()->ConsumeWorkerQueueTask(context->queueTask, [&context]() {
1520 context->errorCode = context->objectInfo->cameraSession_->Release();
1521 context->status = context->errorCode == CameraErrorCode::SUCCESS;
1522 MEDIA_INFO_LOG("CameraSessionNapi::Release errorCode:%{public}d", context->errorCode);
1523 });
1524 },
1525 AsyncCompleteCallback, static_cast<void*>(asyncContext.get()), &asyncContext->work);
1526 if (status != napi_ok) {
1527 MEDIA_ERR_LOG("Failed to create napi_create_async_work for CameraSessionNapi::Release");
1528 asyncFunction->Reset();
1529 } else {
1530 asyncContext->queueTask =
1531 CameraNapiWorkerQueueKeeper::GetInstance()->AcquireWorkerQueueTask("CameraSessionNapi::Release");
1532 napi_queue_async_work_with_qos(env, asyncContext->work, napi_qos_user_initiated);
1533 asyncContext.release();
1534 }
1535 if (asyncFunction->GetAsyncFunctionType() == ASYNC_FUN_TYPE_PROMISE) {
1536 return asyncFunction->GetPromise();
1537 }
1538 return CameraNapiUtils::GetUndefinedValue(env);
1539 }
1540
1541 napi_value CameraSessionNapi::IsVideoStabilizationModeSupported(napi_env env, napi_callback_info info)
1542 {
1543 MEDIA_DEBUG_LOG("IsVideoStabilizationModeSupported is called");
1544 napi_status status;
1545 napi_value result = nullptr;
1546 size_t argc = ARGS_ONE;
1547 napi_value argv[ARGS_ONE] = {0};
1548 napi_value thisVar = nullptr;
1549
1550 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1551
1552 napi_get_undefined(env, &result);
1553 CameraSessionNapi* cameraSessionNapi = nullptr;
1554 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1555 if (status == napi_ok && cameraSessionNapi != nullptr) {
1556 int32_t value;
1557 napi_get_value_int32(env, argv[PARAM0], &value);
1558 VideoStabilizationMode videoStabilizationMode = (VideoStabilizationMode)value;
1559 bool isSupported;
1560 int32_t retCode = cameraSessionNapi->cameraSession_->
1561 IsVideoStabilizationModeSupported(videoStabilizationMode, isSupported);
1562 if (!CameraNapiUtils::CheckError(env, retCode)) {
1563 return nullptr;
1564 }
1565 napi_get_boolean(env, isSupported, &result);
1566 } else {
1567 MEDIA_ERR_LOG("IsVideoStabilizationModeSupported call Failed!");
1568 }
1569 return result;
1570 }
1571
1572 napi_value CameraSessionNapi::GetActiveVideoStabilizationMode(napi_env env, napi_callback_info info)
1573 {
1574 MEDIA_DEBUG_LOG("GetActiveVideoStabilizationMode is called");
1575 napi_status status;
1576 napi_value result = nullptr;
1577 size_t argc = ARGS_ZERO;
1578 napi_value argv[ARGS_ZERO];
1579 napi_value thisVar = nullptr;
1580
1581 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1582
1583 napi_get_undefined(env, &result);
1584 CameraSessionNapi* cameraSessionNapi = nullptr;
1585 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1586 if (status == napi_ok && cameraSessionNapi != nullptr) {
1587 VideoStabilizationMode videoStabilizationMode;
1588 int32_t retCode = cameraSessionNapi->cameraSession_->
1589 GetActiveVideoStabilizationMode(videoStabilizationMode);
1590 if (!CameraNapiUtils::CheckError(env, retCode)) {
1591 return nullptr;
1592 }
1593 napi_create_int32(env, videoStabilizationMode, &result);
1594 } else {
1595 MEDIA_ERR_LOG("GetActiveVideoStabilizationMode call Failed!");
1596 }
1597 return result;
1598 }
1599
1600 napi_value CameraSessionNapi::SetVideoStabilizationMode(napi_env env, napi_callback_info info)
1601 {
1602 MEDIA_DEBUG_LOG("SetVideoStabilizationMode is called");
1603 napi_status status;
1604 napi_value result = nullptr;
1605 size_t argc = ARGS_ONE;
1606 napi_value argv[ARGS_ONE] = {0};
1607 napi_value thisVar = nullptr;
1608
1609 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1610
1611 napi_get_undefined(env, &result);
1612 CameraSessionNapi* cameraSessionNapi = nullptr;
1613 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1614 if (status == napi_ok && cameraSessionNapi != nullptr) {
1615 int32_t value;
1616 napi_get_value_int32(env, argv[PARAM0], &value);
1617 VideoStabilizationMode videoStabilizationMode = (VideoStabilizationMode)value;
1618 int retCode = cameraSessionNapi->cameraSession_->SetVideoStabilizationMode(videoStabilizationMode);
1619 if (!CameraNapiUtils::CheckError(env, retCode)) {
1620 return nullptr;
1621 }
1622 } else {
1623 MEDIA_ERR_LOG("SetVideoStabilizationMode call Failed!");
1624 }
1625 return result;
1626 }
1627
1628 napi_value CameraSessionNapi::HasFlash(napi_env env, napi_callback_info info)
1629 {
1630 MEDIA_DEBUG_LOG("HasFlash is called");
1631 napi_status status;
1632 napi_value result = nullptr;
1633 size_t argc = ARGS_ZERO;
1634 napi_value argv[ARGS_ZERO];
1635 napi_value thisVar = nullptr;
1636
1637 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1638
1639 napi_get_undefined(env, &result);
1640 CameraSessionNapi* cameraSessionNapi = nullptr;
1641 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1642 if (status == napi_ok && cameraSessionNapi != nullptr) {
1643 bool isSupported = false;
1644 int retCode = cameraSessionNapi->cameraSession_->HasFlash(isSupported);
1645 if (!CameraNapiUtils::CheckError(env, retCode)) {
1646 return nullptr;
1647 }
1648 napi_get_boolean(env, isSupported, &result);
1649 } else {
1650 MEDIA_ERR_LOG("HasFlash call Failed!");
1651 }
1652 return result;
1653 }
1654
1655 napi_value CameraSessionNapi::IsFlashModeSupported(napi_env env, napi_callback_info info)
1656 {
1657 MEDIA_DEBUG_LOG("IsFlashModeSupported is called");
1658 napi_status status;
1659 napi_value result = nullptr;
1660 size_t argc = ARGS_ONE;
1661 napi_value argv[ARGS_ONE] = {0};
1662 napi_value thisVar = nullptr;
1663
1664 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1665
1666 napi_get_undefined(env, &result);
1667 CameraSessionNapi* cameraSessionNapi = nullptr;
1668 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1669 if (status == napi_ok && cameraSessionNapi != nullptr) {
1670 int32_t value;
1671 napi_get_value_int32(env, argv[PARAM0], &value);
1672 FlashMode flashMode = (FlashMode)value;
1673 bool isSupported;
1674 int32_t retCode = cameraSessionNapi->cameraSession_->IsFlashModeSupported(flashMode, isSupported);
1675 if (!CameraNapiUtils::CheckError(env, retCode)) {
1676 return nullptr;
1677 }
1678 napi_get_boolean(env, isSupported, &result);
1679 } else {
1680 MEDIA_ERR_LOG("IsFlashModeSupported call Failed!");
1681 }
1682 return result;
1683 }
1684
1685 napi_value CameraSessionNapi::SetFlashMode(napi_env env, napi_callback_info info)
1686 {
1687 MEDIA_DEBUG_LOG("SetFlashMode is called");
1688 CAMERA_SYNC_TRACE;
1689 napi_status status;
1690 napi_value result = nullptr;
1691 size_t argc = ARGS_ONE;
1692 napi_value argv[ARGS_ONE] = {0};
1693 napi_value thisVar = nullptr;
1694
1695 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1696
1697 napi_get_undefined(env, &result);
1698 CameraSessionNapi* cameraSessionNapi = nullptr;
1699 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1700 if (status == napi_ok && cameraSessionNapi != nullptr) {
1701 int32_t value;
1702 napi_get_value_int32(env, argv[PARAM0], &value);
1703 MEDIA_INFO_LOG("CameraSessionNapi::SetFlashMode mode:%{public}d", value);
1704 FlashMode flashMode = (FlashMode)value;
1705 cameraSessionNapi->cameraSession_->LockForControl();
1706 int retCode = cameraSessionNapi->cameraSession_->SetFlashMode(flashMode);
1707 cameraSessionNapi->cameraSession_->UnlockForControl();
1708 if (!CameraNapiUtils::CheckError(env, retCode)) {
1709 return nullptr;
1710 }
1711 } else {
1712 MEDIA_ERR_LOG("SetFlashMode call Failed!");
1713 }
1714 return result;
1715 }
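// The setters in this file (SetFlashMode, SetExposureMode, SetExposureBias, SetFocusMode, and so
// on) bracket every native mutation with LockForControl()/UnlockForControl(). A hypothetical RAII
// wrapper -- a sketch only, not used anywhere in this file -- expresses the same pairing and keeps
// it balanced on every return path:
class CaptureSessionControlGuardSketch {
public:
    explicit CaptureSessionControlGuardSketch(sptr<CaptureSession> session) : session_(session)
    {
        if (session_ != nullptr) {
            session_->LockForControl(); // begin a batch of control changes
        }
    }
    ~CaptureSessionControlGuardSketch()
    {
        if (session_ != nullptr) {
            session_->UnlockForControl(); // commit the batched changes
        }
    }
    CaptureSessionControlGuardSketch(const CaptureSessionControlGuardSketch&) = delete;
    CaptureSessionControlGuardSketch& operator=(const CaptureSessionControlGuardSketch&) = delete;

private:
    sptr<CaptureSession> session_;
};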
1716
1717 napi_value CameraSessionNapi::GetFlashMode(napi_env env, napi_callback_info info)
1718 {
1719 MEDIA_DEBUG_LOG("GetFlashMode is called");
1720 napi_status status;
1721 napi_value result = nullptr;
1722 size_t argc = ARGS_ZERO;
1723 napi_value argv[ARGS_ZERO];
1724 napi_value thisVar = nullptr;
1725
1726 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1727 napi_get_undefined(env, &result);
1728 CameraSessionNapi* cameraSessionNapi = nullptr;
1729 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1730 if (status == napi_ok && cameraSessionNapi != nullptr) {
1731 FlashMode flashMode;
1732 int32_t retCode = cameraSessionNapi->cameraSession_->GetFlashMode(flashMode);
1733 if (!CameraNapiUtils::CheckError(env, retCode)) {
1734 return nullptr;
1735 }
1736 napi_create_int32(env, flashMode, &result);
1737 } else {
1738 MEDIA_ERR_LOG("GetFlashMode call Failed!");
1739 }
1740 return result;
1741 }
1742
1743 napi_value CameraSessionNapi::IsLcdFlashSupported(napi_env env, napi_callback_info info)
1744 {
1745 MEDIA_DEBUG_LOG("IsLcdFlashSupported is called");
1746 CAMERA_SYNC_TRACE;
1747 napi_value result = CameraNapiUtils::GetUndefinedValue(env);
1748 if (!CameraNapiSecurity::CheckSystemApp(env)) {
1749 MEDIA_ERR_LOG("SystemApi isLcdFlashSupported is called!");
1750 return result;
1751 }
1752 CameraSessionNapi* cameraSessionNapi = nullptr;
1753 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
1754 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
1755 MEDIA_ERR_LOG("IsLcdFlashSupported parse parameter occur error");
1756 return result;
1757 }
1758 if (cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
1759 bool isSupported = cameraSessionNapi->cameraSession_->IsLcdFlashSupported();
1760 napi_get_boolean(env, isSupported, &result);
1761 } else {
1762 MEDIA_ERR_LOG("IsLcdFlashSupported call Failed!");
1763 }
1764 return result;
1765 }
1766
1767 napi_value CameraSessionNapi::EnableLcdFlash(napi_env env, napi_callback_info info)
1768 {
1769 MEDIA_DEBUG_LOG("EnableLcdFlash is called");
1770 napi_value result = CameraNapiUtils::GetUndefinedValue(env);
1771 if (!CameraNapiSecurity::CheckSystemApp(env)) {
1772 MEDIA_ERR_LOG("SystemApi enableLcdFlash is called!");
1773 return result;
1774 }
1775 bool isEnable = false;
1776 CameraSessionNapi* cameraSessionNapi = nullptr;
1777 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, isEnable);
1778 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
1779 MEDIA_ERR_LOG("EnableLcdFlash parse parameter occur error");
1780 return result;
1781 }
1782
1783 if (cameraSessionNapi->cameraSession_ != nullptr) {
1784 MEDIA_INFO_LOG("EnableLcdFlash:%{public}d", isEnable);
1785 cameraSessionNapi->cameraSession_->LockForControl();
1786 int32_t retCode = cameraSessionNapi->cameraSession_->EnableLcdFlash(isEnable);
1787 cameraSessionNapi->cameraSession_->UnlockForControl();
1788 if (!CameraNapiUtils::CheckError(env, retCode)) {
1789 MEDIA_ERR_LOG("EnableLcdFlash fail %{public}d", retCode);
1790 return result;
1791 }
1792 } else {
1793 MEDIA_ERR_LOG("EnableLcdFlash get native object fail");
1794 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
1795 return result;
1796 }
1797 return result;
1798 }
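// IsLcdFlashSupported and EnableLcdFlash are system-API gated: if CameraNapiSecurity::CheckSystemApp
// fails they log and return undefined. EnableLcdFlash also shows the parser-based argument style,
// where CameraNapiParamParser(env, info, cameraSessionNapi, isEnable) both unwraps the native object
// and reads the boolean argument, so no CAMERA_NAPI_GET_JS_ARGS/napi_unwrap boilerplate is needed.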
1799
1800 napi_value CameraSessionNapi::IsExposureModeSupported(napi_env env, napi_callback_info info)
1801 {
1802 MEDIA_DEBUG_LOG("IsExposureModeSupported is called");
1803 napi_status status;
1804 napi_value result = nullptr;
1805 size_t argc = ARGS_ONE;
1806 napi_value argv[ARGS_ONE] = {0};
1807 napi_value thisVar = nullptr;
1808
1809 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1810
1811 napi_get_undefined(env, &result);
1812 CameraSessionNapi* cameraSessionNapi = nullptr;
1813 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1814 if (status == napi_ok && cameraSessionNapi != nullptr) {
1815 int32_t value;
1816 napi_get_value_int32(env, argv[PARAM0], &value);
1817 ExposureMode exposureMode = (ExposureMode)value;
1818 bool isSupported;
1819 int32_t retCode = cameraSessionNapi->cameraSession_->
1820 IsExposureModeSupported(static_cast<ExposureMode>(exposureMode), isSupported);
1821 if (!CameraNapiUtils::CheckError(env, retCode)) {
1822 return nullptr;
1823 }
1824 napi_get_boolean(env, isSupported, &result);
1825 } else {
1826 MEDIA_ERR_LOG("IsExposureModeSupported call Failed!");
1827 }
1828 return result;
1829 }
1830
1831 napi_value CameraSessionNapi::GetExposureMode(napi_env env, napi_callback_info info)
1832 {
1833 MEDIA_DEBUG_LOG("GetExposureMode is called");
1834 napi_status status;
1835 napi_value result = nullptr;
1836 size_t argc = ARGS_ZERO;
1837 napi_value argv[ARGS_ZERO];
1838 napi_value thisVar = nullptr;
1839
1840 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1841
1842 napi_get_undefined(env, &result);
1843 CameraSessionNapi* cameraSessionNapi = nullptr;
1844 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1845 if (status == napi_ok && cameraSessionNapi != nullptr) {
1846 ExposureMode exposureMode;
1847 int32_t retCode = cameraSessionNapi->cameraSession_->GetExposureMode(exposureMode);
1848 if (!CameraNapiUtils::CheckError(env, retCode)) {
1849 return nullptr;
1850 }
1851 napi_create_int32(env, exposureMode, &result);
1852 } else {
1853 MEDIA_ERR_LOG("GetExposureMode call Failed!");
1854 }
1855 return result;
1856 }
1857
1858 napi_value CameraSessionNapi::SetExposureMode(napi_env env, napi_callback_info info)
1859 {
1860 MEDIA_DEBUG_LOG("SetExposureMode is called");
1861 CAMERA_SYNC_TRACE;
1862 napi_status status;
1863 napi_value result = nullptr;
1864 size_t argc = ARGS_ONE;
1865 napi_value argv[ARGS_ONE] = {0};
1866 napi_value thisVar = nullptr;
1867
1868 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1869
1870 napi_get_undefined(env, &result);
1871 CameraSessionNapi* cameraSessionNapi = nullptr;
1872 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1873 if (status == napi_ok && cameraSessionNapi != nullptr) {
1874 int32_t value;
1875 napi_get_value_int32(env, argv[PARAM0], &value);
1876 ExposureMode exposureMode = (ExposureMode)value;
1877 cameraSessionNapi->cameraSession_->LockForControl();
1878 int retCode = cameraSessionNapi->cameraSession_->SetExposureMode(exposureMode);
1879 cameraSessionNapi->cameraSession_->UnlockForControl();
1880 if (!CameraNapiUtils::CheckError(env, retCode)) {
1881 return nullptr;
1882 }
1883 } else {
1884 MEDIA_ERR_LOG("SetExposureMode call Failed!");
1885 }
1886 return result;
1887 }
1888
1889 napi_value CameraSessionNapi::SetMeteringPoint(napi_env env, napi_callback_info info)
1890 {
1891 MEDIA_DEBUG_LOG("SetMeteringPoint is called");
1892 CAMERA_SYNC_TRACE;
1893 napi_status status;
1894 napi_value result = nullptr;
1895 size_t argc = ARGS_ONE;
1896 napi_value argv[ARGS_ONE] = {0};
1897 napi_value thisVar = nullptr;
1898
1899 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1900
1901 napi_get_undefined(env, &result);
1902 CameraSessionNapi* cameraSessionNapi = nullptr;
1903 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1904 if (status == napi_ok && cameraSessionNapi != nullptr) {
1905 Point exposurePoint;
1906 if (GetPointProperties(env, argv[PARAM0], exposurePoint) == 0) {
1907 cameraSessionNapi->cameraSession_->LockForControl();
1908 int32_t retCode = cameraSessionNapi->cameraSession_->SetMeteringPoint(exposurePoint);
1909 cameraSessionNapi->cameraSession_->UnlockForControl();
1910 if (!CameraNapiUtils::CheckError(env, retCode)) {
1911 return nullptr;
1912 }
1913 } else {
1914 MEDIA_ERR_LOG("get point failed");
1915 }
1916 } else {
1917 MEDIA_ERR_LOG("SetMeteringPoint call Failed!");
1918 }
1919 return result;
1920 }
1921
1922 napi_value CameraSessionNapi::GetMeteringPoint(napi_env env, napi_callback_info info)
1923 {
1924 MEDIA_DEBUG_LOG("GetMeteringPoint is called");
1925 napi_status status;
1926 napi_value result = nullptr;
1927 size_t argc = ARGS_ZERO;
1928 napi_value argv[ARGS_ZERO];
1929 napi_value thisVar = nullptr;
1930
1931 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1932 napi_get_undefined(env, &result);
1933 CameraSessionNapi* cameraSessionNapi = nullptr;
1934 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1935 if (status == napi_ok && cameraSessionNapi != nullptr) {
1936 Point exposurePoint;
1937 int32_t retCode = cameraSessionNapi->cameraSession_->GetMeteringPoint(exposurePoint);
1938 if (!CameraNapiUtils::CheckError(env, retCode)) {
1939 return nullptr;
1940 }
1941 return GetPointNapiValue(env, exposurePoint);
1942 } else {
1943 MEDIA_ERR_LOG("GetMeteringPoint call Failed!");
1944 }
1945 return result;
1946 }
1947
1948 napi_value CameraSessionNapi::GetExposureBiasRange(napi_env env, napi_callback_info info)
1949 {
1950 MEDIA_DEBUG_LOG("GetExposureBiasRange is called");
1951 napi_status status;
1952 napi_value result = nullptr;
1953 size_t argc = ARGS_ZERO;
1954 napi_value argv[ARGS_ZERO];
1955 napi_value thisVar = nullptr;
1956
1957 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1958
1959 napi_get_undefined(env, &result);
1960 CameraSessionNapi* cameraSessionNapi = nullptr;
1961 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
1962 if (status == napi_ok && cameraSessionNapi != nullptr) {
1963 std::vector<float> vecExposureBiasList;
1964 int32_t retCode = cameraSessionNapi->cameraSession_->GetExposureBiasRange(vecExposureBiasList);
1965 if (!CameraNapiUtils::CheckError(env, retCode)) {
1966 return nullptr;
1967 }
1968 if (vecExposureBiasList.empty() || napi_create_array(env, &result) != napi_ok) {
1969 return result;
1970 }
1971 size_t len = vecExposureBiasList.size();
1972 for (size_t i = 0; i < len; i++) {
1973 float exposureBias = vecExposureBiasList[i];
1974 MEDIA_DEBUG_LOG("EXPOSURE_BIAS_RANGE : exposureBias = %{public}f", vecExposureBiasList[i]);
1975 napi_value value;
1976 napi_create_double(env, CameraNapiUtils::FloatToDouble(exposureBias), &value);
1977 napi_set_element(env, result, i, value);
1978 }
1979 MEDIA_DEBUG_LOG("EXPOSURE_BIAS_RANGE ExposureBiasList size : %{public}zu", vecExposureBiasList.size());
1980 } else {
1981 MEDIA_ERR_LOG("GetExposureBiasRange call Failed!");
1982 }
1983 return result;
1984 }
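// GetExposureBiasRange (and GetZoomRatioRange further below) convert a std::vector<float> into a JS
// number array element by element. A hypothetical helper capturing just that conversion -- an
// illustrative sketch, not referenced by this file -- would be:
static napi_value CreateJsDoubleArraySketch(napi_env env, const std::vector<float>& values)
{
    napi_value jsArray = nullptr;
    if (napi_create_array(env, &jsArray) != napi_ok) {
        return nullptr;
    }
    for (size_t i = 0; i < values.size(); i++) {
        napi_value element = nullptr;
        napi_create_double(env, static_cast<double>(values[i]), &element);
        napi_set_element(env, jsArray, static_cast<uint32_t>(i), element);
    }
    return jsArray;
}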
1985
1986 napi_value CameraSessionNapi::GetExposureValue(napi_env env, napi_callback_info info)
1987 {
1988 MEDIA_DEBUG_LOG("GetExposureValue is called");
1989 napi_status status;
1990 napi_value result = nullptr;
1991 size_t argc = ARGS_ZERO;
1992 napi_value argv[ARGS_ZERO];
1993 napi_value thisVar = nullptr;
1994
1995 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
1996
1997 napi_get_undefined(env, &result);
1998 CameraSessionNapi* cameraSessionNapi = nullptr;
1999 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2000 if (status == napi_ok && cameraSessionNapi != nullptr) {
2001 float exposureValue;
2002 int32_t retCode = cameraSessionNapi->cameraSession_->GetExposureValue(exposureValue);
2003 if (!CameraNapiUtils::CheckError(env, retCode)) {
2004 return nullptr;
2005 }
2006 napi_create_double(env, CameraNapiUtils::FloatToDouble(exposureValue), &result);
2007 } else {
2008 MEDIA_ERR_LOG("GetExposureValue call Failed!");
2009 }
2010 return result;
2011 }
2012
2013 napi_value CameraSessionNapi::SetExposureBias(napi_env env, napi_callback_info info)
2014 {
2015 MEDIA_DEBUG_LOG("SetExposureBias is called");
2016 CAMERA_SYNC_TRACE;
2017 napi_status status;
2018 napi_value result = nullptr;
2019 size_t argc = ARGS_ONE;
2020 napi_value argv[ARGS_ONE] = {0};
2021 napi_value thisVar = nullptr;
2022
2023 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2024
2025 napi_get_undefined(env, &result);
2026 CameraSessionNapi* cameraSessionNapi = nullptr;
2027 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2028 if (status == napi_ok && cameraSessionNapi != nullptr) {
2029 double exposureValue;
2030 napi_get_value_double(env, argv[PARAM0], &exposureValue);
2031 cameraSessionNapi->cameraSession_->LockForControl();
2032 int32_t retCode = cameraSessionNapi->cameraSession_->SetExposureBias((float)exposureValue);
2033 cameraSessionNapi->cameraSession_->UnlockForControl();
2034 if (!CameraNapiUtils::CheckError(env, retCode)) {
2035 return nullptr;
2036 }
2037 } else {
2038 MEDIA_ERR_LOG("SetExposureBias call Failed!");
2039 }
2040 return result;
2041 }
2042
2043 napi_value CameraSessionNapi::IsFocusModeSupported(napi_env env, napi_callback_info info)
2044 {
2045 MEDIA_DEBUG_LOG("IsFocusModeSupported is called");
2046 napi_status status;
2047 napi_value result = nullptr;
2048 size_t argc = ARGS_ONE;
2049 napi_value argv[ARGS_ONE] = {0};
2050 napi_value thisVar = nullptr;
2051
2052 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2053
2054 napi_get_undefined(env, &result);
2055 CameraSessionNapi* cameraSessionNapi = nullptr;
2056 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2057 if (status == napi_ok && cameraSessionNapi != nullptr) {
2058 int32_t value;
2059 napi_get_value_int32(env, argv[PARAM0], &value);
2060 FocusMode focusMode = (FocusMode)value;
2061 bool isSupported;
2062 int32_t retCode = cameraSessionNapi->cameraSession_->IsFocusModeSupported(focusMode,
2063 isSupported);
2064 if (!CameraNapiUtils::CheckError(env, retCode)) {
2065 return nullptr;
2066 }
2067 napi_get_boolean(env, isSupported, &result);
2068 } else {
2069 MEDIA_ERR_LOG("IsFocusModeSupported call Failed!");
2070 }
2071 return result;
2072 }
2073
2074 napi_value CameraSessionNapi::GetFocalLength(napi_env env, napi_callback_info info)
2075 {
2076 MEDIA_DEBUG_LOG("GetFocalLength is called");
2077 napi_status status;
2078 napi_value result = nullptr;
2079 size_t argc = ARGS_ZERO;
2080 napi_value argv[ARGS_ZERO];
2081 napi_value thisVar = nullptr;
2082
2083 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2084
2085 napi_get_undefined(env, &result);
2086 CameraSessionNapi* cameraSessionNapi = nullptr;
2087 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2088 if (status == napi_ok && cameraSessionNapi != nullptr) {
2089 float focalLength;
2090 int32_t retCode = cameraSessionNapi->cameraSession_->GetFocalLength(focalLength);
2091 if (!CameraNapiUtils::CheckError(env, retCode)) {
2092 return nullptr;
2093 }
2094 napi_create_double(env, CameraNapiUtils::FloatToDouble(focalLength), &result);
2095 } else {
2096 MEDIA_ERR_LOG("GetFocalLength call Failed!");
2097 }
2098 return result;
2099 }
2100
2101 napi_value CameraSessionNapi::SetFocusPoint(napi_env env, napi_callback_info info)
2102 {
2103 MEDIA_DEBUG_LOG("SetFocusPoint is called");
2104 CAMERA_SYNC_TRACE;
2105 napi_status status;
2106 napi_value result = nullptr;
2107 size_t argc = ARGS_ONE;
2108 napi_value argv[ARGS_ONE] = {0};
2109 napi_value thisVar = nullptr;
2110
2111 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2112
2113 napi_get_undefined(env, &result);
2114 CameraSessionNapi* cameraSessionNapi = nullptr;
2115 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2116 if (status == napi_ok && cameraSessionNapi != nullptr) {
2117 Point focusPoint;
2118 if (GetPointProperties(env, argv[PARAM0], focusPoint) == 0) {
2119 cameraSessionNapi->cameraSession_->LockForControl();
2120 int32_t retCode = cameraSessionNapi->cameraSession_->SetFocusPoint(focusPoint);
2121 cameraSessionNapi->cameraSession_->UnlockForControl();
2122 if (!CameraNapiUtils::CheckError(env, retCode)) {
2123 return nullptr;
2124 }
2125 } else {
2126 MEDIA_ERR_LOG("get point failed");
2127 }
2128 } else {
2129 MEDIA_ERR_LOG("SetFocusPoint call Failed!");
2130 }
2131 return result;
2132 }
2133
2134 napi_value CameraSessionNapi::GetFocusPoint(napi_env env, napi_callback_info info)
2135 {
2136 MEDIA_DEBUG_LOG("GetFocusPoint is called");
2137 napi_status status;
2138 napi_value result = nullptr;
2139 size_t argc = ARGS_ZERO;
2140 napi_value argv[ARGS_ZERO];
2141 napi_value thisVar = nullptr;
2142
2143 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2144
2145 napi_get_undefined(env, &result);
2146 CameraSessionNapi* cameraSessionNapi = nullptr;
2147 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2148 if (status == napi_ok && cameraSessionNapi != nullptr) {
2149 Point focusPoint;
2150 int32_t retCode = cameraSessionNapi->cameraSession_->GetFocusPoint(focusPoint);
2151 if (!CameraNapiUtils::CheckError(env, retCode)) {
2152 return nullptr;
2153 }
2154 return GetPointNapiValue(env, focusPoint);
2155 } else {
2156 MEDIA_ERR_LOG("GetFocusPoint call Failed!");
2157 }
2158 return result;
2159 }
2160
2161 napi_value CameraSessionNapi::GetFocusMode(napi_env env, napi_callback_info info)
2162 {
2163 MEDIA_DEBUG_LOG("GetFocusMode is called");
2164 napi_status status;
2165 napi_value result = nullptr;
2166 size_t argc = ARGS_ZERO;
2167 napi_value argv[ARGS_ZERO];
2168 napi_value thisVar = nullptr;
2169
2170 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2171
2172 napi_get_undefined(env, &result);
2173 CameraSessionNapi* cameraSessionNapi = nullptr;
2174 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2175 if (status == napi_ok && cameraSessionNapi != nullptr) {
2176 FocusMode focusMode;
2177 int32_t retCode = cameraSessionNapi->cameraSession_->GetFocusMode(focusMode);
2178 if (!CameraNapiUtils::CheckError(env, retCode)) {
2179 return nullptr;
2180 }
2181 napi_create_int32(env, focusMode, &result);
2182 } else {
2183 MEDIA_ERR_LOG("GetFocusMode call Failed!");
2184 }
2185 return result;
2186 }
2187
2188 napi_value CameraSessionNapi::SetFocusMode(napi_env env, napi_callback_info info)
2189 {
2190 MEDIA_DEBUG_LOG("SetFocusMode is called");
2191 CAMERA_SYNC_TRACE;
2192 napi_status status;
2193 napi_value result = nullptr;
2194 size_t argc = ARGS_ONE;
2195 napi_value argv[ARGS_ONE] = {0};
2196 napi_value thisVar = nullptr;
2197
2198 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2199
2200 napi_get_undefined(env, &result);
2201 CameraSessionNapi* cameraSessionNapi = nullptr;
2202 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2203 if (status == napi_ok && cameraSessionNapi != nullptr) {
2204 int32_t value;
2205 napi_get_value_int32(env, argv[PARAM0], &value);
2206 FocusMode focusMode = (FocusMode)value;
2207 cameraSessionNapi->cameraSession_->LockForControl();
2208 int retCode = cameraSessionNapi->cameraSession_->
2209 SetFocusMode(static_cast<FocusMode>(focusMode));
2210 cameraSessionNapi->cameraSession_->UnlockForControl();
2211 if (!CameraNapiUtils::CheckError(env, retCode)) {
2212 return nullptr;
2213 }
2214 } else {
2215 MEDIA_ERR_LOG("SetFocusMode call Failed!");
2216 }
2217 return result;
2218 }
2219
2220 napi_value CameraSessionNapi::IsFocusRangeTypeSupported(napi_env env, napi_callback_info info)
2221 {
2222 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2223 "SystemApi IsFocusRangeTypeSupported is called!");
2224 MEDIA_DEBUG_LOG("IsFocusRangeTypeSupported is called");
2225 napi_status status;
2226 napi_value result = nullptr;
2227 size_t argc = ARGS_ONE;
2228 napi_value argv[ARGS_ONE] = {0};
2229 napi_value thisVar = nullptr;
2230
2231 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2232
2233 napi_get_undefined(env, &result);
2234 CameraSessionNapi* cameraSessionNapi = nullptr;
2235 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2236 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2237 int32_t value = 0;
2238 napi_get_value_int32(env, argv[PARAM0], &value);
2239 FocusRangeType focusRangeType = static_cast<FocusRangeType>(value);
2240 bool isSupported = false;
2241 int32_t retCode = cameraSessionNapi->cameraSession_->IsFocusRangeTypeSupported(focusRangeType, isSupported);
2242 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2243 napi_get_boolean(env, isSupported, &result);
2244 } else {
2245 MEDIA_ERR_LOG("IsFocusRangeTypeSupported call Failed!");
2246 }
2247 return result;
2248 }
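// The focus-range, focus-driven, and color-reservation bindings below follow a more compact style
// than the earlier handlers: CHECK_ERROR_RETURN_RET_LOG rejects non-system callers up front, the
// native session pointer is null-checked together with the unwrap status, and
// CHECK_ERROR_RETURN_RET collapses the "CheckError then return nullptr" idiom into a single line.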
2249
2250 napi_value CameraSessionNapi::GetFocusRange(napi_env env, napi_callback_info info)
2251 {
2252 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2253 "SystemApi GetFocusRange is called!");
2254 MEDIA_DEBUG_LOG("GetFocusRange is called");
2255 napi_status status;
2256 napi_value result = nullptr;
2257 size_t argc = ARGS_ZERO;
2258 napi_value argv[ARGS_ZERO];
2259 napi_value thisVar = nullptr;
2260
2261 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2262
2263 napi_get_undefined(env, &result);
2264 CameraSessionNapi* cameraSessionNapi = nullptr;
2265 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2266 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2267 FocusRangeType focusRangeType = FocusRangeType::FOCUS_RANGE_TYPE_AUTO;
2268 int32_t retCode = cameraSessionNapi->cameraSession_->GetFocusRange(focusRangeType);
2269 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2270 napi_create_int32(env, focusRangeType, &result);
2271 } else {
2272 MEDIA_ERR_LOG("GetFocusRange call Failed!");
2273 }
2274 return result;
2275 }
2276
2277 napi_value CameraSessionNapi::SetFocusRange(napi_env env, napi_callback_info info)
2278 {
2279 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2280 "SystemApi SetFocusRange is called!");
2281 MEDIA_DEBUG_LOG("SetFocusRange is called");
2282 napi_status status;
2283 napi_value result = nullptr;
2284 size_t argc = ARGS_ONE;
2285 napi_value argv[ARGS_ONE] = {0};
2286 napi_value thisVar = nullptr;
2287
2288 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2289
2290 napi_get_undefined(env, &result);
2291 CameraSessionNapi* cameraSessionNapi = nullptr;
2292 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2293 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2294 int32_t value = 0;
2295 napi_get_value_int32(env, argv[PARAM0], &value);
2296 FocusRangeType focusRangeType = static_cast<FocusRangeType>(value);
2297 cameraSessionNapi->cameraSession_->LockForControl();
2298 int retCode = cameraSessionNapi->cameraSession_->SetFocusRange(focusRangeType);
2299 cameraSessionNapi->cameraSession_->UnlockForControl();
2300 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2301 } else {
2302 MEDIA_ERR_LOG("SetFocusRange call Failed!");
2303 }
2304 return result;
2305 }
2306
2307 napi_value CameraSessionNapi::IsFocusDrivenTypeSupported(napi_env env, napi_callback_info info)
2308 {
2309 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2310 "SystemApi IsFocusDrivenTypeSupported is called!");
2311 MEDIA_DEBUG_LOG("IsFocusDrivenTypeSupported is called");
2312 napi_status status;
2313 napi_value result = nullptr;
2314 size_t argc = ARGS_ONE;
2315 napi_value argv[ARGS_ONE] = {0};
2316 napi_value thisVar = nullptr;
2317
2318 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2319
2320 napi_get_undefined(env, &result);
2321 CameraSessionNapi* cameraSessionNapi = nullptr;
2322 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2323 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2324 int32_t value = 0;
2325 napi_get_value_int32(env, argv[PARAM0], &value);
2326 FocusDrivenType focusDrivenType = static_cast<FocusDrivenType>(value);
2327 bool isSupported = false;
2328 int32_t retCode = cameraSessionNapi->cameraSession_->IsFocusDrivenTypeSupported(focusDrivenType, isSupported);
2329 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2330 napi_get_boolean(env, isSupported, &result);
2331 } else {
2332 MEDIA_ERR_LOG("IsFocusDrivenTypeSupported call Failed!");
2333 }
2334 return result;
2335 }
2336
2337 napi_value CameraSessionNapi::GetFocusDriven(napi_env env, napi_callback_info info)
2338 {
2339 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2340 "SystemApi GetFocusDriven is called!");
2341 MEDIA_DEBUG_LOG("GetFocusDriven is called");
2342 napi_status status;
2343 napi_value result = nullptr;
2344 size_t argc = ARGS_ZERO;
2345 napi_value argv[ARGS_ZERO];
2346 napi_value thisVar = nullptr;
2347
2348 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2349
2350 napi_get_undefined(env, &result);
2351 CameraSessionNapi* cameraSessionNapi = nullptr;
2352 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2353 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2354 FocusDrivenType focusDrivenType = FocusDrivenType::FOCUS_DRIVEN_TYPE_AUTO;
2355 int32_t retCode = cameraSessionNapi->cameraSession_->GetFocusDriven(focusDrivenType);
2356 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2357 napi_create_int32(env, focusDrivenType, &result);
2358 } else {
2359 MEDIA_ERR_LOG("GetFocusDriven call Failed!");
2360 }
2361 return result;
2362 }
2363
2364 napi_value CameraSessionNapi::SetFocusDriven(napi_env env, napi_callback_info info)
2365 {
2366 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2367 "SystemApi SetFocusDriven is called!");
2368 MEDIA_DEBUG_LOG("SetFocusDriven is called");
2369 napi_status status;
2370 napi_value result = nullptr;
2371 size_t argc = ARGS_ONE;
2372 napi_value argv[ARGS_ONE] = {0};
2373 napi_value thisVar = nullptr;
2374
2375 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2376
2377 napi_get_undefined(env, &result);
2378 CameraSessionNapi* cameraSessionNapi = nullptr;
2379 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2380 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2381 int32_t value = 0;
2382 napi_get_value_int32(env, argv[PARAM0], &value);
2383 FocusDrivenType focusDrivenType = static_cast<FocusDrivenType>(value);
2384 cameraSessionNapi->cameraSession_->LockForControl();
2385 int retCode = cameraSessionNapi->cameraSession_->SetFocusDriven(focusDrivenType);
2386 cameraSessionNapi->cameraSession_->UnlockForControl();
2387 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2388 } else {
2389 MEDIA_ERR_LOG("SetFocusDriven call Failed!");
2390 }
2391 return result;
2392 }
2393
2394 napi_value CameraSessionNapi::GetSupportedColorReservationTypes(napi_env env, napi_callback_info info)
2395 {
2396 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2397 "SystemApi GetSupportedColorReservationTypes is called!");
2398 MEDIA_DEBUG_LOG("GetSupportedColorReservationTypes is called");
2399 napi_status status;
2400 napi_value result = nullptr;
2401 size_t argc = ARGS_ZERO;
2402 napi_value argv[ARGS_ZERO];
2403 napi_value thisVar = nullptr;
2404
2405 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2406
2407 napi_get_undefined(env, &result);
2408 status = napi_create_array(env, &result);
2409 CHECK_ERROR_RETURN_RET_LOG(status != napi_ok, result, "napi_create_array call Failed!");
2410 CameraSessionNapi* cameraSessionNapi = nullptr;
2411 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2412 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2413 std::vector<ColorReservationType> colorReservationTypes;
2414 int32_t retCode = cameraSessionNapi->cameraSession_->GetSupportedColorReservationTypes(colorReservationTypes);
2415 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2416
2417 MEDIA_INFO_LOG("ProfessionSessionNapi::GetSupportedColorReservationTypes len = %{public}zu",
2418 colorReservationTypes.size());
2419 if (!colorReservationTypes.empty()) {
2420 for (size_t i = 0; i < colorReservationTypes.size(); i++) {
2421 ColorReservationType colorReservationType = colorReservationTypes[i];
2422 napi_value value;
2423 napi_create_int32(env, colorReservationType, &value);
2424 napi_set_element(env, result, i, value);
2425 }
2426 }
2427 } else {
2428 MEDIA_ERR_LOG("GetSupportedColorReservationTypes call Failed!");
2429 }
2430 return result;
2431 }
2432
2433 napi_value CameraSessionNapi::GetColorReservation(napi_env env, napi_callback_info info)
2434 {
2435 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2436 "SystemApi GetColorReservation is called!");
2437 MEDIA_DEBUG_LOG("GetColorReservation is called");
2438 napi_status status;
2439 napi_value result = nullptr;
2440 size_t argc = ARGS_ZERO;
2441 napi_value argv[ARGS_ZERO];
2442 napi_value thisVar = nullptr;
2443
2444 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2445
2446 napi_get_undefined(env, &result);
2447 CameraSessionNapi* cameraSessionNapi = nullptr;
2448 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2449 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2450 ColorReservationType colorReservationType = ColorReservationType::COLOR_RESERVATION_TYPE_NONE;
2451 int32_t retCode = cameraSessionNapi->cameraSession_->GetColorReservation(colorReservationType);
2452 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2453 napi_create_int32(env, colorReservationType, &result);
2454 } else {
2455 MEDIA_ERR_LOG("GetColorReservation call Failed!");
2456 }
2457 return result;
2458 }
2459
2460 napi_value CameraSessionNapi::SetColorReservation(napi_env env, napi_callback_info info)
2461 {
2462 CHECK_ERROR_RETURN_RET_LOG(!CameraNapiSecurity::CheckSystemApp(env), nullptr,
2463 "SystemApi SetColorReservation is called!");
2464 MEDIA_DEBUG_LOG("SetColorReservation is called");
2465 napi_status status;
2466 napi_value result = nullptr;
2467 size_t argc = ARGS_ONE;
2468 napi_value argv[ARGS_ONE] = {0};
2469 napi_value thisVar = nullptr;
2470
2471 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2472
2473 napi_get_undefined(env, &result);
2474 CameraSessionNapi* cameraSessionNapi = nullptr;
2475 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2476 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2477 int32_t value = 0;
2478 napi_get_value_int32(env, argv[PARAM0], &value);
2479 ColorReservationType colorReservationType = static_cast<ColorReservationType>(value);
2480 cameraSessionNapi->cameraSession_->LockForControl();
2481 int retCode = cameraSessionNapi->cameraSession_->SetColorReservation(colorReservationType);
2482 cameraSessionNapi->cameraSession_->UnlockForControl();
2483 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
2484 } else {
2485 MEDIA_ERR_LOG("SetColorReservation call Failed!");
2486 }
2487 return result;
2488 }
2489
2490 napi_value CameraSessionNapi::SetQualityPrioritization(napi_env env, napi_callback_info info)
2491 {
2492 MEDIA_DEBUG_LOG("CameraSessionNapi::SetQualityPrioritization enter");
2493
2494 CameraSessionNapi* cameraSessionNapi = nullptr;
2495 int32_t quality;
2496 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, quality);
2497 if (!jsParamParser.AssertStatus(PARAMETER_ERROR, "parse parameter occur error")) {
2498 MEDIA_ERR_LOG("CameraSessionNapi::SetQualityPrioritization parse parameter occur error");
2499 return nullptr;
2500 }
2501
2502 if (cameraSessionNapi->cameraSession_ == nullptr) {
2503 MEDIA_ERR_LOG("CameraSessionNapi::SetQualityPrioritization get native object fail");
2504 CameraNapiUtils::ThrowError(env, PARAMETER_ERROR, "get native object fail");
2505 return nullptr;
2506 }
2507
2508 cameraSessionNapi->cameraSession_->LockForControl();
2509 int retCode =
2510 cameraSessionNapi->cameraSession_->SetQualityPrioritization(static_cast<QualityPrioritization>(quality));
2511 cameraSessionNapi->cameraSession_->UnlockForControl();
2512 if (!CameraNapiUtils::CheckError(env, retCode)) {
2513 MEDIA_ERR_LOG("CameraSessionNapi::SetQualityPrioritization fail! %{public}d", retCode);
2514 return nullptr;
2515 }
2516 MEDIA_DEBUG_LOG("CameraSessionNapi::SetQualityPrioritization success");
2517 return CameraNapiUtils::GetUndefinedValue(env);
2518 }
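// SetQualityPrioritization parses its arguments through CameraNapiParamParser instead of the
// CAMERA_NAPI_GET_JS_ARGS/napi_unwrap sequence used by most handlers in this file, and reports a
// missing native object by throwing via CameraNapiUtils::ThrowError rather than only logging.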
2519
2520 napi_value CameraSessionNapi::GetZoomRatioRange(napi_env env, napi_callback_info info)
2521 {
2522 MEDIA_DEBUG_LOG("GetZoomRatioRange is called");
2523 napi_status status;
2524 napi_value result = nullptr;
2525 size_t argc = ARGS_ZERO;
2526 napi_value argv[ARGS_ZERO];
2527 napi_value thisVar = nullptr;
2528
2529 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2530
2531 napi_get_undefined(env, &result);
2532
2533 CameraSessionNapi* cameraSessionNapi = nullptr;
2534 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2535 if (status == napi_ok && cameraSessionNapi != nullptr) {
2536 std::vector<float> vecZoomRatioList;
2537 int32_t retCode = cameraSessionNapi->cameraSession_->GetZoomRatioRange(vecZoomRatioList);
2538 if (!CameraNapiUtils::CheckError(env, retCode)) {
2539 return nullptr;
2540 }
2541 MEDIA_INFO_LOG("CameraSessionNapi::GetZoomRatioRange len = %{public}zu",
2542 vecZoomRatioList.size());
2543
2544 if (!vecZoomRatioList.empty() && napi_create_array(env, &result) == napi_ok) {
2545 for (size_t i = 0; i < vecZoomRatioList.size(); i++) {
2546 float zoomRatio = vecZoomRatioList[i];
2547 napi_value value;
2548 napi_create_double(env, CameraNapiUtils::FloatToDouble(zoomRatio), &value);
2549 napi_set_element(env, result, i, value);
2550 }
2551 } else {
2552 MEDIA_ERR_LOG("vecSupportedZoomRatioList is empty or failed to create array!");
2553 }
2554 } else {
2555 MEDIA_ERR_LOG("GetZoomRatioRange call Failed!");
2556 }
2557 return result;
2558 }
2559
2560 napi_value CameraSessionNapi::GetZoomRatio(napi_env env, napi_callback_info info)
2561 {
2562 MEDIA_DEBUG_LOG("GetZoomRatio is called");
2563 napi_status status;
2564 napi_value result = nullptr;
2565 size_t argc = ARGS_ZERO;
2566 napi_value argv[ARGS_ZERO];
2567 napi_value thisVar = nullptr;
2568
2569 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2570
2571 napi_get_undefined(env, &result);
2572 CameraSessionNapi* cameraSessionNapi = nullptr;
2573 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2574 if (status == napi_ok && cameraSessionNapi != nullptr) {
2575 float zoomRatio;
2576 int32_t retCode = cameraSessionNapi->cameraSession_->GetZoomRatio(zoomRatio);
2577 if (!CameraNapiUtils::CheckError(env, retCode)) {
2578 return nullptr;
2579 }
2580 napi_create_double(env, CameraNapiUtils::FloatToDouble(zoomRatio), &result);
2581 } else {
2582 MEDIA_ERR_LOG("GetZoomRatio call Failed!");
2583 }
2584 return result;
2585 }
2586
2587 napi_value CameraSessionNapi::SetZoomRatio(napi_env env, napi_callback_info info)
2588 {
2589 MEDIA_DEBUG_LOG("SetZoomRatio is called");
2590 CAMERA_SYNC_TRACE;
2591 napi_status status;
2592 napi_value result = nullptr;
2593
2594 size_t argc = ARGS_ONE;
2595 napi_value argv[ARGS_ONE];
2596 napi_value thisVar = nullptr;
2597
2598 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2599
2600 napi_get_undefined(env, &result);
2601 CameraSessionNapi* cameraSessionNapi = nullptr;
2602 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2603     if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2604 double zoomRatio;
2605 napi_get_value_double(env, argv[PARAM0], &zoomRatio);
2606 cameraSessionNapi->cameraSession_->LockForControl();
2607 int32_t retCode = cameraSessionNapi->cameraSession_->SetZoomRatio((float)zoomRatio);
2608 cameraSessionNapi->cameraSession_->UnlockForControl();
2609 if (!CameraNapiUtils::CheckError(env, retCode)) {
2610 return nullptr;
2611 }
2612 } else {
2613 MEDIA_ERR_LOG("SetZoomRatio call Failed!");
2614 }
2615 return result;
2616 }
2617
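// Forwards PrepareZoom to the native session under LockForControl/UnlockForControl; UnPrepareZoom below is the symmetric call.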
2618 napi_value CameraSessionNapi::PrepareZoom(napi_env env, napi_callback_info info)
2619 {
2620 MEDIA_DEBUG_LOG("PrepareZoom is called");
2621 CAMERA_SYNC_TRACE;
2622 napi_status status;
2623 napi_value result = nullptr;
2624
2625 size_t argc = ARGS_ZERO;
2626 napi_value argv[ARGS_ZERO];
2627 napi_value thisVar = nullptr;
2628
2629 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2630
2631 napi_get_undefined(env, &result);
2632 CameraSessionNapi* cameraSessionNapi = nullptr;
2633 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2634 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2635 cameraSessionNapi->cameraSession_->LockForControl();
2636 int32_t retCode = cameraSessionNapi->cameraSession_->PrepareZoom();
2637 cameraSessionNapi->cameraSession_->UnlockForControl();
2638 if (!CameraNapiUtils::CheckError(env, retCode)) {
2639 return nullptr;
2640 }
2641 } else {
2642 MEDIA_ERR_LOG("PrepareZoom call Failed!");
2643 }
2644 return result;
2645 }
2646
2647 napi_value CameraSessionNapi::UnPrepareZoom(napi_env env, napi_callback_info info)
2648 {
2649     MEDIA_DEBUG_LOG("UnPrepareZoom is called");
2650 CAMERA_SYNC_TRACE;
2651 napi_status status;
2652 napi_value result = nullptr;
2653
2654 size_t argc = ARGS_ZERO;
2655 napi_value argv[ARGS_ZERO];
2656 napi_value thisVar = nullptr;
2657
2658 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2659
2660 napi_get_undefined(env, &result);
2661 CameraSessionNapi* cameraSessionNapi = nullptr;
2662 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2663 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2664 cameraSessionNapi->cameraSession_->LockForControl();
2665 int32_t retCode = cameraSessionNapi->cameraSession_->UnPrepareZoom();
2666 cameraSessionNapi->cameraSession_->UnlockForControl();
2667 if (!CameraNapiUtils::CheckError(env, retCode)) {
2668 return nullptr;
2669 }
2670 } else {
2671         MEDIA_ERR_LOG("UnPrepareZoom call Failed!");
2672 }
2673 return result;
2674 }
2675
2676 napi_value CameraSessionNapi::SetSmoothZoom(napi_env env, napi_callback_info info)
2677 {
2678 MEDIA_DEBUG_LOG("SetSmoothZoom is called");
2679 CAMERA_SYNC_TRACE;
2680 napi_status status;
2681 napi_value result = nullptr;
2682
2683 size_t argc = ARGS_TWO;
2684 napi_value argv[ARGS_TWO];
2685 napi_value thisVar = nullptr;
2686
2687 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2688
2689 napi_get_undefined(env, &result);
2690 CameraSessionNapi* cameraSessionNapi = nullptr;
2691 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2692 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2693 double targetZoomRatio;
2694 int32_t smoothZoomType;
2695 napi_get_value_double(env, argv[PARAM0], &targetZoomRatio);
2696 napi_get_value_int32(env, argv[PARAM1], &smoothZoomType);
2697 cameraSessionNapi->cameraSession_->SetSmoothZoom((float)targetZoomRatio, smoothZoomType);
2698 } else {
2699 MEDIA_ERR_LOG("SetSmoothZoom call Failed!");
2700 }
2701 return result;
2702 }
2703
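// System API: returns an array of { zoomRatio, equivalentFocalLength } objects describing the session's zoom points.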
2704 napi_value CameraSessionNapi::GetZoomPointInfos(napi_env env, napi_callback_info info)
2705 {
2706 if (!CameraNapiSecurity::CheckSystemApp(env)) {
2707 MEDIA_ERR_LOG("SystemApi GetZoomPointInfos is called!");
2708 return nullptr;
2709 }
2710 MEDIA_DEBUG_LOG("GetZoomPointInfos is called");
2711 napi_status status;
2712 napi_value result = nullptr;
2713 size_t argc = ARGS_ZERO;
2714 napi_value argv[ARGS_ZERO];
2715 napi_value thisVar = nullptr;
2716
2717 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2718
2719 napi_get_undefined(env, &result);
2720
2721 CameraSessionNapi* cameraSessionNapi = nullptr;
2722 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2723     if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2724 std::vector<ZoomPointInfo> vecZoomPointInfoList;
2725 int32_t retCode = cameraSessionNapi->cameraSession_->GetZoomPointInfos(vecZoomPointInfoList);
2726 if (!CameraNapiUtils::CheckError(env, retCode)) {
2727 return nullptr;
2728 }
2729 MEDIA_INFO_LOG("CameraSessionNapi::GetZoomPointInfos len = %{public}zu",
2730 vecZoomPointInfoList.size());
2731
2732 if (!vecZoomPointInfoList.empty() && napi_create_array(env, &result) == napi_ok) {
2733 for (size_t i = 0; i < vecZoomPointInfoList.size(); i++) {
2734 ZoomPointInfo zoomPointInfo = vecZoomPointInfoList[i];
2735 napi_value value;
2736 napi_value zoomRatio;
2737 napi_value equivalentFocus;
2738 napi_create_object(env, &value);
2739 napi_create_double(env, CameraNapiUtils::FloatToDouble(zoomPointInfo.zoomRatio), &zoomRatio);
2740 napi_set_named_property(env, value, "zoomRatio", zoomRatio);
2741 napi_create_double(env, zoomPointInfo.equivalentFocalLength, &equivalentFocus);
2742 napi_set_named_property(env, value, "equivalentFocalLength", equivalentFocus);
2743 napi_set_element(env, result, i, value);
2744 }
2745 } else {
2746             MEDIA_ERR_LOG("vecZoomPointInfoList is empty or failed to create array!");
2747 }
2748 } else {
2749 MEDIA_ERR_LOG("GetZoomPointInfos call Failed!");
2750 }
2751 return result;
2752 }
2753
2754 napi_value CameraSessionNapi::GetSupportedFilters(napi_env env, napi_callback_info info)
2755 {
2756 MEDIA_DEBUG_LOG("getSupportedFilters is called");
2757 napi_status status;
2758 napi_value result = nullptr;
2759 size_t argc = ARGS_ZERO;
2760 napi_value argv[ARGS_ZERO];
2761 napi_value thisVar = nullptr;
2762
2763 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2764
2765 napi_get_undefined(env, &result);
2766 status = napi_create_array(env, &result);
2767 if (status != napi_ok) {
2768 MEDIA_ERR_LOG("napi_create_array call Failed!");
2769 return result;
2770 }
2771 CameraSessionNapi* cameraSessionNapi = nullptr;
2772 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2773 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2774 std::vector<FilterType> filterTypes = cameraSessionNapi->cameraSession_->GetSupportedFilters();
2775 MEDIA_INFO_LOG("CameraSessionNapi::GetSupportedFilters len = %{public}zu",
2776 filterTypes.size());
2777 if (!filterTypes.empty()) {
2778 for (size_t i = 0; i < filterTypes.size(); i++) {
2779 FilterType filterType = filterTypes[i];
2780 napi_value value;
2781 napi_create_int32(env, filterType, &value);
2782 napi_set_element(env, result, i, value);
2783 }
2784 }
2785 } else {
2786 MEDIA_ERR_LOG("GetSupportedFilters call Failed!");
2787 }
2788 return result;
2789 }
2790 napi_value CameraSessionNapi::GetFilter(napi_env env, napi_callback_info info)
2791 {
2792 MEDIA_DEBUG_LOG("GetFilter is called");
2793 napi_status status;
2794 napi_value result = nullptr;
2795 size_t argc = ARGS_ZERO;
2796 napi_value argv[ARGS_ZERO];
2797 napi_value thisVar = nullptr;
2798
2799 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2800
2801 napi_get_undefined(env, &result);
2802 CameraSessionNapi* cameraSessionNapi = nullptr;
2803 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2804 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2805 FilterType filterType = cameraSessionNapi->cameraSession_->GetFilter();
2806 napi_create_int32(env, filterType, &result);
2807 } else {
2808 MEDIA_ERR_LOG("GetFilter call Failed!");
2809 }
2810 return result;
2811 }
2812 napi_value CameraSessionNapi::SetFilter(napi_env env, napi_callback_info info)
2813 {
2814 MEDIA_DEBUG_LOG("setFilter is called");
2815 CAMERA_SYNC_TRACE;
2816 napi_status status;
2817 napi_value result = nullptr;
2818 size_t argc = ARGS_ONE;
2819 napi_value argv[ARGS_ONE] = {0};
2820 napi_value thisVar = nullptr;
2821
2822 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2823
2824 napi_get_undefined(env, &result);
2825 CameraSessionNapi* cameraSessionNapi = nullptr;
2826 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2827 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2828 int32_t value;
2829 napi_get_value_int32(env, argv[PARAM0], &value);
2830 FilterType filterType = (FilterType)value;
2831 cameraSessionNapi->cameraSession_->LockForControl();
2832         cameraSessionNapi->cameraSession_->SetFilter(filterType);
2834 cameraSessionNapi->cameraSession_->UnlockForControl();
2835 } else {
2836 MEDIA_ERR_LOG("SetFilter call Failed!");
2837 }
2838 return result;
2839 }
2840
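// System API: returns the beauty types supported by the native session as a JS array of enum values.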
2841 napi_value CameraSessionNapi::GetSupportedBeautyTypes(napi_env env, napi_callback_info info)
2842 {
2843 if (!CameraNapiSecurity::CheckSystemApp(env)) {
2844 MEDIA_ERR_LOG("SystemApi GetSupportedBeautyTypes is called!");
2845 return nullptr;
2846 }
2847 MEDIA_DEBUG_LOG("GetSupportedBeautyTypes is called");
2848 napi_status status;
2849 napi_value result = nullptr;
2850 size_t argc = ARGS_ZERO;
2851 napi_value argv[ARGS_ZERO];
2852 napi_value thisVar = nullptr;
2853
2854 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2855
2856 napi_get_undefined(env, &result);
2857 status = napi_create_array(env, &result);
2858 if (status != napi_ok) {
2859 MEDIA_ERR_LOG("napi_create_array call Failed!");
2860 return result;
2861 }
2862 CameraSessionNapi* cameraSessionNapi = nullptr;
2863 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2864 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2865 std::vector<BeautyType> beautyTypes = cameraSessionNapi->cameraSession_->GetSupportedBeautyTypes();
2866 MEDIA_INFO_LOG("CameraSessionNapi::GetSupportedBeautyTypes len = %{public}zu",
2867 beautyTypes.size());
2868         if (!beautyTypes.empty()) {
2869 for (size_t i = 0; i < beautyTypes.size(); i++) {
2870 BeautyType beautyType = beautyTypes[i];
2871 napi_value value;
2872 napi_create_int32(env, beautyType, &value);
2873 napi_set_element(env, result, i, value);
2874 }
2875 }
2876 } else {
2877 MEDIA_ERR_LOG("GetSupportedBeautyTypes call Failed!");
2878 }
2879 return result;
2880 }
2881
2882 napi_value CameraSessionNapi::GetSupportedBeautyRange(napi_env env, napi_callback_info info)
2883 {
2884 if (!CameraNapiSecurity::CheckSystemApp(env)) {
2885 MEDIA_ERR_LOG("SystemApi GetSupportedBeautyRange is called!");
2886 return nullptr;
2887 }
2888 MEDIA_DEBUG_LOG("GetSupportedBeautyRange is called");
2889 napi_status status;
2890 napi_value result = nullptr;
2891 size_t argc = ARGS_ONE;
2892 napi_value argv[ARGS_ONE];
2893 napi_value thisVar = nullptr;
2894
2895 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2896
2897 napi_get_undefined(env, &result);
2898 status = napi_create_array(env, &result);
2899 if (status != napi_ok) {
2900 MEDIA_ERR_LOG("napi_create_array call Failed!");
2901 return result;
2902 }
2903 CameraSessionNapi* cameraSessionNapi = nullptr;
2904 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2905 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2906 int32_t beautyType;
2907 napi_get_value_int32(env, argv[PARAM0], &beautyType);
2908 std::vector<int32_t> beautyRanges =
2909 cameraSessionNapi->cameraSession_->GetSupportedBeautyRange(static_cast<BeautyType>(beautyType));
2910 MEDIA_INFO_LOG("CameraSessionNapi::GetSupportedBeautyRange beautyType = %{public}d, len = %{public}zu",
2911 beautyType, beautyRanges.size());
2912 if (!beautyRanges.empty()) {
2913 for (size_t i = 0; i < beautyRanges.size(); i++) {
2914 int beautyRange = beautyRanges[i];
2915 napi_value value;
2916 napi_create_int32(env, beautyRange, &value);
2917 napi_set_element(env, result, i, value);
2918 }
2919 }
2920 } else {
2921 MEDIA_ERR_LOG("GetSupportedBeautyRange call Failed!");
2922 }
2923 return result;
2924 }
2925
2926 napi_value CameraSessionNapi::GetBeauty(napi_env env, napi_callback_info info)
2927 {
2928 if (!CameraNapiSecurity::CheckSystemApp(env)) {
2929 MEDIA_ERR_LOG("SystemApi GetBeauty is called!");
2930 return nullptr;
2931 }
2932 MEDIA_DEBUG_LOG("GetBeauty is called");
2933 napi_status status;
2934 napi_value result = nullptr;
2935 size_t argc = ARGS_ONE;
2936 napi_value argv[ARGS_ONE];
2937 napi_value thisVar = nullptr;
2938
2939 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2940
2941 napi_get_undefined(env, &result);
2942 CameraSessionNapi* cameraSessionNapi = nullptr;
2943 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2944 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2945 int32_t beautyType;
2946 napi_get_value_int32(env, argv[PARAM0], &beautyType);
2947 int32_t beautyStrength = cameraSessionNapi->cameraSession_->GetBeauty(static_cast<BeautyType>(beautyType));
2948 napi_create_int32(env, beautyStrength, &result);
2949 } else {
2950 MEDIA_ERR_LOG("GetBeauty call Failed!");
2951 }
2952 return result;
2953 }
2954
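// System API: applies the given beauty type and strength to the native session under LockForControl/UnlockForControl.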
2955 napi_value CameraSessionNapi::SetBeauty(napi_env env, napi_callback_info info)
2956 {
2957 if (!CameraNapiSecurity::CheckSystemApp(env)) {
2958 MEDIA_ERR_LOG("SystemApi SetBeauty is called!");
2959 return nullptr;
2960 }
2961 MEDIA_DEBUG_LOG("SetBeauty is called");
2962 napi_status status;
2963 napi_value result = nullptr;
2964 size_t argc = ARGS_TWO;
2965 napi_value argv[ARGS_TWO];
2966 napi_value thisVar = nullptr;
2967
2968 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
2969
2970 napi_get_undefined(env, &result);
2971 CameraSessionNapi* cameraSessionNapi = nullptr;
2972 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
2973 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
2974 int32_t beautyType;
2975 napi_get_value_int32(env, argv[PARAM0], &beautyType);
2976 int32_t beautyStrength;
2977 napi_get_value_int32(env, argv[PARAM1], &beautyStrength);
2978 cameraSessionNapi->cameraSession_->LockForControl();
2979 cameraSessionNapi->cameraSession_->SetBeauty(static_cast<BeautyType>(beautyType), beautyStrength);
2980 cameraSessionNapi->cameraSession_->UnlockForControl();
2981 } else {
2982 MEDIA_ERR_LOG("SetBeauty call Failed!");
2983 }
2984 return result;
2985 }
2986
2987 napi_value CameraSessionNapi::GetSupportedPortraitThemeTypes(napi_env env, napi_callback_info info)
2988 {
2989 if (!CameraNapiSecurity::CheckSystemApp(env)) {
2990 MEDIA_ERR_LOG("SystemApi GetSupportedPortraitThemeTypes is called!");
2991 return nullptr;
2992 }
2993 MEDIA_DEBUG_LOG("GetSupportedPortraitThemeTypes is called");
2994 CameraSessionNapi* cameraSessionNapi = nullptr;
2995 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
2996 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
2997 MEDIA_ERR_LOG("CameraSessionNapi::GetSupportedPortraitThemeTypes parse parameter occur error");
2998 return nullptr;
2999 }
3000
3001 napi_status status;
3002 napi_value result = nullptr;
3003 status = napi_create_array(env, &result);
3004 if (status != napi_ok) {
3005 MEDIA_ERR_LOG("napi_create_array call Failed!");
3006 return nullptr;
3007 }
3008
3009 if (cameraSessionNapi->cameraSession_ != nullptr) {
3010 std::vector<PortraitThemeType> themeTypes;
3011 int32_t retCode = cameraSessionNapi->cameraSession_->GetSupportedPortraitThemeTypes(themeTypes);
3012 if (!CameraNapiUtils::CheckError(env, retCode)) {
3013 return nullptr;
3014 }
3015 MEDIA_INFO_LOG("CameraSessionNapi::GetSupportedPortraitThemeTypes len = %{public}zu", themeTypes.size());
3016 if (!themeTypes.empty()) {
3017 for (size_t i = 0; i < themeTypes.size(); i++) {
3018 napi_value value;
3019 napi_create_int32(env, static_cast<int32_t>(themeTypes[i]), &value);
3020 napi_set_element(env, result, i, value);
3021 }
3022 }
3023 } else {
3024 MEDIA_ERR_LOG("GetSupportedPortraitThemeTypes call Failed!");
3025 }
3026 return result;
3027 }
3028
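// System API: sets the portrait theme type on the native session and maps native error codes to JS errors via CheckError.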
3029 napi_value CameraSessionNapi::SetPortraitThemeType(napi_env env, napi_callback_info info)
3030 {
3031 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3032 MEDIA_ERR_LOG("SystemApi SetPortraitThemeType is called!");
3033 return nullptr;
3034 }
3035 MEDIA_DEBUG_LOG("CameraSessionNapi::SetPortraitThemeType is called");
3036 int32_t type;
3037 CameraSessionNapi* cameraSessionNapi = nullptr;
3038 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, type);
3039 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3040 MEDIA_ERR_LOG("CameraSessionNapi::SetPortraitThemeType parse parameter occur error");
3041 return nullptr;
3042 }
3043
3044 if (cameraSessionNapi->cameraSession_ != nullptr) {
3045 PortraitThemeType portraitThemeType = static_cast<PortraitThemeType>(type);
3046 MEDIA_INFO_LOG("CameraSessionNapi::SetPortraitThemeType:%{public}d", portraitThemeType);
3047 cameraSessionNapi->cameraSession_->LockForControl();
3048 int32_t retCode = cameraSessionNapi->cameraSession_->SetPortraitThemeType(portraitThemeType);
3049 cameraSessionNapi->cameraSession_->UnlockForControl();
3050 if (!CameraNapiUtils::CheckError(env, retCode)) {
3051 MEDIA_ERR_LOG("CameraSessionNapi::SetPortraitThemeType fail %{public}d", retCode);
3052 return nullptr;
3053 }
3054 } else {
3055 MEDIA_ERR_LOG("CameraSessionNapi::SetPortraitThemeType get native object fail");
3056 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
3057 return nullptr;
3058 }
3059 return CameraNapiUtils::GetUndefinedValue(env);
3060 }
3061
3062 napi_value CameraSessionNapi::IsPortraitThemeSupported(napi_env env, napi_callback_info info)
3063 {
3064 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3065 MEDIA_ERR_LOG("SystemApi IsPortraitThemeSupported is called!");
3066 return nullptr;
3067 }
3068 MEDIA_DEBUG_LOG("CameraSessionNapi::IsPortraitThemeSupported is called");
3069 CameraSessionNapi* cameraSessionNapi = nullptr;
3070 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
3071 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3072 MEDIA_ERR_LOG("CameraSessionNapi::IsPortraitThemeSupported parse parameter occur error");
3073 return nullptr;
3074 }
3075 auto result = CameraNapiUtils::GetUndefinedValue(env);
3076 if (cameraSessionNapi->cameraSession_ != nullptr) {
3077 bool isSupported;
3078 int32_t retCode = cameraSessionNapi->cameraSession_->IsPortraitThemeSupported(isSupported);
3079 if (!CameraNapiUtils::CheckError(env, retCode)) {
3080 return nullptr;
3081 }
3082 napi_get_boolean(env, isSupported, &result);
3083 } else {
3084 MEDIA_ERR_LOG("CameraSessionNapi::IsPortraitThemeSupported get native object fail");
3085 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
3086 return nullptr;
3087 }
3088 return result;
3089 }
3090
3091 napi_value CameraSessionNapi::GetSupportedColorSpaces(napi_env env, napi_callback_info info)
3092 {
3093 MEDIA_DEBUG_LOG("GetSupportedColorSpaces is called.");
3094 napi_status status;
3095 napi_value result = nullptr;
3096 size_t argc = ARGS_ZERO;
3097 napi_value argv[ARGS_ZERO];
3098 napi_value thisVar = nullptr;
3099
3100 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3101
3102 napi_get_undefined(env, &result);
3103 status = napi_create_array(env, &result);
3104 if (status != napi_ok) {
3105 MEDIA_ERR_LOG("napi_create_array call Failed!");
3106 return result;
3107 }
3108 CameraSessionNapi* cameraSessionNapi = nullptr;
3109 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3110 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3111 std::vector<ColorSpace> colorSpaces = cameraSessionNapi->cameraSession_->GetSupportedColorSpaces();
3112 if (!colorSpaces.empty()) {
3113 for (size_t i = 0; i < colorSpaces.size(); i++) {
3114 int colorSpace = colorSpaces[i];
3115 napi_value value;
3116 napi_create_int32(env, colorSpace, &value);
3117 napi_set_element(env, result, i, value);
3118 }
3119 }
3120 } else {
3121 MEDIA_ERR_LOG("GetSupportedColorSpaces call Failed!");
3122 }
3123 return result;
3124 }
3125
3126 napi_value CameraSessionNapi::GetActiveColorSpace(napi_env env, napi_callback_info info)
3127 {
3128 MEDIA_DEBUG_LOG("GetActiveColorSpace is called");
3129 napi_status status;
3130 napi_value result = nullptr;
3131 size_t argc = ARGS_ZERO;
3132 napi_value argv[ARGS_ZERO];
3133 napi_value thisVar = nullptr;
3134
3135 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3136
3137 napi_get_undefined(env, &result);
3138 CameraSessionNapi* cameraSessionNapi = nullptr;
3139 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3140 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3141 ColorSpace colorSpace;
3142 int32_t retCode = cameraSessionNapi->cameraSession_->GetActiveColorSpace(colorSpace);
3143 if (!CameraNapiUtils::CheckError(env, retCode)) {
3144 return result;
3145 }
3146 napi_create_int32(env, colorSpace, &result);
3147 } else {
3148 MEDIA_ERR_LOG("GetActiveColorSpace call Failed!");
3149 }
3150 return result;
3151 }
3152
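// Sets the active color space on the native session; native error codes are converted to JS errors via CheckError.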
3153 napi_value CameraSessionNapi::SetColorSpace(napi_env env, napi_callback_info info)
3154 {
3155 MEDIA_DEBUG_LOG("SetColorSpace is called");
3156 CAMERA_SYNC_TRACE;
3157 napi_status status;
3158 napi_value result = nullptr;
3159 size_t argc = ARGS_ONE;
3160 napi_value argv[ARGS_ONE] = {0};
3161 napi_value thisVar = nullptr;
3162
3163 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3164
3165 napi_get_undefined(env, &result);
3166 CameraSessionNapi* cameraSessionNapi = nullptr;
3167 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3168 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3169 int32_t colorSpaceNumber;
3170 napi_get_value_int32(env, argv[PARAM0], &colorSpaceNumber);
3171 ColorSpace colorSpace = (ColorSpace)colorSpaceNumber;
3172 int32_t retCode = cameraSessionNapi->cameraSession_->SetColorSpace(colorSpace);
3173 if (!CameraNapiUtils::CheckError(env, retCode)) {
3174 return result;
3175 }
3176 } else {
3177 MEDIA_ERR_LOG("SetColorSpace call Failed!");
3178 }
3179 return result;
3180 }
3181
3182 napi_value CameraSessionNapi::GetSupportedColorEffects(napi_env env, napi_callback_info info)
3183 {
3184 MEDIA_DEBUG_LOG("GetSupportedColorEffects is called");
3185 napi_status status;
3186 napi_value result = nullptr;
3187 size_t argc = ARGS_ZERO;
3188 napi_value argv[ARGS_ZERO];
3189 napi_value thisVar = nullptr;
3190
3191 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3192
3193 napi_get_undefined(env, &result);
3194 status = napi_create_array(env, &result);
3195 if (status != napi_ok) {
3196 MEDIA_ERR_LOG("napi_create_array call Failed!");
3197 return result;
3198 }
3199 CameraSessionNapi* cameraSessionNapi = nullptr;
3200 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3201 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3202 std::vector<ColorEffect> colorEffects = cameraSessionNapi->cameraSession_->GetSupportedColorEffects();
3203 if (!colorEffects.empty()) {
3204 for (size_t i = 0; i < colorEffects.size(); i++) {
3205 int colorEffect = colorEffects[i];
3206 napi_value value;
3207 napi_create_int32(env, colorEffect, &value);
3208 napi_set_element(env, result, i, value);
3209 }
3210 }
3211 } else {
3212 MEDIA_ERR_LOG("GetSupportedColorEffects call Failed!");
3213 }
3214 return result;
3215 }
3216
3217 napi_value CameraSessionNapi::GetColorEffect(napi_env env, napi_callback_info info)
3218 {
3219 MEDIA_DEBUG_LOG("GetColorEffect is called");
3220 napi_status status;
3221 napi_value result = nullptr;
3222 size_t argc = ARGS_ZERO;
3223 napi_value argv[ARGS_ZERO];
3224 napi_value thisVar = nullptr;
3225
3226 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3227
3228 napi_get_undefined(env, &result);
3229 CameraSessionNapi* cameraSessionNapi = nullptr;
3230 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3231 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3232 ColorEffect colorEffect = cameraSessionNapi->cameraSession_->GetColorEffect();
3233 napi_create_int32(env, colorEffect, &result);
3234 } else {
3235 MEDIA_ERR_LOG("GetColorEffect call Failed!");
3236 }
3237 return result;
3238 }
3239
3240 napi_value CameraSessionNapi::SetColorEffect(napi_env env, napi_callback_info info)
3241 {
3242 MEDIA_DEBUG_LOG("SetColorEffect is called");
3243 CAMERA_SYNC_TRACE;
3244 napi_status status;
3245 napi_value result = nullptr;
3246 size_t argc = ARGS_ONE;
3247 napi_value argv[ARGS_ONE] = {0};
3248 napi_value thisVar = nullptr;
3249
3250 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3251
3252 napi_get_undefined(env, &result);
3253 CameraSessionNapi* cameraSessionNapi = nullptr;
3254 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3255 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3256 int32_t colorEffectNumber;
3257 napi_get_value_int32(env, argv[PARAM0], &colorEffectNumber);
3258 ColorEffect colorEffect = (ColorEffect)colorEffectNumber;
3259 cameraSessionNapi->cameraSession_->LockForControl();
3260         cameraSessionNapi->cameraSession_->SetColorEffect(colorEffect);
3261 cameraSessionNapi->cameraSession_->UnlockForControl();
3262 } else {
3263 MEDIA_ERR_LOG("SetColorEffect call Failed!");
3264 }
3265 return result;
3266 }
3267
3268 napi_value CameraSessionNapi::GetFocusDistance(napi_env env, napi_callback_info info)
3269 {
3270 MEDIA_DEBUG_LOG("GetFocusDistance is called");
3271 napi_status status;
3272 napi_value result = nullptr;
3273 size_t argc = ARGS_ZERO;
3274 napi_value argv[ARGS_ZERO];
3275 napi_value thisVar = nullptr;
3276
3277 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3278
3279 napi_get_undefined(env, &result);
3280 CameraSessionNapi* cameraSessionNapi = nullptr;
3281 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3282 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3283 float distance;
3284 int32_t retCode = cameraSessionNapi->cameraSession_->GetFocusDistance(distance);
3285 if (!CameraNapiUtils::CheckError(env, retCode)) {
3286 return nullptr;
3287 }
3288 napi_create_double(env, distance, &result);
3289 } else {
3290 MEDIA_ERR_LOG("GetFocusDistance call Failed!");
3291 }
3292 return result;
3293 }
3294
3295 napi_value CameraSessionNapi::SetFocusDistance(napi_env env, napi_callback_info info)
3296 {
3297 MEDIA_DEBUG_LOG("SetFocusDistance is called");
3298 CAMERA_SYNC_TRACE;
3299 napi_status status;
3300 napi_value result = nullptr;
3301 size_t argc = ARGS_ONE;
3302 napi_value argv[ARGS_ONE] = {0};
3303 napi_value thisVar = nullptr;
3304
3305 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3306
3307 napi_get_undefined(env, &result);
3308 CameraSessionNapi* cameraSessionNapi = nullptr;
3309 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3310 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3311 double value;
3312 napi_get_value_double(env, argv[PARAM0], &value);
3313 float distance = static_cast<float>(value);
3314 cameraSessionNapi->cameraSession_->LockForControl();
3315 cameraSessionNapi->cameraSession_->SetFocusDistance(distance);
3316 MEDIA_INFO_LOG("CameraSessionNapi::SetFocusDistance set focusDistance:%{public}f!", distance);
3317 cameraSessionNapi->cameraSession_->UnlockForControl();
3318 } else {
3319 MEDIA_ERR_LOG("SetFocusDistance call Failed!");
3320 }
3321 return result;
3322 }
3323
3324 napi_value CameraSessionNapi::IsMacroSupported(napi_env env, napi_callback_info info)
3325 {
3326 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3327 MEDIA_ERR_LOG("SystemApi IsMacroSupported is called!");
3328 return nullptr;
3329 }
3330 MEDIA_DEBUG_LOG("CameraSessionNapi::IsMacroSupported is called");
3331 CameraSessionNapi* cameraSessionNapi = nullptr;
3332 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
3333 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3334 MEDIA_ERR_LOG("CameraSessionNapi::IsMacroSupported parse parameter occur error");
3335 return nullptr;
3336 }
3337 auto result = CameraNapiUtils::GetUndefinedValue(env);
3338 if (cameraSessionNapi->cameraSession_ != nullptr) {
3339 bool isSupported = cameraSessionNapi->cameraSession_->IsMacroSupported();
3340 napi_get_boolean(env, isSupported, &result);
3341 } else {
3342 MEDIA_ERR_LOG("CameraSessionNapi::IsMacroSupported get native object fail");
3343 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
3344 return nullptr;
3345 }
3346 return result;
3347 }
3348
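// System API: enables or disables macro mode under LockForControl/UnlockForControl and reports native errors via CheckError.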
3349 napi_value CameraSessionNapi::EnableMacro(napi_env env, napi_callback_info info)
3350 {
3351 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3352 MEDIA_ERR_LOG("SystemApi EnableMacro is called!");
3353 return nullptr;
3354 }
3355 MEDIA_DEBUG_LOG("CameraSessionNapi::EnableMacro is called");
3356 bool isEnableMacro;
3357 CameraSessionNapi* cameraSessionNapi = nullptr;
3358 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, isEnableMacro);
3359 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3360 MEDIA_ERR_LOG("CameraSessionNapi::EnableMacro parse parameter occur error");
3361 return nullptr;
3362 }
3363
3364 if (cameraSessionNapi->cameraSession_ != nullptr) {
3365 MEDIA_INFO_LOG("CameraSessionNapi::EnableMacro:%{public}d", isEnableMacro);
3366 cameraSessionNapi->cameraSession_->LockForControl();
3367 int32_t retCode = cameraSessionNapi->cameraSession_->EnableMacro(isEnableMacro);
3368 cameraSessionNapi->cameraSession_->UnlockForControl();
3369 if (!CameraNapiUtils::CheckError(env, retCode)) {
3370 MEDIA_ERR_LOG("CameraSessionNapi::EnableMacro fail %{public}d", retCode);
3371 return nullptr;
3372 }
3373 } else {
3374 MEDIA_ERR_LOG("CameraSessionNapi::EnableMacro get native object fail");
3375 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
3376 return nullptr;
3377 }
3378 return CameraNapiUtils::GetUndefinedValue(env);
3379 }
3380
3381 napi_value CameraSessionNapi::IsDepthFusionSupported(napi_env env, napi_callback_info info)
3382 {
3383 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3384         MEDIA_ERR_LOG("SystemApi IsDepthFusionSupported is called!");
3385 return nullptr;
3386 }
3387 MEDIA_DEBUG_LOG("CameraSessionNapi::IsDepthFusionSupported is called");
3388 CameraSessionNapi* cameraSessionNapi = nullptr;
3389 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
3390 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3391 MEDIA_ERR_LOG("CameraSessionNapi::IsDepthFusionSupported parse parameter occur error");
3392 return nullptr;
3393 }
3394 auto result = CameraNapiUtils::GetUndefinedValue(env);
3395 if (cameraSessionNapi->cameraSession_ != nullptr) {
3396 bool isSupported = cameraSessionNapi->cameraSession_->IsDepthFusionSupported();
3397 napi_get_boolean(env, isSupported, &result);
3398 return result;
3399 } else {
3400 MEDIA_ERR_LOG("CameraSessionNapi::IsDepthFusionSupported call Failed!");
3401 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
3402 return nullptr;
3403 }
3404 return result;
3405 }
3406
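// System API: returns the depth fusion threshold values reported by the native session as a JS array of doubles.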
3407 napi_value CameraSessionNapi::GetDepthFusionThreshold(napi_env env, napi_callback_info info)
3408 {
3409 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3410 MEDIA_ERR_LOG("SystemApi GetDepthFusionThreshold is called!");
3411 return nullptr;
3412 }
3413 MEDIA_DEBUG_LOG("CameraSessionNapi::GetDepthFusionThreshold is called");
3414 CameraSessionNapi* cameraSessionNapi = nullptr;
3415 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
3416 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3417         MEDIA_ERR_LOG("CameraSessionNapi::GetDepthFusionThreshold parse parameter occur error");
3418 return nullptr;
3419 }
3420 napi_value result = nullptr;
3421 if (cameraSessionNapi->cameraSession_ != nullptr) {
3422 std::vector<float> vecDepthFusionThreshold;
3423 int32_t retCode = cameraSessionNapi->cameraSession_->GetDepthFusionThreshold(vecDepthFusionThreshold);
3424 if (!CameraNapiUtils::CheckError(env, retCode)) {
3425 return nullptr;
3426 }
3427 MEDIA_INFO_LOG("CameraSessionNapi::GetDepthFusionThreshold len = %{public}zu",
3428 vecDepthFusionThreshold.size());
3429
3430 if (!vecDepthFusionThreshold.empty() && napi_create_array(env, &result) == napi_ok) {
3431 for (size_t i = 0; i < vecDepthFusionThreshold.size(); i++) {
3432 float depthFusion = vecDepthFusionThreshold[i];
3433 napi_value value;
3434 napi_create_double(env, CameraNapiUtils::FloatToDouble(depthFusion), &value);
3435 napi_set_element(env, result, i, value);
3436 }
3437 } else {
3438 MEDIA_ERR_LOG("vecDepthFusionThreshold is empty or failed to create array!");
3439 }
3440 } else {
3441 MEDIA_ERR_LOG("CameraSessionNapi::GetDepthFusionThreshold call Failed!");
3442 return nullptr;
3443 }
3444 return result;
3445 }
3446
3447 napi_value CameraSessionNapi::IsDepthFusionEnabled(napi_env env, napi_callback_info info)
3448 {
3449 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3450 MEDIA_ERR_LOG("SystemApi IsDepthFusionEnabled is called!");
3451 return nullptr;
3452 }
3453 MEDIA_DEBUG_LOG("CameraSessionNapi::IsDepthFusionEnabled is called");
3454 CameraSessionNapi* cameraSessionNapi = nullptr;
3455 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
3456 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3457 MEDIA_ERR_LOG("CameraSessionNapi::IsDepthFusionEnabled parse parameter occur error");
3458 return nullptr;
3459 }
3460 auto result = CameraNapiUtils::GetUndefinedValue(env);
3461 if (cameraSessionNapi->cameraSession_ != nullptr) {
3462 bool isEnabled = cameraSessionNapi->cameraSession_->IsDepthFusionEnabled();
3463 napi_get_boolean(env, isEnabled, &result);
3464 MEDIA_INFO_LOG("CameraSessionNapi::IsDepthFusionEnabled:%{public}d", isEnabled);
3465 } else {
3466 MEDIA_ERR_LOG("CameraSessionNapi::IsDepthFusionEnabled get native object fail");
3467 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
3468 return nullptr;
3469 }
3470 return result;
3471 }
3472
3473 napi_value CameraSessionNapi::EnableDepthFusion(napi_env env, napi_callback_info info)
3474 {
3475 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3476 MEDIA_ERR_LOG("SystemApi EnableDepthFusion is called!");
3477 return nullptr;
3478 }
3479 MEDIA_DEBUG_LOG("CameraSessionNapi::EnableDepthFusion is called");
3480 bool isEnabledDepthFusion;
3481 CameraSessionNapi* cameraSessionNapi = nullptr;
3482 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, isEnabledDepthFusion);
3483 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3484         MEDIA_ERR_LOG("CameraSessionNapi::EnableDepthFusion parse parameter occur error");
3485 return nullptr;
3486 }
3487
3488 if (cameraSessionNapi->cameraSession_ != nullptr) {
3489 MEDIA_INFO_LOG("CameraSessionNapi::EnableDepthFusion:%{public}d", isEnabledDepthFusion);
3490 cameraSessionNapi->cameraSession_->LockForControl();
3491 int32_t retCode = cameraSessionNapi->cameraSession_->EnableDepthFusion(isEnabledDepthFusion);
3492 cameraSessionNapi->cameraSession_->UnlockForControl();
3493 if (!CameraNapiUtils::CheckError(env, retCode)) {
3494 MEDIA_ERR_LOG("CameraSessionNapi::EnableDepthFusion fail %{public}d", retCode);
3495 return nullptr;
3496 }
3497 MEDIA_INFO_LOG("CameraSessionNapi::EnableDepthFusion success");
3498 } else {
3499 MEDIA_ERR_LOG("CameraSessionNapi::EnableDepthFusion get native object fail");
3500 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
3501 return nullptr;
3502 }
3503 return CameraNapiUtils::GetUndefinedValue(env);
3504 }
3505
3506 napi_value CameraSessionNapi::IsMoonCaptureBoostSupported(napi_env env, napi_callback_info info)
3507 {
3508 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3509 MEDIA_ERR_LOG("SystemApi IsMoonCaptureBoostSupported is called!");
3510 return nullptr;
3511 }
3512 MEDIA_DEBUG_LOG("IsMoonCaptureBoostSupported is called");
3513 napi_status status;
3514 napi_value result = nullptr;
3515 size_t argc = ARGS_ZERO;
3516 napi_value argv[ARGS_ZERO];
3517 napi_value thisVar = nullptr;
3518
3519 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3520
3521 napi_get_undefined(env, &result);
3522 CameraSessionNapi* cameraSessionNapi = nullptr;
3523 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3524 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3525 bool isSupported = cameraSessionNapi->cameraSession_->IsMoonCaptureBoostSupported();
3526 napi_get_boolean(env, isSupported, &result);
3527 } else {
3528 MEDIA_ERR_LOG("IsMoonCaptureBoostSupported call Failed!");
3529 }
3530 return result;
3531 }
3532
3533 napi_value CameraSessionNapi::EnableMoonCaptureBoost(napi_env env, napi_callback_info info)
3534 {
3535 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3536 MEDIA_ERR_LOG("SystemApi EnableMoonCaptureBoost is called!");
3537 return nullptr;
3538 }
3539 MEDIA_DEBUG_LOG("EnableMoonCaptureBoost is called");
3540 napi_status status;
3541 napi_value result = nullptr;
3542 size_t argc = ARGS_ONE;
3543 napi_value argv[ARGS_ONE] = { 0 };
3544 napi_value thisVar = nullptr;
3545 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3546 NAPI_ASSERT(env, argc == ARGS_ONE, "requires one parameter");
3547 napi_valuetype valueType = napi_undefined;
3548 napi_typeof(env, argv[0], &valueType);
3549 if (valueType != napi_boolean && !CameraNapiUtils::CheckError(env, INVALID_ARGUMENT)) {
3550 return result;
3551 }
3552 napi_get_undefined(env, &result);
3553 CameraSessionNapi* cameraSessionNapi = nullptr;
3554 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3555 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3556 bool isEnableMoonCaptureBoost;
3557 napi_get_value_bool(env, argv[PARAM0], &isEnableMoonCaptureBoost);
3558 MEDIA_INFO_LOG("CameraSessionNapi::EnableMoonCaptureBoost:%{public}d", isEnableMoonCaptureBoost);
3559 cameraSessionNapi->cameraSession_->LockForControl();
3560 int32_t retCode = cameraSessionNapi->cameraSession_->EnableMoonCaptureBoost(isEnableMoonCaptureBoost);
3561 cameraSessionNapi->cameraSession_->UnlockForControl();
3562 if (retCode != 0 && !CameraNapiUtils::CheckError(env, retCode)) {
3563 return result;
3564 }
3565 }
3566 return result;
3567 }
3568
3569 napi_value CameraSessionNapi::IsFeatureSupported(napi_env env, napi_callback_info info)
3570 {
3571 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3572 MEDIA_ERR_LOG("SystemApi IsFeatureSupported is called!");
3573 return nullptr;
3574 }
3575 MEDIA_DEBUG_LOG("IsFeatureSupported is called");
3576 int32_t sceneFeature;
3577 CameraSessionNapi* cameraSessionNapi = nullptr;
3578 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, sceneFeature);
3579 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3580 MEDIA_ERR_LOG("CameraSessionNapi::IsFeatureSupported parse parameter occur error");
3581 return nullptr;
3582 }
3583
3584 napi_value result = nullptr;
3585 napi_get_boolean(
3586 env, cameraSessionNapi->cameraSession_->IsFeatureSupported(static_cast<SceneFeature>(sceneFeature)), &result);
3587 return result;
3588 }
3589
3590 napi_value CameraSessionNapi::EnableFeature(napi_env env, napi_callback_info info)
3591 {
3592 if (!CameraNapiSecurity::CheckSystemApp(env)) {
3593 MEDIA_ERR_LOG("SystemApi EnableFeature is called!");
3594 return nullptr;
3595 }
3596 MEDIA_DEBUG_LOG("EnableFeature is called");
3597 int32_t sceneFeature;
3598 bool isEnable;
3599 CameraSessionNapi* cameraSessionNapi = nullptr;
3600 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, sceneFeature, isEnable);
3601 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3602 MEDIA_ERR_LOG("CameraSessionNapi::EnableFeature parse parameter occur error");
3603 return nullptr;
3604 }
3605
3606 MEDIA_INFO_LOG("CameraSessionNapi::EnableFeature:%{public}d", isEnable);
3607 int32_t retCode =
3608 cameraSessionNapi->cameraSession_->EnableFeature(static_cast<SceneFeature>(sceneFeature), isEnable);
3609 if (!CameraNapiUtils::CheckError(env, retCode)) {
3610 return nullptr;
3611 }
3612
3613 return CameraNapiUtils::GetUndefinedValue(env);
3614 }
3615
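// Checks whether the session supports preconfiguration with the given PreconfigType and optional ProfileSizeRatio; returns a JS boolean.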
3616 napi_value CameraSessionNapi::CanPreconfig(napi_env env, napi_callback_info info)
3617 {
3618 MEDIA_DEBUG_LOG("CanPreconfig is called");
3619 size_t argSize = CameraNapiUtils::GetNapiArgs(env, info);
3620 int32_t configType;
3621 int32_t profileSizeRatio = ProfileSizeRatio::UNSPECIFIED;
3622 CameraSessionNapi* cameraSessionNapi = nullptr;
3623 if (argSize == ARGS_ONE) {
3624 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, configType);
3625 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3626 MEDIA_ERR_LOG("CameraSessionNapi::CanPreconfig parse parameter occur error");
3627 return nullptr;
3628 }
3629 } else {
3630 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, configType, profileSizeRatio);
3631 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3632 MEDIA_ERR_LOG("CameraSessionNapi::CanPreconfig parse 2 parameter occur error");
3633 return nullptr;
3634 }
3635 }
3636
3637 MEDIA_INFO_LOG("CameraSessionNapi::CanPreconfig: %{public}d, ratioType:%{public}d", configType, profileSizeRatio);
3638 bool result = cameraSessionNapi->cameraSession_->CanPreconfig(
3639 static_cast<PreconfigType>(configType), static_cast<ProfileSizeRatio>(profileSizeRatio));
3640 return CameraNapiUtils::GetBooleanValue(env, result);
3641 }
3642
3643 napi_value CameraSessionNapi::Preconfig(napi_env env, napi_callback_info info)
3644 {
3645 MEDIA_DEBUG_LOG("Preconfig is called");
3646 size_t argSize = CameraNapiUtils::GetNapiArgs(env, info);
3647 int32_t configType;
3648 int32_t profileSizeRatio = ProfileSizeRatio::UNSPECIFIED;
3649 CameraSessionNapi* cameraSessionNapi = nullptr;
3650 if (argSize == ARGS_ONE) {
3651 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, configType);
3652 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3653 MEDIA_ERR_LOG("CameraSessionNapi::Preconfig parse parameter occur error");
3654 return nullptr;
3655 }
3656 } else {
3657 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, configType, profileSizeRatio);
3658 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
3659 MEDIA_ERR_LOG("CameraSessionNapi::Preconfig parse 2 parameter occur error");
3660 return nullptr;
3661 }
3662 }
3663 int32_t retCode = cameraSessionNapi->cameraSession_->Preconfig(
3664 static_cast<PreconfigType>(configType), static_cast<ProfileSizeRatio>(profileSizeRatio));
3665 if (!CameraNapiUtils::CheckError(env, retCode)) {
3666 return nullptr;
3667 }
3668 return CameraNapiUtils::GetUndefinedValue(env);
3669 }
3670
3671 napi_value CameraSessionNapi::GetCameraOutputCapabilities(napi_env env, napi_callback_info info)
3672 {
3673 MEDIA_INFO_LOG("GetCameraOutputCapabilities is called");
3674
3675 size_t argSize = CameraNapiUtils::GetNapiArgs(env, info);
3676 if (argSize != ARGS_ONE) {
3677 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "Invalid argument.");
3678 return nullptr;
3679 }
3680
3681 std::string cameraId;
3682 CameraNapiObject cameraInfoObj { { { "cameraId", &cameraId } } };
3683 CameraSessionNapi* cameraSessionNapi = nullptr;
3684 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, cameraInfoObj);
3685
3686 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "Create cameraInput invalid argument!")) {
3687 MEDIA_ERR_LOG("CameraSessionNapi::GetCameraOutputCapabilities invalid argument");
3688 return nullptr;
3689 }
3690
3691 sptr<CameraDevice> cameraInfo = CameraManager::GetInstance()->GetCameraDeviceFromId(cameraId);
3692 if (cameraInfo == nullptr) {
3693 MEDIA_ERR_LOG("cameraInfo is null");
3694 CameraNapiUtils::ThrowError(env, SERVICE_FATL_ERROR, "cameraInfo is null.");
3695 return nullptr;
3696 }
3697
3698 std::vector<sptr<CameraOutputCapability>> caplist =
3699 cameraSessionNapi->cameraSession_->GetCameraOutputCapabilities(cameraInfo);
3700 if (caplist.empty()) {
3701 MEDIA_ERR_LOG("caplist is empty");
3702 return nullptr;
3703 }
3704
3705 napi_value capArray;
3706 napi_status status = napi_create_array(env, &capArray);
3707 if (status != napi_ok) {
3708 MEDIA_ERR_LOG("Failed to create napi array");
3709 return nullptr;
3710 }
3711
3712 for (size_t i = 0; i < caplist.size(); i++) {
3713 if (caplist[i] == nullptr) {
3714 continue;
3715 }
3716 caplist[i]->RemoveDuplicatesProfiles();
3717 napi_value cap = CameraNapiObjCameraOutputCapability(*caplist[i]).GenerateNapiValue(env);
3718 if (cap == nullptr || napi_set_element(env, capArray, i, cap) != napi_ok) {
3719 MEDIA_ERR_LOG("Failed to create camera napi wrapper object");
3720 return nullptr;
3721 }
3722 }
3723
3724 return capArray;
3725 }
3726
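// The helpers below convert JS capability objects (Size, Profile, VideoProfile and their arrays) into native structures.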
3727 void ParseSize(napi_env env, napi_value root, Size& size)
3728 {
3729 MEDIA_DEBUG_LOG("ParseSize is called");
3730 napi_value res = nullptr;
3731 if (napi_get_named_property(env, root, "width", &res) == napi_ok) {
3732 napi_get_value_uint32(env, res, &size.width);
3733 }
3734
3735 if (napi_get_named_property(env, root, "height", &res) == napi_ok) {
3736 napi_get_value_uint32(env, res, &size.height);
3737 }
3738 }
3739
3740 void ParseProfile(napi_env env, napi_value root, Profile& profile)
3741 {
3742 MEDIA_DEBUG_LOG("ParseProfile is called");
3743 napi_value res = nullptr;
3744
3745 if (napi_get_named_property(env, root, "size", &res) == napi_ok) {
3746 ParseSize(env, res, profile.size_);
3747 }
3748
3749 int32_t intValue = 0;
3750 if (napi_get_named_property(env, root, "format", &res) == napi_ok) {
3751 napi_get_value_int32(env, res, &intValue);
3752 profile.format_ = static_cast<CameraFormat>(intValue);
3753 }
3754 }
3755
3756 void ParseVideoProfile(napi_env env, napi_value root, VideoProfile& profile)
3757 {
3758 MEDIA_DEBUG_LOG("ParseVideoProfile is called");
3759 napi_value res = nullptr;
3760
3761 if (napi_get_named_property(env, root, "size", &res) == napi_ok) {
3762 ParseSize(env, res, profile.size_);
3763 }
3764
3765 int32_t intValue = 0;
3766 if (napi_get_named_property(env, root, "format", &res) == napi_ok) {
3767 napi_get_value_int32(env, res, &intValue);
3768 profile.format_ = static_cast<CameraFormat>(intValue);
3769 }
3770
3771 if (napi_get_named_property(env, root, "frameRateRange", &res) == napi_ok) {
3772 const int32_t LENGTH = 2;
3773 std::vector<int32_t> rateRanges(LENGTH);
3774 napi_value value;
3775
3776 if (napi_get_named_property(env, res, "min", &value) == napi_ok) {
3777 napi_get_value_int32(env, value, &rateRanges[0]);
3778 }
3779 if (napi_get_named_property(env, res, "max", &value) == napi_ok) {
3780 napi_get_value_int32(env, value, &rateRanges[1]);
3781 }
3782 profile.framerates_ = rateRanges;
3783 }
3784 }
3785
3786
3787 void ParseProfileList(napi_env env, napi_value arrayParam, std::vector<Profile> &profiles)
3788 {
3789 uint32_t length = 0;
3790 napi_get_array_length(env, arrayParam, &length);
3791 for (uint32_t i = 0; i < length; ++i) {
3792 napi_value value;
3793 napi_get_element(env, arrayParam, i, &value);
3794         Profile profile; // Create the Profile object on the stack
3795 ParseProfile(env, value, profile);
3796 profiles.push_back(profile);
3797 }
3798 }
3799
3800 void ParseVideoProfileList(napi_env env, napi_value arrayParam, std::vector<VideoProfile> &profiles)
3801 {
3802 uint32_t length = 0;
3803 napi_get_array_length(env, arrayParam, &length);
3804 for (uint32_t i = 0; i < length; ++i) {
3805 napi_value value;
3806 napi_get_element(env, arrayParam, i, &value);
3807 VideoProfile profile;
3808 ParseVideoProfile(env, value, profile);
3809 profiles.push_back(profile);
3810 }
3811 }
3812
3813 void ParseCameraOutputCapability(napi_env env, napi_value root,
3814 std::vector<Profile>& previewProfiles,
3815 std::vector<Profile>& photoProfiles,
3816 std::vector<VideoProfile>& videoProfiles)
3817 {
3818 previewProfiles.clear();
3819 photoProfiles.clear();
3820 videoProfiles.clear();
3821 napi_value res = nullptr;
3822
3823 if (napi_get_named_property(env, root, "previewProfiles", &res) == napi_ok) {
3824 ParseProfileList(env, res, previewProfiles);
3825 }
3826 if (napi_get_named_property(env, root, "photoProfiles", &res) == napi_ok) {
3827 ParseProfileList(env, res, photoProfiles);
3828 }
3829 if (napi_get_named_property(env, root, "videoProfiles", &res) == napi_ok) {
3830 ParseVideoProfileList(env, res, videoProfiles);
3831 }
3832 }
3833
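// Parses a CameraOutputCapability argument into native profile lists and returns the session's capability functions as a JS array keyed by the current scene mode.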
3834 napi_value CameraSessionNapi::GetSessionFunctions(napi_env env, napi_callback_info info)
3835 {
3836 MEDIA_INFO_LOG("GetSessionFunctions is called");
3837 napi_status status;
3838 napi_value result = nullptr;
3839 size_t argc = ARGS_ONE;
3840 napi_value argv[ARGS_ONE] = {0};
3841 napi_value thisVar = nullptr;
3842
3843 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3844
3845 std::vector<Profile> previewProfiles;
3846 std::vector<Profile> photoProfiles;
3847 std::vector<VideoProfile> videoProfiles;
3848 ParseCameraOutputCapability(env, argv[PARAM0], previewProfiles, photoProfiles, videoProfiles);
3849 CameraSessionNapi* cameraSessionNapi = nullptr;
3850 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3851 if (status != napi_ok || cameraSessionNapi == nullptr) {
3852 MEDIA_ERR_LOG("napi_unwrap failure!");
3853 return nullptr;
3854 }
3855
3856 auto session = cameraSessionNapi->cameraSession_;
3857 SceneMode mode = session->GetMode();
3858 auto cameraFunctionsList = session->GetSessionFunctions(previewProfiles, photoProfiles, videoProfiles);
3859 auto it = modeToFunctionTypeMap_.find(mode);
3860 if (it != modeToFunctionTypeMap_.end()) {
3861 result = CreateFunctionsJSArray(env, cameraFunctionsList, it->second);
3862 } else {
3863 MEDIA_ERR_LOG("GetSessionFunctions failed due to unsupported mode: %{public}d", mode);
3864 }
3865 return result;
3866 }
3867
3868 napi_value CameraSessionNapi::GetSessionConflictFunctions(napi_env env, napi_callback_info info)
3869 {
3870 MEDIA_INFO_LOG("GetSessionConflictFunctions is called");
3871 napi_status status;
3872 napi_value result = nullptr;
3873 size_t argc = ARGS_ONE;
3874 napi_value argv[ARGS_ONE];
3875 napi_value thisVar = nullptr;
3876
3877 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3878
3879 napi_get_undefined(env, &result);
3880
3881 CameraSessionNapi* cameraSessionNapi = nullptr;
3882 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3883 if (status != napi_ok || cameraSessionNapi == nullptr) {
3884 MEDIA_ERR_LOG("napi_unwrap failure!");
3885 return nullptr;
3886 }
3887
3888 auto session = cameraSessionNapi->cameraSession_;
3889 SceneMode mode = session->GetMode();
3890 auto conflictFunctionsList = session->GetSessionConflictFunctions();
3891 auto it = modeToConflictFunctionTypeMap_.find(mode);
3892 if (it != modeToConflictFunctionTypeMap_.end()) {
3893 result = CreateFunctionsJSArray(env, conflictFunctionsList, it->second);
3894 } else {
3895 MEDIA_ERR_LOG("GetSessionConflictFunctions failed due to unsupported mode: %{public}d", mode);
3896 }
3897 return result;
3898 }
3899
3900 napi_value CameraSessionNapi::CreateFunctionsJSArray(
3901 napi_env env, std::vector<sptr<CameraAbility>> functionsList, FunctionsType type)
3902 {
3903 MEDIA_DEBUG_LOG("CreateFunctionsJSArray is called");
3904 napi_value functionsArray = nullptr;
3905 napi_value functions = nullptr;
3906 napi_status status;
3907
3908 if (functionsList.empty()) {
3909 MEDIA_ERR_LOG("functionsList is empty");
3910 }
3911
3912 status = napi_create_array(env, &functionsArray);
3913 if (status != napi_ok) {
3914 MEDIA_ERR_LOG("napi_create_array failed");
3915 return functionsArray;
3916 }
3917
3918 size_t j = 0;
3919 for (size_t i = 0; i < functionsList.size(); i++) {
3920 functions = CameraFunctionsNapi::CreateCameraFunctions(env, functionsList[i], type);
3921 if ((functions == nullptr) || napi_set_element(env, functionsArray, j++, functions) != napi_ok) {
3922 MEDIA_ERR_LOG("failed to create functions object napi wrapper object");
3923 return nullptr;
3924 }
3925 }
3926 MEDIA_INFO_LOG("create functions count = %{public}zu", j);
3927 return functionsArray;
3928 }
3929
3930 napi_value CameraSessionNapi::IsEffectSuggestionSupported(napi_env env, napi_callback_info info)
3931 {
3932 if (!CameraNapiSecurity::CheckSystemApp(env, false)) {
3933 MEDIA_ERR_LOG("SystemApi IsEffectSuggestionSupported is called!");
3934 return nullptr;
3935 }
3936 MEDIA_DEBUG_LOG("IsEffectSuggestionSupported is called");
3937 napi_status status;
3938 napi_value result = nullptr;
3939 size_t argc = ARGS_ZERO;
3940 napi_value argv[ARGS_ZERO];
3941 napi_value thisVar = nullptr;
3942
3943 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3944
3945 napi_get_undefined(env, &result);
3946 CameraSessionNapi* cameraSessionNapi = nullptr;
3947 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3948 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3949 bool isEffectSuggestionSupported = cameraSessionNapi->cameraSession_->IsEffectSuggestionSupported();
3950 napi_get_boolean(env, isEffectSuggestionSupported, &result);
3951 } else {
3952 MEDIA_ERR_LOG("IsEffectSuggestionSupported call Failed!");
3953 }
3954 return result;
3955 }
3956
3957 napi_value CameraSessionNapi::EnableEffectSuggestion(napi_env env, napi_callback_info info)
3958 {
3959 if (!CameraNapiSecurity::CheckSystemApp(env, false)) {
3960 MEDIA_ERR_LOG("SystemApi EnableEffectSuggestion is called!");
3961 return nullptr;
3962 }
3963 MEDIA_DEBUG_LOG("EnableEffectSuggestion is called");
3964 napi_status status;
3965 napi_value result = nullptr;
3966 size_t argc = ARGS_ONE;
3967 napi_value argv[ARGS_ONE] = { 0 };
3968 napi_value thisVar = nullptr;
3969 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
3970 NAPI_ASSERT(env, argc == ARGS_ONE, "requires one parameter");
3971 napi_valuetype valueType = napi_undefined;
3972 napi_typeof(env, argv[0], &valueType);
3973 if (valueType != napi_boolean && !CameraNapiUtils::CheckError(env, INVALID_ARGUMENT)) {
3974 return result;
3975 }
3976 napi_get_undefined(env, &result);
3977 CameraSessionNapi* cameraSessionNapi = nullptr;
3978 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
3979 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
3980 bool enabled;
3981 napi_get_value_bool(env, argv[PARAM0], &enabled);
3982 MEDIA_INFO_LOG("CameraSessionNapi::EnableEffectSuggestion:%{public}d", enabled);
3983 cameraSessionNapi->cameraSession_->LockForControl();
3984 int32_t retCode = cameraSessionNapi->cameraSession_->EnableEffectSuggestion(enabled);
3985 cameraSessionNapi->cameraSession_->UnlockForControl();
3986 if (retCode != 0 && !CameraNapiUtils::CheckError(env, retCode)) {
3987 return result;
3988 }
3989 }
3990 return result;
3991 }
3992
3993 napi_value CameraSessionNapi::GetSupportedEffectSuggestionType(napi_env env, napi_callback_info info)
3994 {
3995 if (!CameraNapiSecurity::CheckSystemApp(env, false)) {
3996 MEDIA_ERR_LOG("SystemApi GetSupportedEffectSuggestionType is called!");
3997 return nullptr;
3998 }
3999 MEDIA_DEBUG_LOG("GetSupportedEffectSuggestionType is called");
4000 napi_status status;
4001 napi_value result = nullptr;
4002 size_t argc = ARGS_ZERO;
4003 napi_value argv[ARGS_ZERO];
4004 napi_value thisVar = nullptr;
4005
4006 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4007
4008 napi_get_undefined(env, &result);
4009 status = napi_create_array(env, &result);
4010 if (status != napi_ok) {
4011 MEDIA_ERR_LOG("napi_create_array call Failed!");
4012 return result;
4013 }
4014 CameraSessionNapi* cameraSessionNapi = nullptr;
4015 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4016 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4017 std::vector<EffectSuggestionType> effectSuggestionTypeList =
4018 cameraSessionNapi->cameraSession_->GetSupportedEffectSuggestionType();
4019 if (!effectSuggestionTypeList.empty()) {
4020 for (size_t i = 0; i < effectSuggestionTypeList.size(); i++) {
4021 int type = effectSuggestionTypeList[i];
4022 napi_value value;
4023 napi_create_int32(env, type, &value);
4024 napi_set_element(env, result, i, value);
4025 }
4026 }
4027 } else {
4028 MEDIA_ERR_LOG("GetSupportedEffectSuggestionType call Failed!");
4029 }
4030 return result;
4031 }
4032
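// Helper for SetEffectSuggestionStatus: walks a JS array of
// { type: EffectSuggestionType, status: boolean } objects and fills the native
// EffectSuggestionStatus list; elements missing either property keep the
// default-initialized field. Hypothetical ArkTS input, assuming the method is
// exported to JS as setEffectSuggestionStatus:
//   session.setEffectSuggestionStatus([{ type: 1, status: true }]);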
4033 static void ParseEffectSuggestionStatus(napi_env env, napi_value arrayParam,
4034 std::vector<EffectSuggestionStatus> &effectSuggestionStatusList)
4035 {
4036 MEDIA_DEBUG_LOG("ParseEffectSuggestionStatus is called");
4037 uint32_t length = 0;
4038 napi_value value;
4039 napi_get_array_length(env, arrayParam, &length);
4040 for (uint32_t i = 0; i < length; i++) {
4041 napi_get_element(env, arrayParam, i, &value);
4042 napi_value res = nullptr;
4043 EffectSuggestionStatus effectSuggestionStatus;
4044 int32_t intValue = 0;
4045 if (napi_get_named_property(env, value, "type", &res) == napi_ok) {
4046 napi_get_value_int32(env, res, &intValue);
4047 effectSuggestionStatus.type = static_cast<EffectSuggestionType>(intValue);
4048 }
4049 bool enabled = false;
4050 if (napi_get_named_property(env, value, "status", &res) == napi_ok) {
4051 napi_get_value_bool(env, res, &enabled);
4052 effectSuggestionStatus.status = enabled;
4053 }
4054 effectSuggestionStatusList.push_back(effectSuggestionStatus);
4055 }
4056 }
4057
4058 napi_value CameraSessionNapi::SetEffectSuggestionStatus(napi_env env, napi_callback_info info)
4059 {
4060 if (!CameraNapiSecurity::CheckSystemApp(env, false)) {
4061 MEDIA_ERR_LOG("SystemApi SetEffectSuggestionStatus is called!");
4062 return nullptr;
4063 }
4064 MEDIA_INFO_LOG("SetEffectSuggestionStatus is called");
4065 napi_status status;
4066 napi_value result = nullptr;
4067 size_t argc = ARGS_ONE;
4068 napi_value argv[ARGS_ONE] = {0};
4069 napi_value thisVar = nullptr;
4070
4071 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4072
4073 std::vector<EffectSuggestionStatus> effectSuggestionStatusList;
4074 ParseEffectSuggestionStatus(env, argv[PARAM0], effectSuggestionStatusList);
4075
4076 napi_get_undefined(env, &result);
4077 CameraSessionNapi* cameraSessionNapi = nullptr;
4078 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4079 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4080 cameraSessionNapi->cameraSession_->LockForControl();
4081 int32_t retCode = cameraSessionNapi->cameraSession_->SetEffectSuggestionStatus(effectSuggestionStatusList);
4082 cameraSessionNapi->cameraSession_->UnlockForControl();
4083 if (retCode != 0 && !CameraNapiUtils::CheckError(env, retCode)) {
4084 return result;
4085 }
4086 }
4087 return result;
4088 }
4089
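// UpdateEffectSuggestion expects two JS arguments: a numeric EffectSuggestionType
// and a boolean enable flag. Like EnableEffectSuggestion above, the native call is
// made between LockForControl() and UnlockForControl().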
4090 napi_value CameraSessionNapi::UpdateEffectSuggestion(napi_env env, napi_callback_info info)
4091 {
4092 if (!CameraNapiSecurity::CheckSystemApp(env, false)) {
4093 MEDIA_ERR_LOG("SystemApi UpdateEffectSuggestion is called!");
4094 return nullptr;
4095 }
4096 MEDIA_DEBUG_LOG("UpdateEffectSuggestion is called");
4097 napi_status status;
4098 napi_value result = nullptr;
4099 size_t argc = ARGS_TWO;
4100 napi_value argv[ARGS_TWO] = { 0, 0 };
4101 napi_value thisVar = nullptr;
4102 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4103 NAPI_ASSERT(env, argc == ARGS_TWO, "requires two parameters");
4104 napi_valuetype valueType = napi_undefined;
4105 napi_typeof(env, argv[PARAM0], &valueType);
4106 if (valueType != napi_number && !CameraNapiUtils::CheckError(env, INVALID_ARGUMENT)) {
4107 return result;
4108 }
4109 napi_typeof(env, argv[PARAM1], &valueType);
4110 if (valueType != napi_boolean && !CameraNapiUtils::CheckError(env, INVALID_ARGUMENT)) {
4111 return result;
4112 }
4113
4114 napi_get_undefined(env, &result);
4115 CameraSessionNapi* cameraSessionNapi = nullptr;
4116 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4117 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4118 int32_t value;
4119 napi_get_value_int32(env, argv[PARAM0], &value);
4120 auto effectSuggestionType = (EffectSuggestionType)value;
4121 bool enabled;
4122 napi_get_value_bool(env, argv[PARAM1], &enabled);
4123 MEDIA_INFO_LOG("CameraSessionNapi::UpdateEffectSuggestion:%{public}d enabled:%{public}d",
4124 effectSuggestionType, enabled);
4125 cameraSessionNapi->cameraSession_->LockForControl();
4126 int32_t retCode = cameraSessionNapi->cameraSession_->UpdateEffectSuggestion(effectSuggestionType, enabled);
4127 cameraSessionNapi->cameraSession_->UnlockForControl();
4128 if (retCode != 0 && !CameraNapiUtils::CheckError(env, retCode)) {
4129 return result;
4130 }
4131 }
4132 return result;
4133 }
4134
4135 // ------------------------------------------------auto_awb_props-------------------------------------------------------
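// The auto white-balance block below follows one pattern: "supported" queries
// build plain napi arrays/booleans, while SetWhiteBalanceMode applies the mode
// under LockForControl()/UnlockForControl(). Hypothetical ArkTS usage, assuming
// the JS-side names mirror these native bindings:
//   if (session.isWhiteBalanceModeSupported(mode)) {
//     session.setWhiteBalanceMode(mode);
//   }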
4136 napi_value CameraSessionNapi::GetSupportedWhiteBalanceModes(napi_env env, napi_callback_info info)
4137 {
4138 MEDIA_DEBUG_LOG("GetSupportedWhiteBalanceModes is called");
4139 napi_status status;
4140 napi_value result = nullptr;
4141 size_t argc = ARGS_ZERO;
4142 napi_value argv[ARGS_ZERO];
4143 napi_value thisVar = nullptr;
4144
4145 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4146
4147 napi_get_undefined(env, &result);
4148 status = napi_create_array(env, &result);
4149 if (status != napi_ok) {
4150 MEDIA_ERR_LOG("napi_create_array call Failed!");
4151 return result;
4152 }
4153 CameraSessionNapi* cameraSessionNapi = nullptr;
4154 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4155 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4156 std::vector<WhiteBalanceMode> whiteBalanceModes;
4157 int32_t retCode = cameraSessionNapi->cameraSession_->GetSupportedWhiteBalanceModes(whiteBalanceModes);
4158 if (!CameraNapiUtils::CheckError(env, retCode)) {
4159 return nullptr;
4160 }
4161
4162 MEDIA_INFO_LOG("ProfessionSessionNapi::GetSupportedWhiteBalanceModes len = %{public}zu",
4163 whiteBalanceModes.size());
4164 if (!whiteBalanceModes.empty()) {
4165 for (size_t i = 0; i < whiteBalanceModes.size(); i++) {
4166 WhiteBalanceMode whiteBalanceMode = whiteBalanceModes[i];
4167 napi_value value;
4168 napi_create_int32(env, whiteBalanceMode, &value);
4169 napi_set_element(env, result, i, value);
4170 }
4171 }
4172 } else {
4173 MEDIA_ERR_LOG("GetSupportedWhiteBalanceModes call Failed!");
4174 }
4175 return result;
4176 }
4177
4178 napi_value CameraSessionNapi::IsWhiteBalanceModeSupported(napi_env env, napi_callback_info info)
4179 {
4180 MEDIA_DEBUG_LOG("IsWhiteBalanceModeSupported is called");
4181 napi_status status;
4182 napi_value result = nullptr;
4183 size_t argc = ARGS_ONE;
4184 napi_value argv[ARGS_ONE] = {0};
4185 napi_value thisVar = nullptr;
4186
4187 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4188
4189 napi_get_undefined(env, &result);
4190 CameraSessionNapi* cameraSessionNapi = nullptr;
4191 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4192 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4193 int32_t value;
4194 napi_get_value_int32(env, argv[PARAM0], &value);
4195 WhiteBalanceMode mode = (WhiteBalanceMode)value;
4196 bool isSupported;
4197 int32_t retCode = cameraSessionNapi->cameraSession_->IsWhiteBalanceModeSupported(mode, isSupported);
4198 if (!CameraNapiUtils::CheckError(env, retCode)) {
4199 return nullptr;
4200 }
4201 napi_get_boolean(env, isSupported, &result);
4202 } else {
4203 MEDIA_ERR_LOG("IsWhiteBalanceModeSupported call Failed!");
4204 }
4205 return result;
4206 }
4207
4208 napi_value CameraSessionNapi::GetWhiteBalanceMode(napi_env env, napi_callback_info info)
4209 {
4210 MEDIA_DEBUG_LOG("GetWhiteBalanceMode is called");
4211 napi_status status;
4212 napi_value result = nullptr;
4213 size_t argc = ARGS_ZERO;
4214 napi_value argv[ARGS_ZERO];
4215 napi_value thisVar = nullptr;
4216
4217 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4218
4219 napi_get_undefined(env, &result);
4220 CameraSessionNapi* cameraSessionNapi = nullptr;
4221 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4222 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4223 WhiteBalanceMode whiteBalanceMode;
4224 int32_t retCode = cameraSessionNapi->cameraSession_->GetWhiteBalanceMode(whiteBalanceMode);
4225 if (!CameraNapiUtils::CheckError(env, retCode)) {
4226 return nullptr;
4227 }
4228 napi_create_int32(env, whiteBalanceMode, &result);
4229 } else {
4230 MEDIA_ERR_LOG("GetWhiteBalanceMode call Failed!");
4231 }
4232 return result;
4233 }
4234
4235 napi_value CameraSessionNapi::SetWhiteBalanceMode(napi_env env, napi_callback_info info)
4236 {
4237 MEDIA_DEBUG_LOG("SetWhiteBalanceMode is called");
4238 CAMERA_SYNC_TRACE;
4239 napi_status status;
4240 napi_value result = nullptr;
4241 size_t argc = ARGS_ONE;
4242 napi_value argv[ARGS_ONE] = {0};
4243 napi_value thisVar = nullptr;
4244
4245 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4246
4247 napi_get_undefined(env, &result);
4248 CameraSessionNapi* cameraSessionNapi = nullptr;
4249 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4250 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4251 int32_t value;
4252 napi_get_value_int32(env, argv[PARAM0], &value);
4253 WhiteBalanceMode mode = (WhiteBalanceMode)value;
4254 cameraSessionNapi->cameraSession_->LockForControl();
4255 cameraSessionNapi->cameraSession_->SetWhiteBalanceMode(mode);
4256 MEDIA_INFO_LOG("ProfessionSessionNapi::SetWhiteBalanceMode set mode:%{public}d", value);
4257 cameraSessionNapi->cameraSession_->UnlockForControl();
4258 } else {
4259 MEDIA_ERR_LOG("SetWhiteBalanceMode call Failed!");
4260 }
4261 return result;
4262 }
4263
4264 // -----------------------------------------------manual_awb_props------------------------------------------------------
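// Manual white balance mirrors the block above: the supported range is returned
// as an int32 array and SetManualWhiteBalance applies the value under
// LockForControl(). IsManualWhiteBalanceSupported is additionally gated as a
// system API via CameraNapiSecurity::CheckSystemApp.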
4265 napi_value CameraSessionNapi::GetManualWhiteBalanceRange(napi_env env, napi_callback_info info)
4266 {
4267 MEDIA_DEBUG_LOG("GetManualWhiteBalanceRange is called");
4268 napi_status status;
4269 napi_value result = nullptr;
4270 size_t argc = ARGS_ZERO;
4271 napi_value argv[ARGS_ZERO];
4272 napi_value thisVar = nullptr;
4273
4274 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4275
4276 napi_get_undefined(env, &result);
4277
4278 CameraSessionNapi* cameraSessionNapi = nullptr;
4279 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4280 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4281 std::vector<int32_t> whiteBalanceRange = {};
4282 int32_t retCode = cameraSessionNapi->cameraSession_->GetManualWhiteBalanceRange(whiteBalanceRange);
4283 if (!CameraNapiUtils::CheckError(env, retCode)) {
4284 return nullptr;
4285 }
4286 MEDIA_INFO_LOG("ProfessionSessionNapi::GetManualWhiteBalanceRange len = %{public}zu", whiteBalanceRange.size());
4287
4288 if (!whiteBalanceRange.empty() && napi_create_array(env, &result) == napi_ok) {
4289 for (size_t i = 0; i < whiteBalanceRange.size(); i++) {
4290 int32_t iso = whiteBalanceRange[i];
4291 napi_value value;
4292 napi_create_int32(env, iso, &value);
4293 napi_set_element(env, result, i, value);
4294 }
4295 } else {
4296 MEDIA_ERR_LOG("whiteBalanceRange is empty or failed to create array!");
4297 }
4298 } else {
4299 MEDIA_ERR_LOG("GetManualWhiteBalanceRange call Failed!");
4300 }
4301 return result;
4302 }
4303
4304 napi_value CameraSessionNapi::IsManualWhiteBalanceSupported(napi_env env, napi_callback_info info)
4305 {
4306 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4307 MEDIA_ERR_LOG("SystemApi IsManualIsoSupported is called!");
4308 return nullptr;
4309 }
4310 MEDIA_DEBUG_LOG("IsManualIsoSupported is called");
4311 napi_status status;
4312 napi_value result = nullptr;
4313 size_t argc = ARGS_ZERO;
4314 napi_value argv[ARGS_ZERO];
4315 napi_value thisVar = nullptr;
4316
4317 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4318
4319 napi_get_undefined(env, &result);
4320 CameraSessionNapi* cameraSessionNapi = nullptr;
4321 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4322 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4323 bool isSupported;
4324 int32_t retCode = cameraSessionNapi->cameraSession_->IsManualWhiteBalanceSupported(isSupported);
4325 if (!CameraNapiUtils::CheckError(env, retCode)) {
4326 return nullptr;
4327 }
4328 napi_get_boolean(env, isSupported, &result);
4329 } else {
4330 MEDIA_ERR_LOG("IsManualIsoSupported call Failed!");
4331 }
4332 return result;
4333 }
4334
4335 napi_value CameraSessionNapi::GetManualWhiteBalance(napi_env env, napi_callback_info info)
4336 {
4337 MEDIA_DEBUG_LOG("GetISO is called");
4338 napi_status status;
4339 napi_value result = nullptr;
4340 size_t argc = ARGS_ZERO;
4341 napi_value argv[ARGS_ZERO];
4342 napi_value thisVar = nullptr;
4343
4344 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4345
4346 napi_get_undefined(env, &result);
4347 CameraSessionNapi* cameraSessionNapi = nullptr;
4348 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4349 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4350 int32_t wbValue;
4351 int32_t retCode = cameraSessionNapi->cameraSession_->GetManualWhiteBalance(wbValue);
4352 if (!CameraNapiUtils::CheckError(env, retCode)) {
4353 return nullptr;
4354 }
4355 napi_create_int32(env, wbValue, &result);
4356 } else {
4357 MEDIA_ERR_LOG("GetISO call Failed!");
4358 }
4359 return result;
4360 }
4361
4362 napi_value CameraSessionNapi::SetManualWhiteBalance(napi_env env, napi_callback_info info)
4363 {
4364 MEDIA_DEBUG_LOG("SetManualWhiteBalance is called");
4365 CAMERA_SYNC_TRACE;
4366 napi_status status;
4367 napi_value result = nullptr;
4368 size_t argc = ARGS_ONE;
4369 napi_value argv[ARGS_ONE] = {0};
4370 napi_value thisVar = nullptr;
4371
4372 CAMERA_NAPI_GET_JS_ARGS(env, info, argc, argv, thisVar);
4373
4374 napi_get_undefined(env, &result);
4375 CameraSessionNapi* cameraSessionNapi = nullptr;
4376 status = napi_unwrap(env, thisVar, reinterpret_cast<void**>(&cameraSessionNapi));
4377 if (status == napi_ok && cameraSessionNapi != nullptr && cameraSessionNapi->cameraSession_ != nullptr) {
4378 int32_t wbValue;
4379 napi_get_value_int32(env, argv[PARAM0], &wbValue);
4380 cameraSessionNapi->cameraSession_->LockForControl();
4381 cameraSessionNapi->cameraSession_->SetManualWhiteBalance(wbValue);
4382 MEDIA_INFO_LOG("ProfessionSessionNapi::SetManualWhiteBalance set wbValue:%{public}d", wbValue);
4383 cameraSessionNapi->cameraSession_->UnlockForControl();
4384 } else {
4385 MEDIA_ERR_LOG("SetManualWhiteBalance call Failed!");
4386 }
4387 return result;
4388 }
4389
4390 napi_value CameraSessionNapi::GetSupportedVirtualApertures(napi_env env, napi_callback_info info)
4391 {
4392 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4393 MEDIA_ERR_LOG("SystemApi GetSupportedVirtualApertures is called!");
4394 return nullptr;
4395 }
4396 MEDIA_DEBUG_LOG("GetSupportedVirtualApertures is called");
4397 CameraSessionNapi* cameraSessionNapi = nullptr;
4398 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
4399 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
4400 MEDIA_ERR_LOG("CameraSessionNapi::GetSupportedVirtualApertures parse parameter occur error");
4401 return nullptr;
4402 }
4403
4404 napi_status status;
4405 napi_value result = nullptr;
4406 status = napi_create_array(env, &result);
4407 if (status != napi_ok) {
4408 MEDIA_ERR_LOG("napi_create_array call Failed!");
4409 return nullptr;
4410 }
4411
4412 if (cameraSessionNapi->cameraSession_ != nullptr) {
4413 std::vector<float> virtualApertures = {};
4414 int32_t retCode = cameraSessionNapi->cameraSession_->GetSupportedVirtualApertures(virtualApertures);
4415 MEDIA_INFO_LOG("GetSupportedVirtualApertures virtualApertures len = %{public}zu", virtualApertures.size());
4416 if (!CameraNapiUtils::CheckError(env, retCode)) {
4417 return nullptr;
4418 }
4419 if (!virtualApertures.empty()) {
4420 for (size_t i = 0; i < virtualApertures.size(); i++) {
4421 float virtualAperture = virtualApertures[i];
4422 napi_value value;
4423 napi_create_double(env, CameraNapiUtils::FloatToDouble(virtualAperture), &value);
4424 napi_set_element(env, result, i, value);
4425 }
4426 }
4427 } else {
4428 MEDIA_ERR_LOG("GetSupportedVirtualApertures call Failed!");
4429 }
4430 return result;
4431 }
4432
4433 napi_value CameraSessionNapi::GetVirtualAperture(napi_env env, napi_callback_info info)
4434 {
4435 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4436 MEDIA_ERR_LOG("SystemApi GetVirtualAperture is called!");
4437 return nullptr;
4438 }
4439 MEDIA_DEBUG_LOG("GetVirtualAperture is called");
4440 CameraSessionNapi* cameraSessionNapi = nullptr;
4441 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
4442 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
4443 MEDIA_ERR_LOG("CameraSessionNapi::GetVirtualAperture parse parameter occur error");
4444 return nullptr;
4445 }
4446 if (cameraSessionNapi->cameraSession_ != nullptr) {
4447 float virtualAperture;
4448 int32_t retCode = cameraSessionNapi->cameraSession_->GetVirtualAperture(virtualAperture);
4449 if (!CameraNapiUtils::CheckError(env, retCode)) {
4450 return nullptr;
4451 }
4452 napi_value result;
4453 napi_create_double(env, CameraNapiUtils::FloatToDouble(virtualAperture), &result);
4454 return result;
4455 } else {
4456 MEDIA_ERR_LOG("GetVirtualAperture call Failed!");
4457 }
4458 return CameraNapiUtils::GetUndefinedValue(env);
4459 }
4460
4461 napi_value CameraSessionNapi::SetVirtualAperture(napi_env env, napi_callback_info info)
4462 {
4463 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4464 MEDIA_ERR_LOG("SystemApi SetVirtualAperture is called!");
4465 return nullptr;
4466 }
4467 MEDIA_DEBUG_LOG("SetVirtualAperture is called");
4468 double virtualAperture;
4469 CameraSessionNapi* cameraSessionNapi = nullptr;
4470 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, virtualAperture);
4471 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
4472 MEDIA_ERR_LOG("CameraSessionNapi::SetVirtualAperture parse parameter occur error");
4473 return nullptr;
4474 }
4475 if (cameraSessionNapi->cameraSession_ != nullptr) {
4476 cameraSessionNapi->cameraSession_->LockForControl();
4477 int32_t retCode = cameraSessionNapi->cameraSession_->SetVirtualAperture((float)virtualAperture);
4478 MEDIA_INFO_LOG("SetVirtualAperture set virtualAperture %{public}f!", virtualAperture);
4479 cameraSessionNapi->cameraSession_->UnlockForControl();
4480 CHECK_ERROR_RETURN_RET(!CameraNapiUtils::CheckError(env, retCode), nullptr);
4481 } else {
4482 MEDIA_ERR_LOG("SetVirtualAperture call Failed!");
4483 }
4484 return CameraNapiUtils::GetUndefinedValue(env);
4485 }
4486
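// Physical apertures are reported by the native session as a nested
// vector<vector<float>>; CameraNapiUtils::ProcessingPhysicalApertures converts
// that structure into the JS value returned to the caller.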
4487 napi_value CameraSessionNapi::GetSupportedPhysicalApertures(napi_env env, napi_callback_info info)
4488 {
4489 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4490 MEDIA_ERR_LOG("SystemApi GetSupportedPhysicalApertures is called!");
4491 return nullptr;
4492 }
4493 MEDIA_DEBUG_LOG("GetSupportedPhysicalApertures is called");
4494 CameraSessionNapi* cameraSessionNapi = nullptr;
4495 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
4496 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
4497 MEDIA_ERR_LOG("CameraSessionNapi::GetSupportedPhysicalApertures parse parameter occur error");
4498 return nullptr;
4499 }
4500
4501 napi_status status;
4502 napi_value result = nullptr;
4503 status = napi_create_array(env, &result);
4504 if (status != napi_ok) {
4505 MEDIA_ERR_LOG("napi_create_array call Failed!");
4506 return nullptr;
4507 }
4508
4509 if (status == napi_ok && cameraSessionNapi->cameraSession_ != nullptr) {
4510 std::vector<std::vector<float>> physicalApertures = {};
4511 int32_t retCode = cameraSessionNapi->cameraSession_->GetSupportedPhysicalApertures(physicalApertures);
4512 MEDIA_INFO_LOG("GetSupportedPhysicalApertures len = %{public}zu", physicalApertures.size());
4513 if (!CameraNapiUtils::CheckError(env, retCode)) {
4514 return nullptr;
4515 }
4516 if (!physicalApertures.empty()) {
4517 result = CameraNapiUtils::ProcessingPhysicalApertures(env, physicalApertures);
4518 }
4519 } else {
4520 MEDIA_ERR_LOG("GetSupportedPhysicalApertures call Failed!");
4521 }
4522 return result;
4523 }
4524
4525 napi_value CameraSessionNapi::GetPhysicalAperture(napi_env env, napi_callback_info info)
4526 {
4527 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4528 MEDIA_ERR_LOG("SystemApi GetPhysicalAperture is called!");
4529 return nullptr;
4530 }
4531 MEDIA_DEBUG_LOG("GetPhysicalAperture is called");
4532 CameraSessionNapi* cameraSessionNapi = nullptr;
4533 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
4534 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
4535 MEDIA_ERR_LOG("CameraSessionNapi::GetPhysicalAperture parse parameter occur error");
4536 return nullptr;
4537 }
4538
4539 if (cameraSessionNapi->cameraSession_ != nullptr) {
4540 float physicalAperture = 0.0;
4541 int32_t retCode = cameraSessionNapi->cameraSession_->GetPhysicalAperture(physicalAperture);
4542 if (!CameraNapiUtils::CheckError(env, retCode)) {
4543 return nullptr;
4544 }
4545 napi_value result = nullptr;
4546 napi_create_double(env, CameraNapiUtils::FloatToDouble(physicalAperture), &result);
4547 return result;
4548 } else {
4549 MEDIA_ERR_LOG("GetPhysicalAperture call Failed!");
4550 }
4551 return CameraNapiUtils::GetUndefinedValue(env);
4552 }
4553
4554 napi_value CameraSessionNapi::SetPhysicalAperture(napi_env env, napi_callback_info info)
4555 {
4556 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4557 MEDIA_ERR_LOG("SystemApi SetPhysicalAperture is called!");
4558 return nullptr;
4559 }
4560 MEDIA_DEBUG_LOG("SetPhysicalAperture is called");
4561 double physicalAperture;
4562 CameraSessionNapi* cameraSessionNapi = nullptr;
4563 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, physicalAperture);
4564 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
4565 MEDIA_ERR_LOG("CameraSessionNapi::SetPhysicalAperture parse parameter occur error");
4566 return nullptr;
4567 }
4568
4569 if (cameraSessionNapi->cameraSession_ != nullptr) {
4570 cameraSessionNapi->cameraSession_->LockForControl();
4571 int32_t retCode = cameraSessionNapi->cameraSession_->SetPhysicalAperture((float)physicalAperture);
4572 MEDIA_INFO_LOG("SetPhysicalAperture set physicalAperture %{public}f!", ConfusingNumber(physicalAperture));
4573 cameraSessionNapi->cameraSession_->UnlockForControl();
4574 if (!CameraNapiUtils::CheckError(env, retCode)) {
4575 return nullptr;
4576 }
4577 } else {
4578 MEDIA_ERR_LOG("SetPhysicalAperture call Failed!");
4579 }
4580 return CameraNapiUtils::GetUndefinedValue(env);
4581 }
4582
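// System-API setter: parses a UsageType value plus an enable flag and applies
// them under LockForControl(). Unlike most setters in this file, it assumes
// cameraSession_ is non-null once parameter parsing succeeds.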
4583 napi_value CameraSessionNapi::SetUsage(napi_env env, napi_callback_info info)
4584 {
4585 MEDIA_DEBUG_LOG("SetUsage is called");
4586 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4587 MEDIA_ERR_LOG("SystemApi SetUsage is called!");
4588 return nullptr;
4589 }
4590
4591 uint32_t usageType;
4592 bool enabled;
4593 CameraSessionNapi* cameraSessionNapi = nullptr;
4594 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, usageType, enabled);
4595 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
4596 MEDIA_ERR_LOG("CameraSessionNapi::SetUsage parse parameter occur error");
4597 return nullptr;
4598 }
4599
4600 cameraSessionNapi->cameraSession_->LockForControl();
4601 cameraSessionNapi->cameraSession_->SetUsage(static_cast<UsageType>(usageType), enabled);
4602 cameraSessionNapi->cameraSession_->UnlockForControl();
4603
4604 MEDIA_DEBUG_LOG("CameraSessionNapi::SetUsage success");
4605
4606 return CameraNapiUtils::GetUndefinedValue(env);
4607 }
4608
4609 void CameraSessionNapi::RegisterExposureCallbackListener(
4610 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4611 {
4612 if (exposureCallback_ == nullptr) {
4613 exposureCallback_ = std::make_shared<ExposureCallbackListener>(env);
4614 cameraSession_->SetExposureCallback(exposureCallback_);
4615 }
4616 exposureCallback_->SaveCallbackReference(eventName, callback, isOnce);
4617 }
4618
4619 void CameraSessionNapi::UnregisterExposureCallbackListener(
4620 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4621 {
4622 if (exposureCallback_ == nullptr) {
4623 MEDIA_ERR_LOG("exposureCallback is null");
4624 return;
4625 }
4626 exposureCallback_->RemoveCallbackRef(eventName, callback);
4627 }
4628
4629 void CameraSessionNapi::RegisterFocusCallbackListener(
4630 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4631 {
4632 if (focusCallback_ == nullptr) {
4633 focusCallback_ = make_shared<FocusCallbackListener>(env);
4634 cameraSession_->SetFocusCallback(focusCallback_);
4635 }
4636 focusCallback_->SaveCallbackReference(eventName, callback, isOnce);
4637 }
4638
4639 void CameraSessionNapi::UnregisterFocusCallbackListener(
4640 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4641 {
4642 if (focusCallback_ == nullptr) {
4643 MEDIA_ERR_LOG("focusCallback is null");
4644 return;
4645 }
4646 focusCallback_->RemoveCallbackRef(eventName, callback);
4647 }
4648
4649 void CameraSessionNapi::RegisterMacroStatusCallbackListener(
4650 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4651 {
4652 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4653 MEDIA_ERR_LOG("SystemApi on macroStatusChanged is called!");
4654 return;
4655 }
4656 if (macroStatusCallback_ == nullptr) {
4657 macroStatusCallback_ = std::make_shared<MacroStatusCallbackListener>(env);
4658 cameraSession_->SetMacroStatusCallback(macroStatusCallback_);
4659 }
4660 macroStatusCallback_->SaveCallbackReference(eventName, callback, isOnce);
4661 }
4662
4663 void CameraSessionNapi::UnregisterMacroStatusCallbackListener(
4664 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4665 {
4666 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4667 MEDIA_ERR_LOG("SystemApi off macroStatusChanged is called!");
4668 return;
4669 }
4670 if (macroStatusCallback_ == nullptr) {
4671 MEDIA_ERR_LOG("macroStatusCallback is null");
4672 return;
4673 }
4674 macroStatusCallback_->RemoveCallbackRef(eventName, callback);
4675 }
4676
4677 void CameraSessionNapi::RegisterMoonCaptureBoostCallbackListener(
4678 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4679 {
4680 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4681 MEDIA_ERR_LOG("SystemApi on moonCaptureBoostStatus is called!");
4682 return;
4683 }
4684 if (moonCaptureBoostCallback_ == nullptr) {
4685 moonCaptureBoostCallback_ = std::make_shared<MoonCaptureBoostCallbackListener>(env);
4686 cameraSession_->SetMoonCaptureBoostStatusCallback(moonCaptureBoostCallback_);
4687 }
4688 moonCaptureBoostCallback_->SaveCallbackReference(eventName, callback, isOnce);
4689 }
4690
4691 void CameraSessionNapi::UnregisterMoonCaptureBoostCallbackListener(
4692 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4693 {
4694 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4695 MEDIA_ERR_LOG("SystemApi off moonCaptureBoostStatus is called!");
4696 return;
4697 }
4698 if (moonCaptureBoostCallback_ == nullptr) {
4699 MEDIA_ERR_LOG("macroStatusCallback is null");
4700 return;
4701 }
4702 moonCaptureBoostCallback_->RemoveCallbackRef(eventName, callback);
4703 }
4704
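// Feature-detection listeners are keyed as eventName + featureType, so every
// SceneFeature keeps its own callback list. Subscribing to FEATURE_LOW_LIGHT_BOOST
// or FEATURE_TRIPOD_DETECTION also turns the corresponding detection on; the
// matching Unregister function turns it off again once the last callback for that
// feature is removed.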
4705 void CameraSessionNapi::RegisterFeatureDetectionStatusListener(
4706 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4707 {
4708 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4709 MEDIA_ERR_LOG("SystemApi on featureDetectionStatus is called!");
4710 return;
4711 }
4712 int32_t featureType = SceneFeature::FEATURE_ENUM_MAX;
4713 CameraNapiParamParser jsParamParser(env, args, featureType);
4714 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "Invalid feature type")) {
4715 MEDIA_ERR_LOG("CameraSessionNapi::RegisterFeatureDetectionStatusListener Invalid feature type");
4716 return;
4717 }
4718 if (featureType < SceneFeature::FEATURE_ENUM_MIN || featureType >= SceneFeature::FEATURE_ENUM_MAX) {
4719 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "scene feature invalid");
4720 MEDIA_ERR_LOG("CameraSessionNapi::RegisterFeatureDetectionStatusListener scene feature invalid");
4721 return;
4722 }
4723
4724 if (featureDetectionStatusCallback_ == nullptr) {
4725 featureDetectionStatusCallback_ = std::make_shared<FeatureDetectionStatusCallbackListener>(env);
4726 cameraSession_->SetFeatureDetectionStatusCallback(featureDetectionStatusCallback_);
4727 }
4728
4729 if (featureType == SceneFeature::FEATURE_LOW_LIGHT_BOOST) {
4730 cameraSession_->LockForControl();
4731 cameraSession_->EnableLowLightDetection(true);
4732 cameraSession_->UnlockForControl();
4733 }
4734 if (featureType == SceneFeature::FEATURE_TRIPOD_DETECTION) {
4735 cameraSession_->LockForControl();
4736 cameraSession_->EnableTripodDetection(true);
4737 cameraSession_->UnlockForControl();
4738 }
4739 featureDetectionStatusCallback_->SaveCallbackReference(eventName + std::to_string(featureType), callback, isOnce);
4740 }
4741
4742 void CameraSessionNapi::UnregisterFeatureDetectionStatusListener(
4743 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4744 {
4745 if (!CameraNapiSecurity::CheckSystemApp(env)) {
4746 MEDIA_ERR_LOG("SystemApi off featureDetectionStatus is called!");
4747 return;
4748 }
4749 if (featureDetectionStatusCallback_ == nullptr) {
4750 MEDIA_WARNING_LOG("featureDetectionStatusCallback_ is null");
4751 return;
4752 }
4753 int32_t featureType = SceneFeature::FEATURE_ENUM_MAX;
4754 CameraNapiParamParser jsParamParser(env, args, featureType);
4755 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "Invalid feature type")) {
4756 MEDIA_ERR_LOG("CameraSessionNapi::RegisterFeatureDetectionStatusListener Invalid feature type");
4757 return;
4758 }
4759 if (featureType < SceneFeature::FEATURE_ENUM_MIN || featureType >= SceneFeature::FEATURE_ENUM_MAX) {
4760 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "scene feature invalid");
4761 MEDIA_ERR_LOG("CameraSessionNapi::RegisterFeatureDetectionStatusListener scene feature invalid");
4762 return;
4763 }
4764
4765 featureDetectionStatusCallback_->RemoveCallbackRef(eventName + std::to_string(featureType), callback);
4766
4767 if (featureType == SceneFeature::FEATURE_LOW_LIGHT_BOOST &&
4768 !featureDetectionStatusCallback_->IsFeatureSubscribed(SceneFeature::FEATURE_LOW_LIGHT_BOOST)) {
4769 cameraSession_->LockForControl();
4770 cameraSession_->EnableLowLightDetection(false);
4771 cameraSession_->UnlockForControl();
4772 }
4773 if (featureType == SceneFeature::FEATURE_TRIPOD_DETECTION &&
4774 !featureDetectionStatusCallback_->IsFeatureSubscribed(SceneFeature::FEATURE_TRIPOD_DETECTION)) {
4775 cameraSession_->LockForControl();
4776 cameraSession_->EnableTripodDetection(false);
4777 cameraSession_->UnlockForControl();
4778 }
4779 }
4780
4781 void CameraSessionNapi::RegisterSessionErrorCallbackListener(
4782 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4783 {
4784 if (sessionCallback_ == nullptr) {
4785 sessionCallback_ = std::make_shared<SessionCallbackListener>(env);
4786 cameraSession_->SetCallback(sessionCallback_);
4787 }
4788 sessionCallback_->SaveCallbackReference(eventName, callback, isOnce);
4789 }
4790
4791 void CameraSessionNapi::UnregisterSessionErrorCallbackListener(
4792 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4793 {
4794 if (sessionCallback_ == nullptr) {
4795 MEDIA_ERR_LOG("sessionCallback is null");
4796 return;
4797 }
4798 sessionCallback_->RemoveCallbackRef(eventName, callback);
4799 }
4800
4801 void CameraSessionNapi::RegisterEffectSuggestionCallbackListener(
4802 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4803 {
4804 if (effectSuggestionCallback_ == nullptr) {
4805 auto effectSuggestionCallback = std::make_shared<EffectSuggestionCallbackListener>(env);
4806 effectSuggestionCallback_ = effectSuggestionCallback;
4807 cameraSession_->SetEffectSuggestionCallback(effectSuggestionCallback);
4808 }
4809 effectSuggestionCallback_->SaveCallbackReference(eventName, callback, isOnce);
4810 }
4811
4812 void CameraSessionNapi::UnregisterEffectSuggestionCallbackListener(
4813 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4814 {
4815 if (effectSuggestionCallback_ == nullptr) {
4816 MEDIA_ERR_LOG("effectSuggestionCallback is null");
4817 } else {
4818 effectSuggestionCallback_->RemoveCallbackRef(eventName, callback);
4819 }
4820 }
4821
4822 void CameraSessionNapi::RegisterAbilityChangeCallbackListener(
4823 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4824 {
4825 if (abilityCallback_ == nullptr) {
4826 auto abilityCallback = std::make_shared<AbilityCallbackListener>(env);
4827 abilityCallback_ = abilityCallback;
4828 cameraSession_->SetAbilityCallback(abilityCallback);
4829 }
4830 abilityCallback_->SaveCallbackReference(eventName, callback, isOnce);
4831 }
4832
4833 void CameraSessionNapi::UnregisterAbilityChangeCallbackListener(
4834 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4835 {
4836 if (abilityCallback_ == nullptr) {
4837 MEDIA_ERR_LOG("abilityCallback is null");
4838 } else {
4839 abilityCallback_->RemoveCallbackRef(eventName, callback);
4840 }
4841 }
4842
4843 void CameraSessionNapi::RegisterSmoothZoomCallbackListener(
4844 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4845 {
4846 if (smoothZoomCallback_ == nullptr) {
4847 smoothZoomCallback_ = std::make_shared<SmoothZoomCallbackListener>(env);
4848 cameraSession_->SetSmoothZoomCallback(smoothZoomCallback_);
4849 }
4850 smoothZoomCallback_->SaveCallbackReference(eventName, callback, isOnce);
4851 }
4852
4853 void CameraSessionNapi::UnregisterSmoothZoomCallbackListener(
4854 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4855 {
4856 if (smoothZoomCallback_ == nullptr) {
4857 MEDIA_ERR_LOG("smoothZoomCallback is null");
4858 return;
4859 }
4860 smoothZoomCallback_->RemoveCallbackRef(eventName, callback);
4861 }
4862
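// The listeners below (exposureInfo, isoInfo, apertureInfo, luminationInfo,
// slowMotionStatus, tryAEInfo, focusTrackingInfo) are not supported by the generic
// session: both register and unregister throw OPERATION_NOT_ALLOWED. Session types
// that do support these events presumably override these handlers.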
4863 void CameraSessionNapi::RegisterExposureInfoCallbackListener(
4864 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4865 {
4866 CameraNapiUtils::ThrowError(
4867 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be registered in current session!");
4868 }
4869
4870 void CameraSessionNapi::UnregisterExposureInfoCallbackListener(
4871 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4872 {
4873 CameraNapiUtils::ThrowError(
4874 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be unregistered in current session!");
4875 }
4876
4877 void CameraSessionNapi::RegisterIsoInfoCallbackListener(
4878 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4879 {
4880 CameraNapiUtils::ThrowError(
4881 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be registered in current session!");
4882 }
4883
4884 void CameraSessionNapi::UnregisterIsoInfoCallbackListener(
4885 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4886 {
4887 CameraNapiUtils::ThrowError(
4888 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be unregistered in current session!");
4889 }
4890
4891 void CameraSessionNapi::RegisterApertureInfoCallbackListener(
4892 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4893 {
4894 CameraNapiUtils::ThrowError(
4895 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be registered in current session!");
4896 }
4897
4898 void CameraSessionNapi::UnregisterApertureInfoCallbackListener(
4899 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4900 {
4901 CameraNapiUtils::ThrowError(
4902 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be unregistered in current session!");
4903 }
4904
4905 void CameraSessionNapi::RegisterLuminationInfoCallbackListener(
4906 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4907 {
4908 CameraNapiUtils::ThrowError(
4909 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be registered in current session!");
4910 }
4911
4912 void CameraSessionNapi::UnregisterLuminationInfoCallbackListener(
4913 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4914 {
4915 CameraNapiUtils::ThrowError(
4916 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be unregistered in current session!");
4917 }
4918
RegisterSlowMotionStateCb(const std::string & eventName,napi_env env,napi_value callback,const std::vector<napi_value> & args,bool isOnce)4919 void CameraSessionNapi::RegisterSlowMotionStateCb(
4920 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4921 {
4922 CameraNapiUtils::ThrowError(
4923 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be unregistered in current session!");
4924 }
4925
4926 void CameraSessionNapi::UnregisterSlowMotionStateCb(
4927 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4928 {
4929 CameraNapiUtils::ThrowError(
4930 env, CameraErrorCode::OPERATION_NOT_ALLOWED, "this type callback can not be unregistered in current session!");
4931 }
4932
4933 void CameraSessionNapi::RegisterTryAEInfoCallbackListener(
4934 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4935 {
4936 CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED,
4937 "this type callback can not be registered in current session!");
4938 }
4939
4940 void CameraSessionNapi::UnregisterTryAEInfoCallbackListener(
4941 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
4942 {
4943 CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED,
4944 "this type callback can not be unregistered in current session!");
4945 }
4946
4947 void CameraSessionNapi::RegisterFocusTrackingInfoCallbackListener(const std::string& eventName,
4948 napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
4949 {
4950 CHECK_ERROR_RETURN_LOG(!CameraNapiSecurity::CheckSystemApp(env),
4951 "SystemApi on focusTrackingInfoAvailable is called");
4952 CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED,
4953 "this type callback can not be registered in current session!");
4954 }
4955
4956 void CameraSessionNapi::UnregisterFocusTrackingInfoCallbackListener(const std::string& eventName,
4957 napi_env env, napi_value callback, const std::vector<napi_value>& args)
4958 {
4959 CHECK_ERROR_RETURN_LOG(!CameraNapiSecurity::CheckSystemApp(env),
4960 "SystemApi off focusTrackingInfoAvailable is called");
4961 CameraNapiUtils::ThrowError(env, CameraErrorCode::OPERATION_NOT_ALLOWED,
4962 "this type callback can not be unregistered in current session!");
4963 }
4964
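// Event table consumed by On/Once/Off through ListenerTemplate: it maps each JS
// event name to its register/unregister member pair. Note that "featureDetection"
// and "featureDetectionStatus" are aliases bound to the same handlers.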
4965 const CameraSessionNapi::EmitterFunctions CameraSessionNapi::fun_map_ = {
4966 { "exposureStateChange", {
4967 &CameraSessionNapi::RegisterExposureCallbackListener,
4968 &CameraSessionNapi::UnregisterExposureCallbackListener} },
4969 { "focusStateChange", {
4970 &CameraSessionNapi::RegisterFocusCallbackListener,
4971 &CameraSessionNapi::UnregisterFocusCallbackListener } },
4972 { "macroStatusChanged", {
4973 &CameraSessionNapi::RegisterMacroStatusCallbackListener,
4974 &CameraSessionNapi::UnregisterMacroStatusCallbackListener } },
4975 { "moonCaptureBoostStatus", {
4976 &CameraSessionNapi::RegisterMoonCaptureBoostCallbackListener,
4977 &CameraSessionNapi::UnregisterMoonCaptureBoostCallbackListener } },
4978 { "featureDetection", {
4979 &CameraSessionNapi::RegisterFeatureDetectionStatusListener,
4980 &CameraSessionNapi::UnregisterFeatureDetectionStatusListener } },
4981 { "featureDetectionStatus", {
4982 &CameraSessionNapi::RegisterFeatureDetectionStatusListener,
4983 &CameraSessionNapi::UnregisterFeatureDetectionStatusListener } },
4984 { "error", {
4985 &CameraSessionNapi::RegisterSessionErrorCallbackListener,
4986 &CameraSessionNapi::UnregisterSessionErrorCallbackListener } },
4987 { "smoothZoomInfoAvailable", {
4988 &CameraSessionNapi::RegisterSmoothZoomCallbackListener,
4989 &CameraSessionNapi::UnregisterSmoothZoomCallbackListener } },
4990 { "slowMotionStatus", {
4991 &CameraSessionNapi::RegisterSlowMotionStateCb,
4992 &CameraSessionNapi::UnregisterSlowMotionStateCb } },
4993 { "exposureInfoChange", {
4994 &CameraSessionNapi::RegisterExposureInfoCallbackListener,
4995 &CameraSessionNapi::UnregisterExposureInfoCallbackListener} },
4996 { "isoInfoChange", {
4997 &CameraSessionNapi::RegisterIsoInfoCallbackListener,
4998 &CameraSessionNapi::UnregisterIsoInfoCallbackListener } },
4999 { "apertureInfoChange", {
5000 &CameraSessionNapi::RegisterApertureInfoCallbackListener,
5001 &CameraSessionNapi::UnregisterApertureInfoCallbackListener } },
5002 { "luminationInfoChange", {
5003 &CameraSessionNapi::RegisterLuminationInfoCallbackListener,
5004 &CameraSessionNapi::UnregisterLuminationInfoCallbackListener } },
5005 { "abilityChange", {
5006 &CameraSessionNapi::RegisterAbilityChangeCallbackListener,
5007 &CameraSessionNapi::UnregisterAbilityChangeCallbackListener } },
5008 { "effectSuggestionChange", {
5009 &CameraSessionNapi::RegisterEffectSuggestionCallbackListener,
5010 &CameraSessionNapi::UnregisterEffectSuggestionCallbackListener } },
5011 { "tryAEInfoChange", {
5012 &CameraSessionNapi::RegisterTryAEInfoCallbackListener,
5013 &CameraSessionNapi::UnregisterTryAEInfoCallbackListener } },
5014 { "lcdFlashStatus", {
5015 &CameraSessionNapi::RegisterLcdFlashStatusCallbackListener,
5016 &CameraSessionNapi::UnregisterLcdFlashStatusCallbackListener } },
5017 { "autoDeviceSwitchStatusChange", {
5018 &CameraSessionNapi::RegisterAutoDeviceSwitchCallbackListener,
5019 &CameraSessionNapi::UnregisterAutoDeviceSwitchCallbackListener } },
5020 { "focusTrackingInfoAvailable", {
5021 &CameraSessionNapi::RegisterFocusTrackingInfoCallbackListener,
5022 &CameraSessionNapi::UnregisterFocusTrackingInfoCallbackListener } },
5023 };
5024
5025 const CameraSessionNapi::EmitterFunctions& CameraSessionNapi::GetEmitterFunctions()
5026 {
5027 return fun_map_;
5028 }
5029
5030 napi_value CameraSessionNapi::On(napi_env env, napi_callback_info info)
5031 {
5032 return ListenerTemplate<CameraSessionNapi>::On(env, info);
5033 }
5034
5035 napi_value CameraSessionNapi::Once(napi_env env, napi_callback_info info)
5036 {
5037 return ListenerTemplate<CameraSessionNapi>::Once(env, info);
5038 }
5039
5040 napi_value CameraSessionNapi::Off(napi_env env, napi_callback_info info)
5041 {
5042 return ListenerTemplate<CameraSessionNapi>::Off(env, info);
5043 }
5044
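// lcdFlashStatus is a system-API event: the first subscription enables LCD flash
// detection on the session, and unregistering removes the callback reference and
// disables detection once no "lcdFlashStatus" callbacks remain.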
5045 void CameraSessionNapi::RegisterLcdFlashStatusCallbackListener(
5046 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
5047 {
5048 if (!CameraNapiSecurity::CheckSystemApp(env)) {
5049 MEDIA_ERR_LOG("SystemApi on LcdFlashStatus is called!");
5050 return;
5051 }
5052 if (cameraSession_ == nullptr) {
5053 MEDIA_ERR_LOG("cameraSession is null!");
5054 return;
5055 }
5056 if (lcdFlashStatusCallback_ == nullptr) {
5057 lcdFlashStatusCallback_ = std::make_shared<LcdFlashStatusCallbackListener>(env);
5058 cameraSession_->SetLcdFlashStatusCallback(lcdFlashStatusCallback_);
5059 }
5060 lcdFlashStatusCallback_->SaveCallbackReference(eventName, callback, isOnce);
5061 cameraSession_->LockForControl();
5062 cameraSession_->EnableLcdFlashDetection(true);
5063 cameraSession_->UnlockForControl();
5064 }
5065
5066 void CameraSessionNapi::UnregisterLcdFlashStatusCallbackListener(
5067 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
5068 {
5069 if (lcdFlashStatusCallback_ == nullptr) {
5070 MEDIA_ERR_LOG("lcdFlashStatusCallback is null");
5071 return;
5072 }
5073 lcdFlashStatusCallback_->RemoveCallbackRef(eventName, callback);
5074 if (lcdFlashStatusCallback_->IsEmpty("lcdFlashStatus")) {
5075 cameraSession_->LockForControl();
5076 cameraSession_->EnableLcdFlashDetection(false);
5077 cameraSession_->UnlockForControl();
5078 }
5079 }
5080
5081 napi_value CameraSessionNapi::IsAutoDeviceSwitchSupported(napi_env env, napi_callback_info info)
5082 {
5083 MEDIA_INFO_LOG("IsAutoDeviceSwitchSupported is called");
5084 CameraSessionNapi* cameraSessionNapi = nullptr;
5085 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi);
5086 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
5087 MEDIA_ERR_LOG("CameraSessionNapi::IsAutoDeviceSwitchSupported parse parameter occur error");
5088 return nullptr;
5089 }
5090 auto result = CameraNapiUtils::GetUndefinedValue(env);
5091 if (cameraSessionNapi->cameraSession_ != nullptr) {
5092 bool isSupported = cameraSessionNapi->cameraSession_->IsAutoDeviceSwitchSupported();
5093 napi_get_boolean(env, isSupported, &result);
5094 } else {
5095 MEDIA_ERR_LOG("CameraSessionNapi::IsAutoDeviceSwitchSupported get native object fail");
5096 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
5097 return nullptr;
5098 }
5099 return result;
5100 }
5101
5102 napi_value CameraSessionNapi::EnableAutoDeviceSwitch(napi_env env, napi_callback_info info)
5103 {
5104 MEDIA_DEBUG_LOG("CameraSessionNapi::EnableAutoDeviceSwitch is called");
5105 bool isEnable;
5106 CameraSessionNapi* cameraSessionNapi = nullptr;
5107 CameraNapiParamParser jsParamParser(env, info, cameraSessionNapi, isEnable);
5108 if (!jsParamParser.AssertStatus(INVALID_ARGUMENT, "parse parameter occur error")) {
5109 MEDIA_ERR_LOG("CameraSessionNapi::EnableAutoDeviceSwitch parse parameter occur error");
5110 return nullptr;
5111 }
5112
5113 if (cameraSessionNapi->cameraSession_ != nullptr) {
5114 MEDIA_INFO_LOG("CameraSessionNapi::EnableAutoDeviceSwitch:%{public}d", isEnable);
5115 cameraSessionNapi->cameraSession_->LockForControl();
5116 int32_t retCode = cameraSessionNapi->cameraSession_->EnableAutoDeviceSwitch(isEnable);
5117 cameraSessionNapi->cameraSession_->UnlockForControl();
5118 if (!CameraNapiUtils::CheckError(env, retCode)) {
5119 MEDIA_ERR_LOG("CameraSessionNapi::EnableAutoSwitchDevice fail %{public}d", retCode);
5120 return nullptr;
5121 }
5122 } else {
5123 MEDIA_ERR_LOG("CameraSessionNapi::EnableAutoDeviceSwitch get native object fail");
5124 CameraNapiUtils::ThrowError(env, INVALID_ARGUMENT, "get native object fail");
5125 return nullptr;
5126 }
5127 return CameraNapiUtils::GetUndefinedValue(env);
5128 }
5129
5130 void CameraSessionNapi::RegisterAutoDeviceSwitchCallbackListener(
5131 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args, bool isOnce)
5132 {
5133 if (cameraSession_ == nullptr) {
5134 MEDIA_ERR_LOG("cameraSession is null!");
5135 return;
5136 }
5137 if (autoDeviceSwitchCallback_ == nullptr) {
5138 autoDeviceSwitchCallback_ = std::make_shared<AutoDeviceSwitchCallbackListener>(env);
5139 cameraSession_->SetAutoDeviceSwitchCallback(autoDeviceSwitchCallback_);
5140 }
5141 autoDeviceSwitchCallback_->SaveCallbackReference(eventName, callback, isOnce);
5142 }
5143
5144 void CameraSessionNapi::UnregisterAutoDeviceSwitchCallbackListener(
5145 const std::string& eventName, napi_env env, napi_value callback, const std::vector<napi_value>& args)
5146 {
5147 if (autoDeviceSwitchCallback_ == nullptr) {
5148 MEDIA_ERR_LOG("autoDeviceSwitchCallback is nullptr.");
5149 return;
5150 }
5151 autoDeviceSwitchCallback_->RemoveCallbackRef(eventName, callback);
5152 }
5153 } // namespace CameraStandard
5154 } // namespace OHOS
5155