/*
 * Copyright (C) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "CameraJS.h"

#include <meta/api/make_callback.h>
#include <meta/interface/intf_task_queue.h>
#include <meta/interface/intf_task_queue_registry.h>
#include <scene/interface/intf_camera.h>
#include <scene/interface/intf_node.h>
#include <scene/interface/intf_raycast.h>
#include <scene/interface/intf_scene.h>

#ifdef __SCENE_ADAPTER__
#include "scene_adapter/scene_adapter.h"
#endif

#include "PromiseBase.h"
#include "Raycast.h"
#include "SceneJS.h"
#include "Vec2Proxy.h"
#include "Vec3Proxy.h"
static constexpr uint32_t ACTIVE_RENDER_BIT = 1; // CameraComponent::ACTIVE_RENDER_BIT comes from lume3d...
const float CAM_INDEX_RENDER = 15.7f;

void* CameraJS::GetInstanceImpl(uint32_t id)
{
    if (id == CameraJS::ID)
        return this;
    return NodeImpl::GetInstanceImpl(id);
}
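// Tear down everything this wrapper owns: the post-process settings object, the cached clear color,
// the scene's dispose hook, and finally the native camera node itself (deactivated and released from the scene).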
void CameraJS::DisposeNative(void* sc)
{
    if (!disposed_) {
        LOG_V("CameraJS::DisposeNative");
        disposed_ = true;

        if (auto* sceneJS = static_cast<SceneJS*>(sc)) {
            sceneJS->ReleaseStrongDispose(reinterpret_cast<uintptr_t>(&scene_));
        }

        // make sure we release postProc settings
        if (auto ps = postProc_.GetObject()) {
            NapiApi::Function func = ps.Get<NapiApi::Function>("destroy");
            if (func) {
                func.Invoke(ps);
            }
        }
        postProc_.Reset();

        clearColor_.reset();
        if (auto cam = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
            // reset the native object refs
            SetNativeObject(nullptr, false);
            SetNativeObject(nullptr, true);

            auto ptr = cam->PostProcess()->GetValue();
            ReleaseObject(interface_pointer_cast<META_NS::IObject>(ptr));
            ptr.reset();
            cam->PostProcess()->SetValue(nullptr);
            // dispose all extra objects.
            resources_.clear();

            if (auto camnode = interface_pointer_cast<SCENE_NS::INode>(cam)) {
                cam->SetActive(false);
                if (auto scene = camnode->GetScene()) {
                    scene->ReleaseNode(BASE_NS::move(camnode), false);
                }
            }
        }
        scene_.Reset();
    }
}
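// Register the "Camera" JS class: the shared node properties plus the camera-specific accessors
// (fov, nearPlane, farPlane, enabled, msaa, postProcess, clearColor) and the
// worldToScreen/screenToWorld/raycast methods.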
void CameraJS::Init(napi_env env, napi_value exports)
{
    BASE_NS::vector<napi_property_descriptor> node_props;
    NodeImpl::GetPropertyDescs(node_props);

    using namespace NapiApi;
    node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetFov, &CameraJS::SetFov>("fov"));
    node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetNear, &CameraJS::SetNear>("nearPlane"));
    node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetFar, &CameraJS::SetFar>("farPlane"));
    node_props.push_back(GetSetProperty<bool, CameraJS, &CameraJS::GetEnabled, &CameraJS::SetEnabled>("enabled"));
    node_props.push_back(GetSetProperty<bool, CameraJS, &CameraJS::GetMSAA, &CameraJS::SetMSAA>("msaa"));
    node_props.push_back(
        GetSetProperty<Object, CameraJS, &CameraJS::GetPostProcess, &CameraJS::SetPostProcess>("postProcess"));
    node_props.push_back(GetSetProperty<Object, CameraJS, &CameraJS::GetColor, &CameraJS::SetColor>("clearColor"));

    node_props.push_back(Method<FunctionContext<Object>, CameraJS, &CameraJS::WorldToScreen>("worldToScreen"));
    node_props.push_back(Method<FunctionContext<Object>, CameraJS, &CameraJS::ScreenToWorld>("screenToWorld"));
    node_props.push_back(Method<FunctionContext<Object, Object>, CameraJS, &CameraJS::Raycast>("raycast"));

    napi_value func;
    auto status = napi_define_class(env, "Camera", NAPI_AUTO_LENGTH, BaseObject::ctor<CameraJS>(), nullptr,
        node_props.size(), node_props.data(), &func);

    NapiApi::MyInstanceState* mis = nullptr;
    GetInstanceData(env, (void**)&mis);
    if (mis) {
        mis->StoreCtor("Camera", func);
    }
}

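// Two construction paths: with no JS arguments this is an internal create and the caller finishes
// initialization; with (scene, params) arguments the camera node is created in the given scene
// using the optional name/path/renderPipeline parameters, or linked to an existing native object.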
CameraJS::CameraJS(napi_env e, napi_callback_info i) : BaseObject<CameraJS>(e, i), NodeImpl(NodeImpl::CAMERA)
{
    NapiApi::FunctionContext<NapiApi::Object, NapiApi::Object> fromJs(e, i);
    if (!fromJs) {
        // no arguments, so internal create.
        // expecting caller to finish initialization
        return;
    }
    // JavaScript call with arguments.
    NapiApi::Object scene = fromJs.Arg<0>();
    scene_ = scene;

    auto scn = GetNativeMeta<SCENE_NS::IScene>(scene);
    if (scn == nullptr) {
        LOG_F("Invalid scene for CameraJS!");
        return;
    }

    NapiApi::Object meJs(fromJs.This());
    if (auto sceneJS = GetJsWrapper<SceneJS>(scene)) {
        sceneJS->StrongDisposeHook(reinterpret_cast<uintptr_t>(&scene_), meJs);
    }

    NapiApi::Object args = fromJs.Arg<1>();
    auto obj = GetNativeObjectParam<META_NS::IObject>(args);
    if (obj) {
        // linking to an existing object.
        SetNativeObject(obj, false);
        StoreJsObj(obj, meJs);
        return;
    }

    // collect parameters
    NapiApi::Value<BASE_NS::string> name;
    NapiApi::Value<BASE_NS::string> path;

    if (auto prm = args.Get("name")) {
        name = NapiApi::Value<BASE_NS::string>(e, prm);
    }
    if (auto prm = args.Get("path")) {
        path = NapiApi::Value<BASE_NS::string>(e, prm);
    }

    uint32_t pipeline = uint32_t(SCENE_NS::CameraPipeline::LIGHT_FORWARD);
    if (auto prm = args.Get("renderPipeline")) {
        pipeline = NapiApi::Value<uint32_t>(e, prm);
    }

    BASE_NS::string nodePath;

    if (path.IsDefined()) {
        // create using path
        nodePath = path.valueOrDefault("");
    } else if (name.IsDefined()) {
        // use the name as path (creates under root)
        nodePath = name.valueOrDefault("");
    }

    // Create the actual camera object.
    SCENE_NS::ICamera::Ptr node =
        scn->CreateNode<SCENE_NS::ICamera>(nodePath, SCENE_NS::ClassId::CameraNode).GetResult();
    if (!node) {
        LOG_E("Failed to create camera node");
        return;
    }
    node->RenderingPipeline()->SetValue(SCENE_NS::CameraPipeline(pipeline));
    node->SetActive(false);
    node->ColorTargetCustomization()->SetValue({SCENE_NS::ColorFormat{BASE_NS::BASE_FORMAT_R16G16B16A16_SFLOAT}});
    SetNativeObject(interface_pointer_cast<META_NS::IObject>(node), false);
    node.reset();
    StoreJsObj(GetNativeObject(), meJs);

    if (name.IsDefined()) {
        // set the name of the object, if we were given one.
        meJs.Set("name", name);
    }
    meJs.Set("postProcess", fromJs.GetNull());
}
void CameraJS::Finalize(napi_env env)
{
    // make sure the camera gets deactivated (the actual c++ camera might not be destroyed here)
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        if (auto scene = interface_cast<SCENE_NS::INode>(camera)->GetScene()) {
            camera->SetActive(false);
        }
    }
    DisposeNative(GetJsWrapper<SceneJS>(scene_.GetObject()));
    BaseObject<CameraJS>::Finalize(env);
}
CameraJS::~CameraJS()
{
    LOG_V("CameraJS -- ");
}
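// The property accessors below forward to the native ICamera properties; each one validates the scene
// reference first, so a camera whose scene has already been released returns undefined / is a no-op.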
napi_value CameraJS::GetFov(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }

    float fov = 0.0f;
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        fov = camera->FoV()->GetValue();
    }

    return ctx.GetNumber(fov);
}

void CameraJS::SetFov(NapiApi::FunctionContext<float>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    float fov = ctx.Arg<0>();
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        camera->FoV()->SetValue(fov);
    }
}

napi_value CameraJS::GetEnabled(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    bool active = false;
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        active = camera->IsActive();
    }
    return ctx.GetBoolean(active);
}

void CameraJS::SetEnabled(NapiApi::FunctionContext<bool>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    bool active = ctx.Arg<0>();
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        ExecSyncTask([camera, active]() {
            if (camera) {
                uint32_t flags = camera->SceneFlags()->GetValue();
                if (active) {
                    flags |= uint32_t(SCENE_NS::CameraSceneFlag::MAIN_CAMERA_BIT);
                } else {
                    flags &= ~uint32_t(SCENE_NS::CameraSceneFlag::MAIN_CAMERA_BIT);
                }
                camera->SceneFlags()->SetValue(flags);
                camera->SetActive(active);
            }
            return META_NS::IAny::Ptr {};
        });
    }
}


napi_value CameraJS::GetFar(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    float far = 0.0f;
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        far = camera->FarPlane()->GetValue();
    }
    return ctx.GetNumber(far);
}

void CameraJS::SetFar(NapiApi::FunctionContext<float>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    float far = ctx.Arg<0>();
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        camera->FarPlane()->SetValue(far);
    }
}

napi_value CameraJS::GetNear(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    float near = 0.0f;
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        near = camera->NearPlane()->GetValue();
    }
    return ctx.GetNumber(near);
}

void CameraJS::SetNear(NapiApi::FunctionContext<float>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    float near = ctx.Arg<0>();
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        camera->NearPlane()->SetValue(near);
    }
}

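// The post-process settings are exposed as a separate JS object. The wrapper keeps a StrongRef so the
// same JS object is handed back on every get, and so it can be destroyed explicitly when it is replaced
// or when this camera is disposed.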
napi_value CameraJS::GetPostProcess(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        SCENE_NS::IPostProcess::Ptr postproc = camera->PostProcess()->GetValue();
        if (!postproc) {
            // early out.
            return ctx.GetNull();
        }
        auto obj = interface_pointer_cast<META_NS::IObject>(postproc);
        if (auto cached = FetchJsObj(obj)) {
            // always return the same js object.
            return cached.ToNapiValue();
        }
        NapiApi::Env env(ctx.Env());
        NapiApi::Object parms(env);
        napi_value args[] = { ctx.This().ToNapiValue(), parms.ToNapiValue() };
        // take ownership of the object.
        postProc_ = NapiApi::StrongRef(CreateFromNativeInstance(env, obj, false, BASE_NS::countof(args), args));
        return postProc_.GetValue();
    }
    return ctx.GetNull();
}

void CameraJS::SetPostProcess(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }

    auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject());
    if (!camera) {
        return;
    }
    NapiApi::Object psp = ctx.Arg<0>();
    if (auto currentlySet = postProc_.GetObject()) {
        if (psp.StrictEqual(currentlySet)) {
            // setting exactly the same postprocess object; do nothing.
            return;
        }
        NapiApi::Function func = currentlySet.Get<NapiApi::Function>("destroy");
        if (func) {
            func.Invoke(currentlySet);
        }
        postProc_.Reset();
    }

    SCENE_NS::IPostProcess::Ptr postproc;
    if (psp) {
        // see if we have a native backing for the input object..
        TrueRootObject* native = psp.Native<TrueRootObject>();
        if (!native) {
            // nope.. so create a new bridged object.
            napi_value args[] = {
                ctx.This().ToNapiValue(), // Camera..
                ctx.Arg<0>().ToNapiValue() // "javascript object for values"
            };
            psp = { GetJSConstructor(ctx.Env(), "PostProcessSettings"), BASE_NS::countof(args), args };
            native = psp.Native<TrueRootObject>();
        }
        postProc_ = NapiApi::StrongRef(psp);

        if (native) {
            postproc = interface_pointer_cast<SCENE_NS::IPostProcess>(native->GetNativeObject());
        }
    }
    camera->PostProcess()->SetValue(postproc);
}

napi_value CameraJS::GetColor(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }

    auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetThisNativeObject(ctx));
    if (!camera) {
        return ctx.GetUndefined();
    }
    uint32_t curBits = camera->PipelineFlags()->GetValue();
    bool enabled = curBits & static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
    if (!enabled) {
        return ctx.GetNull();
    }

    if (clearColor_ == nullptr) {
        clearColor_ = BASE_NS::make_unique<ColorProxy>(ctx.Env(), camera->ClearColor());
    }
    return clearColor_->Value();
}
void CameraJS::SetColor(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetThisNativeObject(ctx));
    if (!camera) {
        return;
    }
    if (clearColor_ == nullptr) {
        clearColor_ = BASE_NS::make_unique<ColorProxy>(ctx.Env(), camera->ClearColor());
    }
    NapiApi::Object obj = ctx.Arg<0>();
    if (obj) {
        clearColor_->SetValue(obj);
        clearColorEnabled_ = true;
    } else {
        clearColorEnabled_ = false;
    }
    // re-apply both the MSAA and clear-color pipeline bits from the cached state
    uint32_t curBits = camera->PipelineFlags()->GetValue();
    if (msaaEnabled_) {
        curBits |= static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
    } else {
        curBits &= ~static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
    }
    if (clearColorEnabled_) {
        curBits |= static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
    } else {
        curBits &= ~static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
    }
    camera->PipelineFlags()->SetValue(curBits);
}

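// worldToScreen() and screenToWorld() share one implementation; the direction is selected at compile
// time via the ProjectionDirection template parameter.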
napi_value CameraJS::WorldToScreen(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    return ProjectCoords<ProjectionDirection::WORLD_TO_SCREEN>(ctx);
}

napi_value CameraJS::ScreenToWorld(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    return ProjectCoords<ProjectionDirection::SCREEN_TO_WORLD>(ctx);
}

template <CameraJS::ProjectionDirection dir>
napi_value CameraJS::ProjectCoords(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    NapiApi::StrongRef scene;
    auto inCoordJs = ctx.Arg<0>();
    auto raycastSelf = SCENE_NS::ICameraRayCast::Ptr {};
    auto inCoord = BASE_NS::Math::Vec3 {};
    if (!ExtractRaycastStuff(inCoordJs, scene, raycastSelf, inCoord)) {
        return {};
    }
    auto outCoord = BASE_NS::Math::Vec3 {};
    if constexpr (dir == ProjectionDirection::WORLD_TO_SCREEN) {
        outCoord = raycastSelf->WorldPositionToScreen(inCoord).GetResult();
    } else {
        outCoord = raycastSelf->ScreenPositionToWorld(inCoord).GetResult();
    }
    return Vec3Proxy::ToNapiObject(outCoord, ctx.Env()).ToNapiValue();
}

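// raycast() returns a JS Promise. The cast itself is deferred to the engine task queue; when the task
// runs, the promise is settled (SettleLater) with the hit list produced by the synchronous overload below.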
napi_value CameraJS::Raycast(NapiApi::FunctionContext<NapiApi::Object, NapiApi::Object>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }

    struct Promise : public PromiseBase {
        using PromiseBase::PromiseBase;
        NapiApi::StrongRef this_;
        NapiApi::StrongRef coordArg_;
        NapiApi::StrongRef optionArg_;
        bool SetResult() override
        {
            auto* rootObject = static_cast<CameraJS*>(this_.GetObject().Native<TrueRootObject>());
            if (rootObject == nullptr) {
                LOG_E("rootObject is nullptr");
                return false;
            }
            result_ = rootObject->Raycast(this_.GetEnv(), coordArg_.GetObject(), optionArg_.GetObject());
            return (bool)result_;
        }
    };
    auto promise = new Promise(ctx.Env());
    auto jsPromise = promise->ToNapiValue();
    promise->this_ = NapiApi::StrongRef(ctx.This());
    promise->coordArg_ = NapiApi::StrongRef(ctx.Arg<0>());
    promise->optionArg_ = NapiApi::StrongRef(ctx.Arg<1>());

    auto func = [promise]() {
        promise->SettleLater();
        return false;
    };
    auto task = META_NS::MakeCallback<META_NS::ITaskQueueTask>(BASE_NS::move(func));
    META_NS::GetTaskQueueRegistry().GetTaskQueue(ENGINE_THREAD)->AddTask(task);

    return jsPromise;
}

napi_value CameraJS::Raycast(napi_env env, NapiApi::Object screenCoordJs, NapiApi::Object optionsJs)
{
    auto scene = NapiApi::StrongRef {};
    auto raycastSelf = SCENE_NS::ICameraRayCast::Ptr {};
    auto screenCoord = BASE_NS::Math::Vec2 {};
    if (!ExtractRaycastStuff(screenCoordJs, scene, raycastSelf, screenCoord)) {
        return {};
    }

    auto options = ToNativeOptions(env, optionsJs);
    const auto hitResults = raycastSelf->CastRay(screenCoord, options).GetResult();

    napi_value hitList;
    napi_create_array_with_length(env, hitResults.size(), &hitList);
    size_t i = 0;
    for (const auto& hitResult : hitResults) {
        auto hitObject = CreateRaycastResult(scene, env, hitResult);
        napi_set_element(env, hitList, i, hitObject.ToNapiValue());
        i++;
    }
    return hitList;
}

template<typename CoordType>
bool CameraJS::ExtractRaycastStuff(const NapiApi::Object& jsCoord, NapiApi::StrongRef& scene,
    SCENE_NS::ICameraRayCast::Ptr& raycastSelf, CoordType& nativeCoord)
{
    scene = NapiApi::StrongRef { scene_.GetObject() };
    if (!scene.GetValue()) {
        LOG_E("Scene is gone");
        return false;
    }

    raycastSelf = interface_pointer_cast<SCENE_NS::ICameraRayCast>(GetNativeObject());
    if (!raycastSelf) {
        LOG_F("Unable to access raycast API");
        return false;
    }

    bool conversionOk = false;
    if constexpr (BASE_NS::is_same_v<CoordType, BASE_NS::Math::Vec2>) {
        nativeCoord = Vec2Proxy::ToNative(jsCoord, conversionOk);
    } else {
        nativeCoord = Vec3Proxy::ToNative(jsCoord, conversionOk);
    }
    if (!conversionOk) {
        LOG_E("Invalid position argument");
        return false;
    }
    return true;
}

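// CreateObject/ReleaseObject track objects created through this camera (e.g. post-process settings)
// in resources_, so DisposeNative can drop every extra reference when the camera goes away.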
META_NS::IObject::Ptr CameraJS::CreateObject(const META_NS::ClassInfo& type)
{
    if (auto scn = GetNativeMeta<SCENE_NS::IScene>(scene_.GetObject())) {
        META_NS::IObject::Ptr obj = scn->CreateObject(type).GetResult();
        if (obj) {
            resources_[(uintptr_t)obj.get()] = obj;
        }
        return obj;
    }
    return nullptr;
}
void CameraJS::ReleaseObject(const META_NS::IObject::Ptr& obj)
{
    if (obj) {
        resources_.erase((uintptr_t)obj.get());
    }
}

napi_value CameraJS::GetMSAA(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    bool enabled = false;
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        uint32_t curBits = camera->PipelineFlags()->GetValue();
        enabled = curBits & static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
    }
    return ctx.GetBoolean(enabled);
}

void CameraJS::SetMSAA(NapiApi::FunctionContext<bool>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    msaaEnabled_ = ctx.Arg<0>();
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        uint32_t curBits = camera->PipelineFlags()->GetValue();
        if (msaaEnabled_) {
            curBits |= static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
        } else {
            curBits &= ~static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
        }
        if (clearColorEnabled_) {
            curBits |= static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
        } else {
            curBits &= ~static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
        }
        camera->PipelineFlags()->SetValue(curBits);
    }
}