/*
 * Copyright (C) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "CameraJS.h"

#include <meta/api/make_callback.h>
#include <meta/interface/intf_task_queue_registry.h>
#include <scene/interface/intf_camera.h>
#include <scene/interface/intf_node.h>
#include <scene/interface/intf_raycast.h>
#include <scene/interface/intf_scene.h>

#include "ParamParsing.h"
#include "Promise.h"
#include "Raycast.h"
#include "SceneJS.h"
#include "Vec2Proxy.h"
#include "Vec3Proxy.h"
#include "nodejstaskqueue.h"

static constexpr uint32_t ACTIVE_RENDER_BIT = 1; // CameraComponent::ACTIVE_RENDER_BIT comes from lume3d...

void* CameraJS::GetInstanceImpl(uint32_t id)
{
    if (id == CameraJS::ID) {
        return this;
    }
    return NodeImpl::GetInstanceImpl(id);
}
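// Release all native resources held by this camera wrapper: detach from the SceneJS dispose hook,
// destroy the JS post-process wrapper, drop the native post-process and extra objects, and remove
// the camera node from the scene if this wrapper is not attached elsewhere.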
void CameraJS::DisposeNative(void* sc)
{
    if (!disposed_) {
        LOG_V("CameraJS::DisposeNative");
        disposed_ = true;
        auto cam = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject());

        if (auto* sceneJS = static_cast<SceneJS*>(sc)) {
            sceneJS->ReleaseStrongDispose(reinterpret_cast<uintptr_t>(&scene_));
        }

        // make sure we release postProc settings
        if (auto ps = postProc_.GetObject()) {
            NapiApi::Function func = ps.Get<NapiApi::Function>("destroy");
            if (func) {
                func.Invoke(ps);
            }
        }
        postProc_.Reset();

        clearColor_.reset();
        if (cam) {
            UnsetNativeObject();

            auto ptr = cam->PostProcess()->GetValue();
            ReleaseObject(interface_pointer_cast<META_NS::IObject>(ptr));
            ptr.reset();
            cam->PostProcess()->SetValue(nullptr);
            // dispose all extra objects.
            resources_.clear();

            if (!IsAttached()) {
                cam->SetActive(false);
                if (auto node = interface_pointer_cast<SCENE_NS::INode>(cam)) {
                    if (auto scene = node->GetScene()) {
                        scene->RemoveNode(BASE_NS::move(node)).Wait();
                    }
                }
            }
        }
        scene_.Reset();
    }
}
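// Register the "Camera" JS class: node properties plus camera-specific accessors (fov, nearPlane,
// farPlane, enabled, msaa, postProcess, clearColor) and the worldToScreen/screenToWorld/raycast
// methods. The constructor is stored in the per-instance state for later instantiation.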
void CameraJS::Init(napi_env env, napi_value exports)
{
    BASE_NS::vector<napi_property_descriptor> node_props;
    NodeImpl::GetPropertyDescs(node_props);

    using namespace NapiApi;
    node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetFov, &CameraJS::SetFov>("fov"));
    node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetNear, &CameraJS::SetNear>("nearPlane"));
    node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetFar, &CameraJS::SetFar>("farPlane"));
    node_props.push_back(GetSetProperty<bool, CameraJS, &CameraJS::GetEnabled, &CameraJS::SetEnabled>("enabled"));
    node_props.push_back(GetSetProperty<bool, CameraJS, &CameraJS::GetMSAA, &CameraJS::SetMSAA>("msaa"));
    node_props.push_back(
        GetSetProperty<Object, CameraJS, &CameraJS::GetPostProcess, &CameraJS::SetPostProcess>("postProcess"));
    node_props.push_back(GetSetProperty<Object, CameraJS, &CameraJS::GetColor, &CameraJS::SetColor>("clearColor"));

    node_props.push_back(Method<FunctionContext<Object>, CameraJS, &CameraJS::WorldToScreen>("worldToScreen"));
    node_props.push_back(Method<FunctionContext<Object>, CameraJS, &CameraJS::ScreenToWorld>("screenToWorld"));
    node_props.push_back(Method<FunctionContext<Object, Object>, CameraJS, &CameraJS::Raycast>("raycast"));

    napi_value func;
    auto status = napi_define_class(env, "Camera", NAPI_AUTO_LENGTH, BaseObject::ctor<CameraJS>(), nullptr,
        node_props.size(), node_props.data(), &func);

    NapiApi::MyInstanceState* mis;
    NapiApi::MyInstanceState::GetInstance(env, (void**)&mis);
    if (mis) {
        mis->StoreCtor("Camera", func);
    }
}

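// JS constructor. Called either internally (no arguments; the caller finishes initialization) or
// from JS with (scene, sceneNodeParameters). The latter stores the scene reference, registers a
// dispose hook with SceneJS, and applies the optional node name.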
CameraJS::CameraJS(napi_env e, napi_callback_info i) : BaseObject(e, i), NodeImpl(NodeImpl::CAMERA)
{
    NapiApi::FunctionContext<NapiApi::Object, NapiApi::Object> fromJs(e, i);
    if (!fromJs) {
        // No arguments, so this is an internal create.
        // The caller is expected to finish initialization.
        return;
    }
    // JavaScript call with arguments.
    NapiApi::Object scene = fromJs.Arg<0>();
    scene_ = scene;

    auto scn = scene.GetNative<SCENE_NS::IScene>();
    if (scn == nullptr) {
        LOG_F("Invalid scene for CameraJS!");
        return;
    }

    NapiApi::Object meJs(fromJs.This());
    if (const auto sceneJS = scene.GetJsWrapper<SceneJS>()) {
        sceneJS->StrongDisposeHook(reinterpret_cast<uintptr_t>(&scene_), meJs);
    }

    auto node = GetNativeObject<SCENE_NS::ICamera>();
    if (!node) {
        LOG_E("Cannot finish creating a camera: Native camera object missing");
        assert(false);
        return;
    }

    auto sceneNodeParameters = NapiApi::Object { fromJs.Arg<1>() };
    if (const auto name = ExtractName(sceneNodeParameters); !name.empty()) {
        meJs.Set("name", name);
    }
    meJs.Set("postProcess", fromJs.GetNull());
}
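// Deactivate the camera and dispose the native resources before the JS wrapper is finalized.
// The underlying C++ camera object may outlive this wrapper.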
void CameraJS::Finalize(napi_env env)
{
    // make sure the camera gets deactivated (the actual C++ camera might not be destroyed here)
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        if (auto scene = interface_cast<SCENE_NS::INode>(camera)->GetScene()) {
            camera->SetActive(false);
        }
    }
    DisposeNative(scene_.GetObject().GetJsWrapper<SceneJS>());
    BaseObject::Finalize(env);
}
CameraJS::~CameraJS()
{
    LOG_V("CameraJS -- ");
}
napi_value CameraJS::GetFov(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }

    float fov = 0.0;
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        fov = camera->FoV()->GetValue();
    }

    return ctx.GetNumber(fov);
}

void CameraJS::SetFov(NapiApi::FunctionContext<float>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    float fov = ctx.Arg<0>();
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        camera->FoV()->SetValue(fov);
    }
}

napi_value CameraJS::GetEnabled(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    bool active = false;
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        active = camera->IsActive();
    }
    return ctx.GetBoolean(active);
}

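// Enabling activates rendering and marks this camera as the main camera; disabling clears both.
// The flag update runs as a synchronous engine task.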
void CameraJS::SetEnabled(NapiApi::FunctionContext<bool>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    bool active = ctx.Arg<0>();
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        ExecSyncTask([camera, active]() {
            uint32_t flags = camera->SceneFlags()->GetValue();
            if (active) {
                flags |= uint32_t(SCENE_NS::CameraSceneFlag::MAIN_CAMERA_BIT);
            } else {
                flags &= ~uint32_t(SCENE_NS::CameraSceneFlag::MAIN_CAMERA_BIT);
            }
            camera->SceneFlags()->SetValue(flags);
            camera->SetActive(active);
            return META_NS::IAny::Ptr {};
        });
    }
}

napi_value CameraJS::GetFar(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    float far = 0.0;
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        far = camera->FarPlane()->GetValue();
    }
    return ctx.GetNumber(far);
}

void CameraJS::SetFar(NapiApi::FunctionContext<float>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    float far = ctx.Arg<0>();
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        camera->FarPlane()->SetValue(far);
    }
}

napi_value CameraJS::GetNear(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    float near = 0.0;
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        near = camera->NearPlane()->GetValue();
    }
    return ctx.GetNumber(near);
}

void CameraJS::SetNear(NapiApi::FunctionContext<float>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    float near = ctx.Arg<0>();
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        camera->NearPlane()->SetValue(near);
    }
}

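// Return the post-process settings object, creating the native post-process lazily on first access.
// The JS wrapper holds only a weak reference to the native object; the camera owns it.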
napi_value CameraJS::GetPostProcess(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        auto postproc = camera->PostProcess()->GetValue();
        if (!postproc) {
            if (auto cameraJs = static_cast<CameraJS*>(ctx.This().GetRoot())) {
                postproc = interface_pointer_cast<SCENE_NS::IPostProcess>(
                    cameraJs->CreateObject(SCENE_NS::ClassId::PostProcess));
                camera->PostProcess()->SetValue(postproc);
            }
        }
        NapiApi::Env env(ctx.Env());
        NapiApi::Object parms(env);
        napi_value args[] = { ctx.This().ToNapiValue(), parms.ToNapiValue() };
        // take ownership of the object.
        postProc_ = NapiApi::StrongRef(CreateFromNativeInstance(env, postproc, PtrType::WEAK, args));
        return postProc_.GetValue();
    }
    return ctx.GetNull();
}

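// Replace the camera's post-process settings. Destroys the previously wrapped settings object,
// bridges a plain JS argument into a native post-process if needed, and assigns the result to the camera.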
void CameraJS::SetPostProcess(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }

    auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject());
    if (!camera) {
        return;
    }
    NapiApi::Object psp = ctx.Arg<0>();
    if (auto currentlySet = postProc_.GetObject()) {
        if (psp.StrictEqual(currentlySet)) {
            // Setting exactly the same post-process object; do nothing.
            return;
        }
        NapiApi::Function func = currentlySet.Get<NapiApi::Function>("destroy");
        if (func) {
            func.Invoke(currentlySet);
        }
        postProc_.Reset();
    }

    SCENE_NS::IPostProcess::Ptr postproc;
    if (psp) {
        // See if we have a native backing for the input object.
        TrueRootObject* native = psp.GetRoot();
        if (!native) {
            // No native backing, so create a new bridged object.
            napi_value args[] = {
                ctx.This().ToNapiValue(),  // Camera..
                ctx.Arg<0>().ToNapiValue() // "javascript object for values"
            };
            if (auto cameraJs = static_cast<CameraJS*>(ctx.This().GetRoot())) {
                auto newPostProc = cameraJs->CreateObject(SCENE_NS::ClassId::PostProcess);
                // PostProcessSettings will store a weak ref of its native. We, the camera, own it.
                psp = CreateFromNativeInstance(ctx.Env(), newPostProc, PtrType::WEAK, args);
                native = psp.GetRoot();
            }
        }
        postProc_ = NapiApi::StrongRef(psp);

        if (native) {
            postproc = interface_pointer_cast<SCENE_NS::IPostProcess>(native->GetNativeObject());
        }
    }
    camera->PostProcess()->SetValue(postproc);
}

napi_value CameraJS::GetColor(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }

    auto camera = ctx.This().GetNative<SCENE_NS::ICamera>();
    if (!camera) {
        return ctx.GetUndefined();
    }
    uint32_t curBits = camera->PipelineFlags()->GetValue();
    bool enabled = curBits & static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
    if (!enabled) {
        return ctx.GetNull();
    }

    if (clearColor_ == nullptr) {
        // camera->ClearColor() is of type Vec4, convert to Color on the fly
        clearColor_ = BASE_NS::make_unique<ColorProxy>(ctx.Env(), camera->ClearColor());
    }
    return clearColor_->Value();
}
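// Set the clear color. A color object enables the clear-color pipeline flag; a null/undefined
// argument disables it. The MSAA bit is re-applied at the same time so it is not lost when the
// pipeline flags are rewritten.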
void CameraJS::SetColor(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    auto camera = ctx.This().GetNative<SCENE_NS::ICamera>();
    if (!camera) {
        return;
    }
    if (clearColor_ == nullptr) {
        clearColor_ = BASE_NS::make_unique<ColorProxy>(ctx.Env(), camera->ClearColor());
    }
    NapiApi::Object obj = ctx.Arg<0>();
    if (obj) {
        clearColor_->SetValue(obj);
        clearColorEnabled_ = true;
    } else {
        clearColorEnabled_ = false;
    }
    // Update the MSAA and clear-color pipeline flags.
    uint32_t curBits = camera->PipelineFlags()->GetValue();
    if (msaaEnabled_) {
        curBits |= static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
    } else {
        curBits &= ~static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
    }
    if (clearColorEnabled_) {
        curBits |= static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
    } else {
        curBits &= ~static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
    }
    camera->PipelineFlags()->SetValue(curBits);
}

napi_value CameraJS::WorldToScreen(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    return ProjectCoords<ProjectionDirection::WORLD_TO_SCREEN>(ctx);
}

napi_value CameraJS::ScreenToWorld(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    return ProjectCoords<ProjectionDirection::SCREEN_TO_WORLD>(ctx);
}

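// Shared implementation for worldToScreen/screenToWorld: converts the JS vector argument, runs the
// projection through the native raycast interface, and returns the result as a Vec3 object.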
template<CameraJS::ProjectionDirection dir>
napi_value CameraJS::ProjectCoords(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    auto inCoordJs = ctx.Arg<0>();
    const auto res = GetRaycastResources<BASE_NS::Math::Vec3>(inCoordJs);
    if (!res.hasEverything) {
        LOG_E("%s", res.errorMsg.c_str());
        return {};
    }

    auto outCoord = BASE_NS::Math::Vec3 {};
    if constexpr (dir == ProjectionDirection::WORLD_TO_SCREEN) {
        outCoord = res.raycastSelf->WorldPositionToScreen(res.nativeCoord).GetResult();
    } else {
        outCoord = res.raycastSelf->ScreenPositionToWorld(res.nativeCoord).GetResult();
    }
    return Vec3Proxy::ToNapiObject(outCoord, ctx.Env()).ToNapiValue();
}

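// Cast a ray from the given screen coordinate with the given options. The returned promise is
// resolved on the JS task queue with an array of raycast hit results, or rejected on bad input.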
napi_value CameraJS::Raycast(NapiApi::FunctionContext<NapiApi::Object, NapiApi::Object>& ctx)
{
    const auto env = ctx.Env();
    auto promise = Promise(env);
    NapiApi::Object screenCoordJs = ctx.Arg<0>();
    NapiApi::Object optionsJs = ctx.Arg<1>();
    auto res = GetRaycastResources<BASE_NS::Math::Vec2>(screenCoordJs);
    if (!res.hasEverything) {
        return promise.Reject(res.errorMsg);
    }

    auto convertToJs = [promise, scene = BASE_NS::move(res.scene)](SCENE_NS::NodeHits hitResults) mutable {
        const auto env = promise.Env();
        napi_value hitList;
        napi_create_array_with_length(env, hitResults.size(), &hitList);
        size_t i = 0;
        for (const auto& hitResult : hitResults) {
            const auto hitObject = CreateRaycastResult(scene, env, hitResult);
            napi_set_element(env, hitList, i, hitObject.ToNapiValue());
            i++;
        }
        promise.Resolve(hitList);
    };

    const auto options = ToNativeOptions(env, optionsJs);
    auto jsQ = META_NS::GetTaskQueueRegistry().GetTaskQueue(JS_THREAD_DEP);
    res.raycastSelf->CastRay(res.nativeCoord, options).Then(BASE_NS::move(convertToJs), jsQ);
    return promise;
}

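// Gather everything a raycast or projection call needs: a strong scene reference, the native
// raycast interface, and the JS coordinate converted to Vec2/Vec3. Any missing piece is reported
// in errorMsg and hasEverything is cleared.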
template<typename CoordType>
CameraJS::RaycastResources<CoordType> CameraJS::GetRaycastResources(const NapiApi::Object& jsCoord)
{
    auto res = RaycastResources<CoordType> {};
    res.scene = NapiApi::StrongRef { scene_.GetObject() };
    if (!res.scene.GetValue()) {
        res.errorMsg = "Scene is gone. ";
    }

    res.raycastSelf = interface_pointer_cast<SCENE_NS::ICameraRayCast>(GetNativeObject());
    if (!res.raycastSelf) {
        res.errorMsg.append("Unable to access raycast API. ");
    }

    auto conversionOk = false;
    if constexpr (BASE_NS::is_same_v<CoordType, BASE_NS::Math::Vec2>) {
        res.nativeCoord = Vec2Proxy::ToNative(jsCoord, conversionOk);
    } else {
        res.nativeCoord = Vec3Proxy::ToNative(jsCoord, conversionOk);
    }
    if (!conversionOk) {
        res.errorMsg.append("Invalid position argument given");
    }
    res.hasEverything = res.errorMsg.empty();
    return res;
}

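// Create a scene object of the given type and keep a strong reference in resources_ so it stays
// alive until released or the camera is disposed; ReleaseObject drops that reference again.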
META_NS::IObject::Ptr CameraJS::CreateObject(const META_NS::ClassInfo& type)
{
    if (auto scn = scene_.GetObject().GetNative<SCENE_NS::IScene>()) {
        META_NS::IObject::Ptr obj = scn->CreateObject(type).GetResult();
        if (obj) {
            resources_[(uintptr_t)obj.get()] = obj;
        }
        return obj;
    }
    return nullptr;
}
void CameraJS::ReleaseObject(const META_NS::IObject::Ptr& obj)
{
    if (obj) {
        resources_.erase((uintptr_t)obj.get());
    }
}

napi_value CameraJS::GetMSAA(NapiApi::FunctionContext<>& ctx)
{
    if (!validateSceneRef()) {
        return ctx.GetUndefined();
    }
    bool enabled = false;
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        uint32_t curBits = camera->PipelineFlags()->GetValue();
        enabled = curBits & static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
    }
    return ctx.GetBoolean(enabled);
}

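// Toggle the MSAA pipeline bit; the clear-color bit is re-applied at the same time so it is not
// lost when the pipeline flags are rewritten.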
void CameraJS::SetMSAA(NapiApi::FunctionContext<bool>& ctx)
{
    if (!validateSceneRef()) {
        return;
    }
    msaaEnabled_ = ctx.Arg<0>();
    if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        uint32_t curBits = camera->PipelineFlags()->GetValue();
        if (msaaEnabled_) {
            curBits |= static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
        } else {
            curBits &= ~static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::MSAA_BIT);
        }
        if (clearColorEnabled_) {
            curBits |= static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
        } else {
            curBits &= ~static_cast<uint32_t>(SCENE_NS::CameraPipelineFlag::CLEAR_COLOR_BIT);
        }
        camera->PipelineFlags()->SetValue(curBits);
    }
}