1 /*
2 * Copyright (C) 2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15 #include "CameraJS.h"
16
17 #include <meta/api/make_callback.h>
18 #include <meta/interface/intf_task_queue.h>
19 #include <meta/interface/intf_task_queue_registry.h>
20 #include <scene_plugin/api/camera_uid.h>
21 #include <scene_plugin/interface/intf_camera.h>
22 #include <scene_plugin/interface/intf_scene.h>
23
24 #include "SceneJS.h"
// NOTE(review): mirrors CameraComponent::ACTIVE_RENDER_BIT from lume3d; appears unused in this file — confirm before removing.
static constexpr uint32_t ACTIVE_RENDER_BIT = 1; // CameraComponent::ACTIVE_RENDER_BIT comes from lume3d...
26
GetInstanceImpl(uint32_t id)27 void* CameraJS::GetInstanceImpl(uint32_t id)
28 {
29 if (id == CameraJS::ID) {
30 return this;
31 }
32 return NodeImpl::GetInstanceImpl(id);
33 }
// Tears down native-side state exactly once (guarded by disposed_):
// unregisters from the owning SceneJS dispose hook, destroys the JS
// post-process wrapper, and releases the native camera on the engine thread.
void CameraJS::DisposeNative()
{
    if (!disposed_) {
        LOG_F("CameraJS::DisposeNative");
        disposed_ = true;

        // Unhook from SceneJS (registered in the constructor via StrongDisposeHook).
        NapiApi::Object obj = scene_.GetObject();
        auto* tro = obj.Native<TrueRootObject>();
        if (tro) {
            SceneJS* sceneJS = ((SceneJS*)tro->GetInstanceImpl(SceneJS::ID));
            if (sceneJS) {
                sceneJS->ReleaseStrongDispose((uintptr_t)&scene_);
            }
        }

        // make sure we release postProc settings
        if (auto ps = postProc_.GetObject()) {
            NapiApi::Function func = ps.Get<NapiApi::Function>("destroy");
            if (func) {
                func.Invoke(ps);
            }
        }
        postProc_.Reset();

        clearColor_.reset();
        if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
            // reset the native object refs
            SetNativeObject(nullptr, false);
            SetNativeObject(nullptr, true);

            // The camera and the resources_ map are moved into the task so they
            // stay alive for the duration of the synchronous engine-side teardown.
            ExecSyncTask([this, cam = BASE_NS::move(camera), res = BASE_NS::move(resources_)]() mutable {
                // Detach and release the post-process object held by the native camera.
                auto ptr = cam->PostProcess()->GetValue();
                ReleaseObject(interface_pointer_cast<META_NS::IObject>(ptr));
                ptr.reset();
                cam->PostProcess()->SetValue(nullptr);
                // dispose all extra objects.
                res.clear();

                auto camnode = interface_pointer_cast<SCENE_NS::INode>(cam);
                if (camnode == nullptr) {
                    return META_NS::IAny::Ptr {};
                }
                auto scene = camnode->GetScene();
                if (scene == nullptr) {
                    return META_NS::IAny::Ptr {};
                }
                // Deactivate first, then release the node back to the scene.
                scene->DeactivateCamera(cam);
                scene->ReleaseNode(camnode);
                return META_NS::IAny::Ptr {};
            });
        }
        scene_.Reset();
    }
}
Init(napi_env env,napi_value exports)88 void CameraJS::Init(napi_env env, napi_value exports)
89 {
90 BASE_NS::vector<napi_property_descriptor> node_props;
91 NodeImpl::GetPropertyDescs(node_props);
92
93 using namespace NapiApi;
94 node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetFov, &CameraJS::SetFov>("fov"));
95 node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetNear, &CameraJS::SetNear>("nearPlane"));
96 node_props.push_back(GetSetProperty<float, CameraJS, &CameraJS::GetFar, &CameraJS::SetFar>("farPlane"));
97 node_props.push_back(GetSetProperty<bool, CameraJS, &CameraJS::GetEnabled, &CameraJS::SetEnabled>("enabled"));
98 node_props.push_back(GetSetProperty<bool, CameraJS, &CameraJS::GetMSAA, &CameraJS::SetMSAA>("msaa"));
99 node_props.push_back(
100 GetSetProperty<Object, CameraJS, &CameraJS::GetPostProcess, &CameraJS::SetPostProcess>("postProcess"));
101 node_props.push_back(GetSetProperty<Object, CameraJS, &CameraJS::GetColor, &CameraJS::SetColor>("clearColor"));
102
103 napi_value func;
104 auto status = napi_define_class(env, "Camera", NAPI_AUTO_LENGTH, BaseObject::ctor<CameraJS>(), nullptr,
105 node_props.size(), node_props.data(), &func);
106
107 NapiApi::MyInstanceState* mis;
108 napi_get_instance_data(env, (void**)&mis);
109 mis->StoreCtor("Camera", func);
110 }
111
CameraJS(napi_env e,napi_callback_info i)112 CameraJS::CameraJS(napi_env e, napi_callback_info i) : BaseObject<CameraJS>(e, i), NodeImpl(NodeImpl::CAMERA)
113 {
114 NapiApi::FunctionContext<NapiApi::Object, NapiApi::Object> fromJs(e, i);
115 if (!fromJs) {
116 // no arguments. so internal create.
117 // expecting caller to finish initialization
118 return;
119 }
120 // java script call.. with arguments
121 NapiApi::Object scene = fromJs.Arg<0>();
122 scene_ = scene;
123
124 auto scn = GetNativeMeta<SCENE_NS::IScene>(scene);
125 if (scn == nullptr) {
126 CORE_LOG_F("Invalid scene for CameraJS!");
127 return;
128 }
129
130 NapiApi::Object meJs(e, fromJs.This());
131 auto* tro = scene.Native<TrueRootObject>();
132 auto* sceneJS = ((SceneJS*)tro->GetInstanceImpl(SceneJS::ID));
133 sceneJS->StrongDisposeHook((uintptr_t)&scene_, meJs);
134
135 NapiApi::Object args = fromJs.Arg<1>();
136 auto obj = GetNativeObjectParam<META_NS::IObject>(args);
137 if (obj) {
138 // linking to an existing object.
139 NapiApi::Object meJs(e, fromJs.This());
140 SetNativeObject(obj, false);
141 StoreJsObj(obj, meJs);
142 return;
143 }
144
145 // collect parameters
146 NapiApi::Value<BASE_NS::string> name;
147 NapiApi::Value<BASE_NS::string> path;
148
149 if (auto prm = args.Get("name")) {
150 name = NapiApi::Value<BASE_NS::string>(e, prm);
151 }
152 if (auto prm = args.Get("path")) {
153 path = NapiApi::Value<BASE_NS::string>(e, prm);
154 }
155
156 uint32_t pipeline = SCENE_NS::ICamera::SceneCameraPipeline::SCENE_CAM_PIPELINE_LIGHT_FORWARD;
157 if (auto prm = args.Get("renderPipeline")) {
158 pipeline = NapiApi::Value<uint32_t>(e, prm);
159 }
160
161 BASE_NS::string nodePath;
162
163 if (path) {
164 // create using path
165 nodePath = path.valueOrDefault("");
166 } else if (name) {
167 // use the name as path (creates under root)
168 nodePath = name.valueOrDefault("");
169 } else {
170 // no name or path defined should this just fail?
171 }
172
173 // Create actual camera object.
174 SCENE_NS::ICamera::Ptr node;
175 ExecSyncTask([scn, nodePath, &node, pipeline]() {
176 node = scn->CreateNode<SCENE_NS::ICamera>(nodePath, true);
177 node->RenderingPipeline()->SetValue(pipeline);
178 scn->DeactivateCamera(node);
179 return META_NS::IAny::Ptr {};
180 });
181
182 SetNativeObject(interface_pointer_cast<META_NS::IObject>(node), false);
183 node.reset();
184 StoreJsObj(GetNativeObject(), meJs);
185
186 if (name) {
187 // set the name of the object. if we were given one
188 meJs.Set("name", name);
189 }
190 meJs.Set("postProcess", fromJs.GetNull());
191 }
Finalize(napi_env env)192 void CameraJS::Finalize(napi_env env)
193 {
194 // make sure the camera gets deactivated (the actual c++ camera might not be destroyed here)
195 if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
196 ExecSyncTask([camera]() {
197 if (auto scene = interface_cast<SCENE_NS::INode>(camera)->GetScene()) {
198 scene->DeactivateCamera(camera);
199 }
200 return META_NS::IAny::Ptr {};
201 });
202 }
203 BaseObject<CameraJS>::Finalize(env);
204 }
// Destructor only logs; actual cleanup is done in DisposeNative()/Finalize().
CameraJS::~CameraJS()
{
    LOG_F("CameraJS -- ");
}
GetFov(NapiApi::FunctionContext<> & ctx)209 napi_value CameraJS::GetFov(NapiApi::FunctionContext<>& ctx)
210 {
211 float fov = 0.0;
212 if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
213 ExecSyncTask([camera, &fov]() {
214 fov = 0.0;
215 if (camera) {
216 fov = camera->FoV()->GetValue();
217 }
218 return META_NS::IAny::Ptr {};
219 });
220 }
221
222 napi_value value;
223 napi_status status = napi_create_double(ctx, fov, &value);
224 return value;
225 }
226
SetFov(NapiApi::FunctionContext<float> & ctx)227 void CameraJS::SetFov(NapiApi::FunctionContext<float>& ctx)
228 {
229 float fov = ctx.Arg<0>();
230 if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
231 ExecSyncTask([camera, fov]() {
232 camera->FoV()->SetValue(fov);
233 return META_NS::IAny::Ptr {};
234 });
235 }
236 }
237
GetEnabled(NapiApi::FunctionContext<> & ctx)238 napi_value CameraJS::GetEnabled(NapiApi::FunctionContext<>& ctx)
239 {
240 bool activ = false;
241 if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
242 ExecSyncTask([camera, &activ]() {
243 if (camera) {
244 if (auto scene = interface_cast<SCENE_NS::INode>(camera)->GetScene()) {
245 activ = scene->IsCameraActive(camera);
246 }
247 }
248 return META_NS::IAny::Ptr {};
249 });
250 }
251 napi_value value;
252 napi_status status = napi_get_boolean(ctx, activ, &value);
253 return value;
254 }
255
SetEnabled(NapiApi::FunctionContext<bool> & ctx)256 void CameraJS::SetEnabled(NapiApi::FunctionContext<bool>& ctx)
257 {
258 bool activ = ctx.Arg<0>();
259 if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
260 ExecSyncTask([camera, activ]() {
261 if (auto scene = interface_cast<SCENE_NS::INode>(camera)->GetScene()) {
262 if (activ) {
263 scene->ActivateCamera(camera);
264 } else {
265 scene->DeactivateCamera(camera);
266 }
267 }
268 return META_NS::IAny::Ptr {};
269 });
270 }
271 }
272
GetFar(NapiApi::FunctionContext<> & ctx)273 napi_value CameraJS::GetFar(NapiApi::FunctionContext<>& ctx)
274 {
275 float fov = 0.0;
276 if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
277 ExecSyncTask([camera, &fov]() {
278 fov = 0.0;
279 if (camera) {
280 fov = camera->FarPlane()->GetValue();
281 }
282 return META_NS::IAny::Ptr {};
283 });
284 }
285
286 napi_value value;
287 napi_status status = napi_create_double(ctx, fov, &value);
288 return value;
289 }
290
SetFar(NapiApi::FunctionContext<float> & ctx)291 void CameraJS::SetFar(NapiApi::FunctionContext<float>& ctx)
292 {
293 float fov = ctx.Arg<0>();
294 if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
295 ExecSyncTask([camera, fov]() {
296 camera->FarPlane()->SetValue(fov);
297 return META_NS::IAny::Ptr {};
298 });
299 }
300 }
301
GetNear(NapiApi::FunctionContext<> & ctx)302 napi_value CameraJS::GetNear(NapiApi::FunctionContext<>& ctx)
303 {
304 float fov = 0.0;
305 if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
306 ExecSyncTask([camera, &fov]() {
307 fov = 0.0;
308 if (camera) {
309 fov = camera->NearPlane()->GetValue();
310 }
311 return META_NS::IAny::Ptr {};
312 });
313 }
314
315 napi_value value;
316 napi_status status = napi_create_double(ctx, fov, &value);
317 return value;
318 }
319
SetNear(NapiApi::FunctionContext<float> & ctx)320 void CameraJS::SetNear(NapiApi::FunctionContext<float>& ctx)
321 {
322 float fov = ctx.Arg<0>();
323 if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
324 ExecSyncTask([camera, fov]() {
325 camera->NearPlane()->SetValue(fov);
326 return META_NS::IAny::Ptr {};
327 });
328 }
329 }
330
// JS getter for "postProcess": returns the (cached) JS wrapper for the
// camera's native post-process object, creating the wrapper on first access.
// Returns null when there is no native camera.
napi_value CameraJS::GetPostProcess(NapiApi::FunctionContext<>& ctx)
{
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        SCENE_NS::IPostProcess::Ptr postproc;
        ExecSyncTask([camera, &postproc]() {
            postproc = camera->PostProcess()->GetValue();
            return META_NS::IAny::Ptr {};
        });
        auto obj = interface_pointer_cast<META_NS::IObject>(postproc);
        if (auto cached = FetchJsObj(obj)) {
            // always return the same js object.
            return cached;
        }
        // No wrapper yet: construct one from the native instance, passing this
        // camera and an empty parameter object to the JS constructor.
        NapiApi::Object parms; /*empty object*/
        napi_value args[] = { ctx.This(), // Camera..
            parms };
        napi_value postProcJS = CreateFromNativeInstance(ctx, obj, false, BASE_NS::countof(args), args);
        postProc_ = { ctx, postProcJS }; // take ownership of the object.
        return postProcJS;
    }
    return ctx.GetNull();
}
353
// JS setter for "postProcess": replaces the current post-process settings.
// Destroys the previously owned wrapper (unless the same object is re-set),
// bridges a plain JS object into a native-backed "PostProcessSettings" if
// needed, and pushes the native post-process onto the camera.
void CameraJS::SetPostProcess(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    NapiApi::Object psp = ctx.Arg<0>();
    if (auto currentlySet = postProc_.GetObject()) {
        if ((napi_value)currentlySet == (napi_value)psp) {
            // setting the exactly the same postprocess setting. do nothing.
            return;
        }
        // Different object: destroy the old wrapper before replacing it.
        NapiApi::Function func = currentlySet.Get<NapiApi::Function>("destroy");
        if (func) {
            func.Invoke(currentlySet);
        }
        postProc_.Reset();
    }

    SCENE_NS::IPostProcess::Ptr postproc;
    // see if we have a native backing for the input object..
    TrueRootObject* native = psp.Native<TrueRootObject>();
    if (!native) {
        // nope.. so create a new bridged object.
        napi_value args[] = {
            ctx.This(), // Camera..
            ctx.Arg<0>() // "javascript object for values"
        };
        psp = { GetJSConstructor(ctx, "PostProcessSettings"), BASE_NS::countof(args), args };
        native = psp.Native<TrueRootObject>();
    }
    // Take ownership of the (possibly newly bridged) wrapper object.
    postProc_ = { ctx, psp };

    if (native) {
        postproc = interface_pointer_cast<SCENE_NS::IPostProcess>(native->GetNativeObject());
    }
    if (auto camera = interface_cast<SCENE_NS::ICamera>(GetNativeObject())) {
        ExecSyncTask([camera, postproc = BASE_NS::move(postproc)]() {
            camera->PostProcess()->SetValue(postproc);
            return META_NS::IAny::Ptr {};
        });
    }
}
393
// the PipelineFlagBits are not declared in sceneplugin api..
// NOTE(review): these values must stay in sync with the engine-side
// CameraComponent pipeline flags — confirm against lume3d when updating.
namespace {

enum PipelineFlagBits : uint32_t {
    /** Target clear flags depth. Override camera render node graph loadOp with clear.
     * Without clear the default render node graph based loadOp is used. (Default pipelines use depth clear)
     */
    CLEAR_DEPTH_BIT = (1 << 0),
    /** Target clear flags color. Override camera render node graph loadOp with clear.
     * Without clear the default render node graph based loadOp is used. (Default pipelines do not use color clear)
     */
    CLEAR_COLOR_BIT = (1 << 1),
    /** Enable MSAA for rendering. Only affects non deferred default pipelines. */
    MSAA_BIT = (1 << 2),
    /** Automatically use pre-pass if there are default material needs (e.g. for transmission). Automatic RNG
     generation needs to be enabled for the ECS scene. */
    ALLOW_COLOR_PRE_PASS_BIT = (1 << 3),
    /** Force pre-pass every frame. Use for e.g. custom shaders without default material needs. Automatic RNG
     generation needs to be enabled for the ECS scene. */
    FORCE_COLOR_PRE_PASS_BIT = (1 << 4),
    /** Store history (store history for next frame, needed for e.g. temporal filtering) */
    HISTORY_BIT = (1 << 5),
    /** Jitter camera. With Halton sampling */
    JITTER_BIT = (1 << 6),
    /** Output samplable velocity / normal */
    VELOCITY_OUTPUT_BIT = (1 << 7),
    /** Output samplable depth */
    DEPTH_OUTPUT_BIT = (1 << 8),
    /** Is a multi-view camera and is not be rendered separately at all
     * The camera is added to other camera as multiViewCameras
     */
    MULTI_VIEW_ONLY_BIT = (1 << 9),
    /** Generate environment cubemap dynamically for the camera
     */
    DYNAMIC_CUBEMAP_BIT = (1 << 10),
    /** Disallow reflection plane for camera
     */
    DISALLOW_REFLECTION_BIT = (1 << 11),
};
} // namespace
434
// JS getter for "clearColor": returns the color proxy object when color
// clearing is enabled on the camera, JS null when it is disabled, and an
// empty value when there is no native camera.
napi_value CameraJS::GetColor(NapiApi::FunctionContext<>& ctx)
{
    auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetThisNativeObject(ctx));
    if (!camera) {
        return {};
    }
    bool enabled { false };
    ExecSyncTask([camera, &enabled]() {
        // enable camera clear
        uint32_t curBits = camera->PipelineFlags()->GetValue();
        enabled = curBits & PipelineFlagBits::CLEAR_COLOR_BIT;
        return META_NS::IAny::Ptr {};
    });
    if (!enabled) {
        return ctx.GetNull();
    }

    // Lazily create the JS<->native proxy bound to the camera's ClearColor property.
    if (clearColor_ == nullptr) {
        clearColor_ = BASE_NS::make_unique<ColorProxy>(ctx, camera->ClearColor());
    }
    return clearColor_->Value();
}
// JS setter for "clearColor": a non-null object sets the clear color and
// enables color clearing; null/undefined disables it. The MSAA bit is
// re-applied in the same update so the cached msaaEnabled_ state and the
// native pipeline flags stay consistent (mirrors SetMSAA).
void CameraJS::SetColor(NapiApi::FunctionContext<NapiApi::Object>& ctx)
{
    auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetThisNativeObject(ctx));
    if (!camera) {
        return;
    }
    if (clearColor_ == nullptr) {
        clearColor_ = BASE_NS::make_unique<ColorProxy>(ctx, camera->ClearColor());
    }
    NapiApi::Object obj = ctx.Arg<0>();
    if (obj) {
        clearColor_->SetValue(obj);
        clearColorEnabled_ = true;
    } else {
        // null/undefined argument: leave the proxy value untouched, just disable clearing.
        clearColorEnabled_ = false;
    }
    ExecSyncTask([camera, clearColorEnabled = clearColorEnabled_, msaaEnabled = msaaEnabled_]() {
        // enable camera clear
        uint32_t curBits = camera->PipelineFlags()->GetValue();
        if (msaaEnabled) {
            curBits |= PipelineFlagBits::MSAA_BIT;
        } else {
            curBits &= ~PipelineFlagBits::MSAA_BIT;
        }
        if (clearColorEnabled) {
            curBits |= PipelineFlagBits::CLEAR_COLOR_BIT;
        } else {
            curBits &= ~PipelineFlagBits::CLEAR_COLOR_BIT;
        }
        camera->PipelineFlags()->SetValue(curBits);
        return META_NS::IAny::Ptr {};
    });
}
490
// Creates an object of the given class via the object registry and keeps a
// strong reference in resources_ until ReleaseObject / DisposeNative drops it.
META_NS::IObject::Ptr CameraJS::CreateObject(const META_NS::ClassInfo& type)
{
    auto created = META_NS::GetObjectRegistry().Create(type);
    if (created) {
        resources_[(uintptr_t)created.get()] = created;
    }
    return created;
}
ReleaseObject(const META_NS::IObject::Ptr & obj)499 void CameraJS::ReleaseObject(const META_NS::IObject::Ptr& obj)
500 {
501 if (obj) {
502 resources_.erase((uintptr_t)obj.get());
503 }
504 }
505
GetMSAA(NapiApi::FunctionContext<> & ctx)506 napi_value CameraJS::GetMSAA(NapiApi::FunctionContext<>& ctx)
507 {
508 bool enabled = false;
509 if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
510 ExecSyncTask([camera, &enabled]() {
511 uint32_t curBits = camera->PipelineFlags()->GetValue();
512 enabled = curBits & PipelineFlagBits::MSAA_BIT;
513 return META_NS::IAny::Ptr {};
514 });
515 }
516
517 napi_value value;
518 napi_status status = napi_get_boolean(ctx, enabled, &value);
519 return value;
520 }
521
SetMSAA(NapiApi::FunctionContext<bool> & ctx)522 void CameraJS::SetMSAA(NapiApi::FunctionContext<bool>& ctx)
523 {
524 msaaEnabled_ = ctx.Arg<0>();
525 if (auto camera = interface_pointer_cast<SCENE_NS::ICamera>(GetNativeObject())) {
526 ExecSyncTask([camera, msaaEnabled = msaaEnabled_, clearColorEnabled = clearColorEnabled_]() {
527 uint32_t curBits = camera->PipelineFlags()->GetValue();
528 if (msaaEnabled) {
529 curBits |= PipelineFlagBits::MSAA_BIT;
530 } else {
531 curBits &= ~PipelineFlagBits::MSAA_BIT;
532 }
533 if (clearColorEnabled) {
534 curBits |= PipelineFlagBits::CLEAR_COLOR_BIT;
535 } else {
536 curBits &= ~PipelineFlagBits::CLEAR_COLOR_BIT;
537 }
538 camera->PipelineFlags()->SetValue(curBits);
539 return META_NS::IAny::Ptr {};
540 });
541 }
542 }
543