• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "ecmascript/js_thread.h"
17 
18 #include "ecmascript/base/config.h"
19 #include "ecmascript/mem/tagged_state_word.h"
20 #include "ecmascript/runtime.h"
21 #include "ecmascript/debugger/js_debugger_manager.h"
22 #include "ecmascript/dependent_infos.h"
23 #include "ecmascript/ic/mega_ic_cache.h"
24 #include "ecmascript/js_date.h"
25 #include "ecmascript/js_object-inl.h"
26 #include "ecmascript/js_tagged_value.h"
27 #include "ecmascript/module/module_logger.h"
28 #include "ecmascript/module/js_module_manager.h"
29 #include "ecmascript/runtime_call_id.h"
30 #include "macros.h"
31 
32 #if !defined(PANDA_TARGET_WINDOWS) && !defined(PANDA_TARGET_MACOS) && !defined(PANDA_TARGET_IOS)
33 #include <sys/resource.h>
34 #endif
35 
36 #if defined(ENABLE_EXCEPTION_BACKTRACE)
37 #include "ecmascript/platform/backtrace.h"
38 #endif
39 #if defined(ECMASCRIPT_SUPPORT_CPUPROFILER)
40 #include "ecmascript/dfx/cpu_profiler/cpu_profiler.h"
41 #endif
42 #include "common_components/heap/allocator/region_desc.h"
43 #include "ecmascript/dfx/vm_thread_control.h"
44 #include "ecmascript/ecma_global_storage.h"
45 #include "ecmascript/ic/properties_cache.h"
46 #include "ecmascript/interpreter/interpreter.h"
47 #include "ecmascript/mem/concurrent_marker.h"
48 #include "ecmascript/platform/file.h"
49 #include "ecmascript/jit/jit.h"
50 #include "common_interfaces/thread/thread_holder_manager.h"
51 #include "ecmascript/checkpoint/thread_state_transition.h"
52 #include "ecmascript/platform/asm_stack.h"
53 
54 namespace panda::ecmascript {
55 uintptr_t TaggedStateWord::BASE_ADDRESS = 0;
56 using CommonStubCSigns = panda::ecmascript::kungfu::CommonStubCSigns;
57 using BytecodeStubCSigns = panda::ecmascript::kungfu::BytecodeStubCSigns;
58 using BuiltinsStubCSigns = panda::ecmascript::kungfu::BuiltinsStubCSigns;
59 
// Block the caller until every thread has passed the suspend barrier,
// i.e. until passBarrierCount_ drops to zero.
void SuspendBarrier::Wait()
{
    while (true) {
        // Relaxed load is enough for the spin check; memory is synchronized
        // by the seq_cst load below once the count reaches zero.
        int32_t curCount = passBarrierCount_.load(std::memory_order_relaxed);
        if (LIKELY(curCount > 0)) {
#if defined(PANDA_USE_FUTEX)
            // Sleep in the kernel until the counter changes from curCount.
            int32_t *addr = reinterpret_cast<int32_t*>(&passBarrierCount_);
            if (futex(addr, FUTEX_WAIT_PRIVATE, curCount, nullptr, nullptr, 0) != 0) {
                // EAGAIN (value already changed) and EINTR (signal) are
                // benign: just retry the loop. Anything else is fatal.
                if (errno != EAGAIN && errno != EINTR) {
                    LOG_GC(FATAL) << "SuspendBarrier::Wait failed, errno = " << errno;
                    UNREACHABLE();
                }
            }
#else
            // No futex support on this platform: yield and poll.
            sched_yield();
#endif
        } else {
            // Use seq_cst to synchronize memory.
            curCount = passBarrierCount_.load(std::memory_order_seq_cst);
            ASSERT(curCount == 0);
            break;
        }
    }
}
84 
85 thread_local JSThread *currentThread = nullptr;
86 
// Returns the JSThread bound to the calling OS thread (set by
// RegisterThread), or nullptr if none has been registered yet.
JSThread *JSThread::GetCurrent()
{
    return currentThread;
}
91 
92 // static
// static
// Registers jsThread with the runtime and, when the calling OS thread has
// no bound JSThread yet, binds it via the thread_local and transitions it
// to the NATIVE state.
void JSThread::RegisterThread(JSThread *jsThread)
{
    Runtime::GetInstance()->RegisterThread(jsThread);
    // If it is not true, we created a new thread for future fork
    if (currentThread == nullptr) {
        currentThread = jsThread;
        if (LIKELY(!g_isEnableCMCGC)) {
            jsThread->UpdateState(ThreadState::NATIVE);
        } else {
            // With CMC GC the thread state is tracked via the ThreadHolder.
            jsThread->GetThreadHolder()->TransferToNative();
        }
    }
}
106 
UnregisterThread(JSThread * jsThread)107 void JSThread::UnregisterThread(JSThread *jsThread)
108 {
109     if (currentThread == jsThread) {
110         if (LIKELY(!g_isEnableCMCGC)) {
111             jsThread->UpdateState(ThreadState::TERMINATED);
112         } else {
113             jsThread->GetThreadHolder()->TransferToNative();
114             jsThread->SetAllocBuffer(nullptr);
115         }
116         currentThread = nullptr;
117     } else {
118         if (LIKELY(!g_isEnableCMCGC)) {
119             // We have created this JSThread instance but hadn't forked it.
120             ASSERT(jsThread->GetState() == ThreadState::CREATED);
121             jsThread->UpdateState(ThreadState::TERMINATED);
122         } else {
123             jsThread->GetThreadHolder()->TransferToNative();
124             jsThread->SetAllocBuffer(nullptr);
125         }
126     }
127     Runtime::GetInstance()->UnregisterThread(jsThread);
128 }
129 
130 // static
// static
// Creates and fully initializes the JSThread for vm: interpreter stack,
// native stack limits, glue data flags and (for CMC GC) the ThreadHolder,
// then registers the thread with the runtime.
JSThread *JSThread::Create(EcmaVM *vm)
{
    auto jsThread = new JSThread(vm);
    vm->SetJSThread(jsThread);
    AsmInterParsedOption asmInterOpt = vm->GetJSOptions().GetAsmInterParsedOption();
    if (asmInterOpt.enableAsm) {
        jsThread->EnableAsmInterpreter();
    }

    jsThread->nativeAreaAllocator_ = vm->GetNativeAreaAllocator();
    jsThread->heapRegionAllocator_ = vm->GetHeapRegionAllocator();
    // align with 16
    size_t maxStackSize = vm->GetEcmaParamConfiguration().GetMaxStackSize();
    jsThread->glueData_.frameBase_ = static_cast<JSTaggedType *>(
        vm->GetNativeAreaAllocator()->Allocate(sizeof(JSTaggedType) * maxStackSize));
    // The interpreter stack grows downwards: the current frame starts at the
    // top of the allocated region.
    jsThread->glueData_.currentFrame_ = jsThread->glueData_.frameBase_ + maxStackSize;
    EcmaInterpreter::InitStackFrame(jsThread);

    jsThread->glueData_.stackLimit_ = GetAsmStackLimit();
    jsThread->glueData_.stackStart_ = GetCurrentStackPosition();
    jsThread->glueData_.isEnableMutantArray_ = vm->IsEnableMutantArray();
    jsThread->glueData_.IsEnableElementsKind_ = vm->IsEnableElementsKind();
    jsThread->SetThreadId();

    if (UNLIKELY(g_isEnableCMCGC)) {
        // CMC GC needs a ThreadHolder to track this thread's state.
        jsThread->glueData_.threadHolder_ = ToUintPtr(ThreadHolder::CreateAndRegisterNewThreadHolder(vm));
    }

    RegisterThread(jsThread);
    return jsThread;
}
162 
// Main JSThread constructor. Installs the global-handle accessor closures,
// backed either by the normal Node storage or — when global leak checking is
// enabled — by the DebugNode storage, then creates the per-thread helper
// objects (VmThreadControl, DateUtils, caches, global constants).
JSThread::JSThread(EcmaVM *vm) : id_(os::thread::GetCurrentThreadId()), vm_(vm)
{
    auto chunk = vm->GetChunk();
    if (!vm_->GetJSOptions().EnableGlobalLeakCheck()) {
        // Normal mode: plain Node-based global storage.
        globalStorage_ = chunk->New<EcmaGlobalStorage<Node>>(this, vm->GetNativeAreaAllocator());
        newGlobalHandle_ = [this](JSTaggedType value) {
            return globalStorage_->NewGlobalHandle<NodeKind::NORMAL_NODE>(value);
        };
        disposeGlobalHandle_ = [this](uintptr_t nodeAddr) {
            globalStorage_->DisposeGlobalHandle<NodeKind::NORMAL_NODE>(nodeAddr);
        };
        if (Runtime::GetInstance()->IsHybridVm()) {
            // Hybrid VM additionally exposes cross-reference (XRef) handles
            // stored as UNIFIED_NODE.
            newXRefGlobalHandle_ = [this](JSTaggedType value) {
                return globalStorage_->NewGlobalHandle<NodeKind::UNIFIED_NODE>(value);
            };
            disposeXRefGlobalHandle_ = [this](uintptr_t nodeAddr) {
                globalStorage_->DisposeGlobalHandle<NodeKind::UNIFIED_NODE>(nodeAddr);
            };
            setNodeKind_ = [this](NodeKind nodeKind) { globalStorage_->SetNodeKind(nodeKind); };
        }
        setWeak_ = [this](uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack,
                        WeakClearCallback nativeFinalizeCallBack) {
            return globalStorage_->SetWeak(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
        };
        clearWeak_ = [this](uintptr_t nodeAddr) { return globalStorage_->ClearWeak(nodeAddr); };
        isWeak_ = [this](uintptr_t addr) { return globalStorage_->IsWeak(addr); };
    } else {
        // Leak-check mode: DebugNode storage keeps extra bookkeeping
        // (mark counts, global numbers) used by IterateHandleWithCheck.
        globalDebugStorage_ = chunk->New<EcmaGlobalStorage<DebugNode>>(this, vm->GetNativeAreaAllocator());
        newGlobalHandle_ = [this](JSTaggedType value) {
            return globalDebugStorage_->NewGlobalHandle<NodeKind::NORMAL_NODE>(value);
        };
        disposeGlobalHandle_ = [this](uintptr_t nodeAddr) {
            globalDebugStorage_->DisposeGlobalHandle<NodeKind::NORMAL_NODE>(nodeAddr);
        };
        setWeak_ = [this](uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack,
                        WeakClearCallback nativeFinalizeCallBack) {
            return globalDebugStorage_->SetWeak(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
        };
        clearWeak_ = [this](uintptr_t nodeAddr) { return globalDebugStorage_->ClearWeak(nodeAddr); };
        isWeak_ = [this](uintptr_t addr) { return globalDebugStorage_->IsWeak(addr); };
        if (Runtime::GetInstance()->IsHybridVm()) {
            newXRefGlobalHandle_ = [this](JSTaggedType value) {
                return globalDebugStorage_->NewGlobalHandle<NodeKind::UNIFIED_NODE>(value);
            };
            disposeXRefGlobalHandle_ = [this](uintptr_t nodeAddr) {
                globalDebugStorage_->DisposeGlobalHandle<NodeKind::UNIFIED_NODE>(nodeAddr);
            };
            setNodeKind_ = [this](NodeKind nodeKind) { globalDebugStorage_->SetNodeKind(nodeKind); };
        }
    }
    vmThreadControl_ = new VmThreadControl(this);
    SetBCStubStatus(BCStubStatus::NORMAL_BC_STUB);
    dateUtils_ = new DateUtils();

    glueData_.propertiesCache_ = new PropertiesCache();
    // Mega IC caches are optional; ClearMegaIC()/the destructor handle the
    // nullptr case when the option is off.
    if (vm_->GetJSOptions().IsEnableMegaIC()) {
        glueData_.loadMegaICCache_ = new MegaICCache();
        glueData_.storeMegaICCache_ = new MegaICCache();
    }

    glueData_.globalConst_ = new GlobalEnvConstants();
    glueData_.baseAddress_ = TaggedStateWord::BASE_ADDRESS;
    glueData_.isEnableCMCGC_ = g_isEnableCMCGC;
}
227 
JSThread(EcmaVM * vm,ThreadType threadType)228 JSThread::JSThread(EcmaVM *vm, ThreadType threadType) : id_(os::thread::GetCurrentThreadId()),
229                                                         vm_(vm), threadType_(threadType)
230 {
231     ASSERT(threadType == ThreadType::JIT_THREAD);
232     // jit thread no need GCIterating
233     readyForGCIterating_ = false;
234     glueData_.isEnableCMCGC_ = g_isEnableCMCGC;
235     if (UNLIKELY(g_isEnableCMCGC)) {
236         glueData_.threadHolder_ = ToUintPtr(ThreadHolder::CreateAndRegisterNewThreadHolder(nullptr));
237     }
238     RegisterThread(this);
239 };
240 
// Constructor for the daemon thread; like the JIT thread it is excluded
// from GC root iteration and has no owning EcmaVM.
JSThread::JSThread(ThreadType threadType) : threadType_(threadType)
{
    ASSERT(threadType == ThreadType::DAEMON_THREAD);
    // daemon thread no need GCIterating
    readyForGCIterating_ = false;
    glueData_.isEnableCMCGC_ = g_isEnableCMCGC;
}
248 
// Destructor: releases everything the constructors and Create() allocated.
// Chunk-allocated storages go back to the VM chunk; plain `new` members are
// deleted; the interpreter stack is returned to the native area allocator.
JSThread::~JSThread()
{
    readyForGCIterating_ = false;
    if (globalStorage_ != nullptr) {
        vm_->GetChunk()->Delete(globalStorage_);
        globalStorage_ = nullptr;
    }
    if (globalDebugStorage_ != nullptr) {
        vm_->GetChunk()->Delete(globalDebugStorage_);
        globalDebugStorage_ = nullptr;
    }

    ClearMegaIC();

    if (glueData_.propertiesCache_ != nullptr) {
        delete glueData_.propertiesCache_;
        glueData_.propertiesCache_ = nullptr;
    }

    // Only a real JS thread owns an interpreter stack (see Create()); JIT and
    // daemon threads never allocated one.
    if (threadType_ == ThreadType::JS_THREAD) {
        GetNativeAreaAllocator()->Free(glueData_.frameBase_, sizeof(JSTaggedType) *
                                       vm_->GetEcmaParamConfiguration().GetMaxStackSize());
    }
    GetNativeAreaAllocator()->FreeArea(regExpCacheArea_);

    glueData_.frameBase_ = nullptr;
    nativeAreaAllocator_ = nullptr;
    heapRegionAllocator_ = nullptr;
    regExpCacheArea_ = nullptr;
    if (vmThreadControl_ != nullptr) {
        delete vmThreadControl_;
        vmThreadControl_ = nullptr;
    }
    // DaemonThread will be unregistered when the binding std::thread release.
    if (!IsDaemonThread()) {
        UnregisterThread(this);
    }
    if (dateUtils_ != nullptr) {
        delete dateUtils_;
        dateUtils_ = nullptr;
    }
    if (glueData_.moduleLogger_ != nullptr) {
        delete glueData_.moduleLogger_;
        glueData_.moduleLogger_ = nullptr;
    }
    if (glueData_.globalConst_ != nullptr) {
        delete glueData_.globalConst_;
        glueData_.globalConst_ = nullptr;
    }
}
299 
// Returns the id of the calling thread or task (delegates to
// GetCurrentThreadOrTaskId).
ThreadId JSThread::GetCurrentThreadId()
{
    return GetCurrentThreadOrTaskId();
}
304 
// Stores exception as this thread's pending exception. When the
// exception-backtrace option is on, also logs the raw value and a native
// backtrace to help locate the throw site.
void JSThread::SetException(JSTaggedValue exception)
{
    glueData_.exception_ = exception;
#if defined(ENABLE_EXCEPTION_BACKTRACE)
    if (vm_->GetJSOptions().EnableExceptionBacktrace()) {
        LOG_ECMA(INFO) << "SetException:" << exception.GetRawData();
        std::ostringstream stack;
        Backtrace(stack);
        LOG_ECMA(INFO) << stack.str();
    }
#endif
}
317 
// Reports an uncaught exception: first gives a registered onError callback a
// chance to observe it (worker threads only — the main thread returns early),
// then clears the pending exception and prints it (JSError gets the detailed
// error printer, everything else is stringified).
void JSThread::HandleUncaughtException(JSTaggedValue exception)
{
    // Keep the exception alive in a handle across the calls below.
    [[maybe_unused]] EcmaHandleScope handleScope(this);
    JSHandle<JSTaggedValue> exceptionHandle(this, exception);
    if (isUncaughtExceptionRegistered_) {
        if (vm_->GetJSThread()->IsMainThread()) {
            return;
        }
        auto callback = GetOnErrorCallback();
        if (callback) {
            // Clear before the callback so user code runs without a pending
            // exception; the callback executes in a native scope.
            ClearException();
            Local<ObjectRef> exceptionRef = JSNApiHelper::ToLocal<ObjectRef>(exceptionHandle);
            ThreadNativeScope nativeScope(this);
            callback(exceptionRef, GetOnErrorData());
        }
    }
    // if caught exceptionHandle type is JSError
    ClearException();
    if (exceptionHandle->IsJSError()) {
        base::ErrorHelper::PrintJSErrorInfo(this, exceptionHandle);
        return;
    }
    JSHandle<EcmaString> result = JSTaggedValue::ToString(this, exceptionHandle);
    LOG_NO_TAG(ERROR) << ConvertToString(this, *result);
}
343 
HandleUncaughtException()344 void JSThread::HandleUncaughtException()
345 {
346     if (!HasPendingException()) {
347         return;
348     }
349     JSTaggedValue exception = GetException();
350     HandleUncaughtException(exception);
351 }
352 
GetCurrentLexenv() const353 JSTaggedValue JSThread::GetCurrentLexenv() const
354 {
355     FrameHandler frameHandler(const_cast<JSThread*>(this));
356     return frameHandler.GetEnv();
357 }
358 
GetCurrentFunction() const359 JSTaggedValue JSThread::GetCurrentFunction() const
360 {
361     FrameHandler frameHandler(const_cast<JSThread*>(this));
362     return frameHandler.GetFunction();
363 }
364 
GetCurrentFrame() const365 const JSTaggedType *JSThread::GetCurrentFrame() const
366 {
367     if (IsAsmInterpreter()) {
368         return GetLastLeaveFrame();
369     }
370     return GetCurrentSPFrame();
371 }
372 
SetCurrentFrame(JSTaggedType * sp)373 void JSThread::SetCurrentFrame(JSTaggedType *sp)
374 {
375     if (IsAsmInterpreter()) {
376         return SetLastLeaveFrame(sp);
377     }
378     return SetCurrentSPFrame(sp);
379 }
380 
GetCurrentInterpretedFrame() const381 const JSTaggedType *JSThread::GetCurrentInterpretedFrame() const
382 {
383     if (IsAsmInterpreter()) {
384         auto frameHandler = FrameHandler(const_cast<JSThread*>(this));
385         return frameHandler.GetSp();
386     }
387     return GetCurrentSPFrame();
388 }
389 
InvokeWeakNodeFreeGlobalCallBack()390 void JSThread::InvokeWeakNodeFreeGlobalCallBack()
391 {
392     while (!weakNodeFreeGlobalCallbacks_.empty()) {
393         auto callbackPair = weakNodeFreeGlobalCallbacks_.back();
394         weakNodeFreeGlobalCallbacks_.pop_back();
395         ASSERT(callbackPair.first != nullptr && callbackPair.second != nullptr);
396         auto callback = callbackPair.first;
397         (*callback)(callbackPair.second);
398     }
399 }
400 
// Runs the native finalize callbacks queued during weak-reference sweeping,
// followed by the optional finalize-task callback. Guarded against
// reentrancy because a callback may itself trigger another GC.
void JSThread::InvokeWeakNodeNativeFinalizeCallback()
{
    if (ShouldIgnoreFinalizeCallback()) {
        // Drop the queued callbacks without running them.
        weakNodeNativeFinalizeCallbacks_.clear();
        return;
    }
    // the second callback may lead to another GC, if this, return directly;
    if (runningNativeFinalizeCallbacks_) {
        return;
    }
    runningNativeFinalizeCallbacks_ = true;
    TRACE_GC(GCStats::Scope::ScopeId::InvokeNativeFinalizeCallbacks, vm_->GetEcmaGCStats());
    OHOS_HITRACE(HITRACE_LEVEL_COMMERCIAL, ("InvokeNativeFinalizeCallbacks num:"
        + std::to_string(weakNodeNativeFinalizeCallbacks_.size())).c_str(), "");
    // Pop-from-back loop: callbacks may append new entries while we drain.
    while (!weakNodeNativeFinalizeCallbacks_.empty()) {
        auto callbackPair = weakNodeNativeFinalizeCallbacks_.back();
        weakNodeNativeFinalizeCallbacks_.pop_back();
        ASSERT(callbackPair.first != nullptr && callbackPair.second != nullptr);
        auto callback = callbackPair.first;
        (*callback)(callbackPair.second);
    }
    if (finalizeTaskCallback_ != nullptr) {
        finalizeTaskCallback_();
    }
    runningNativeFinalizeCallbacks_ = false;
}
427 
// Whether the global-leak-check session has been started (VM option).
bool JSThread::IsStartGlobalLeakCheck() const
{
    return vm_->GetJSOptions().IsStartGlobalLeakCheck();
}
432 
// Whether leak checking of global *object* handles is enabled (VM option).
bool JSThread::EnableGlobalObjectLeakCheck() const
{
    return vm_->GetJSOptions().EnableGlobalObjectLeakCheck();
}
437 
// Whether leak checking of global *primitive* handles is enabled (VM option).
bool JSThread::EnableGlobalPrimitiveLeakCheck() const
{
    return vm_->GetJSOptions().EnableGlobalPrimitiveLeakCheck();
}
442 
IsInRunningStateOrProfiling() const443 bool JSThread::IsInRunningStateOrProfiling() const
444 {
445     bool result = IsInRunningState();
446 #if defined(ECMASCRIPT_SUPPORT_HEAPPROFILER)
447     result |= vm_->GetHeapProfile() != nullptr;
448 #endif
449 #if defined(ECMASCRIPT_SUPPORT_CPUPROFILER)
450     result |= GetIsProfiling();
451 #endif
452     return result;
453 }
454 
// Flushes buffer's contents (plus a newline) to the stack-trace fd and
// empties the buffer; a no-op when no fd has been set.
void JSThread::WriteToStackTraceFd(std::ostringstream &buffer) const
{
    if (stackTraceFd_ < 0) {
        return;
    }
    buffer << std::endl;
    DPrintf(reinterpret_cast<fd_t>(stackTraceFd_), buffer.str());
    // Reset the buffer so the next message starts clean.
    buffer.str("");
}
464 
// Adopts fd as the stack-trace output descriptor, claiming fdsan ownership
// before storing it.
void JSThread::SetStackTraceFd(int32_t fd)
{
    FdsanExchangeOwnerTag(reinterpret_cast<fd_t>(fd));
    stackTraceFd_ = fd;
}
470 
// Syncs and closes the stack-trace fd if one is open, then marks it closed
// with the -1 sentinel.
void JSThread::CloseStackTraceFd()
{
    if (stackTraceFd_ != -1) {
        FSync(reinterpret_cast<fd_t>(stackTraceFd_));
        Close(reinterpret_cast<fd_t>(stackTraceFd_));
        stackTraceFd_ = -1;
    }
}
479 
SetJitCodeMap(JSTaggedType exception,MachineCode * machineCode,std::string & methodName,uintptr_t offset)480 void JSThread::SetJitCodeMap(JSTaggedType exception,  MachineCode* machineCode, std::string &methodName,
481     uintptr_t offset)
482 {
483     auto it = jitCodeMaps_.find(exception);
484     if (it != jitCodeMaps_.end()) {
485         it->second->push_back(std::make_tuple(machineCode, methodName, offset));
486     } else {
487         JitCodeVector *jitCode = new JitCodeVector {std::make_tuple(machineCode, methodName, offset)};
488         jitCodeMaps_.emplace(exception, jitCode);
489     }
490 }
491 
IterateMegaIC(RootVisitor & v)492 void JSThread::IterateMegaIC(RootVisitor &v)
493 {
494     if (glueData_.loadMegaICCache_ != nullptr) {
495         glueData_.loadMegaICCache_->Iterate(v);
496     }
497     if (glueData_.storeMegaICCache_ != nullptr) {
498         glueData_.storeMegaICCache_->Iterate(v);
499     }
500 }
501 
ClearMegaIC()502 void JSThread::ClearMegaIC()
503 {
504     if (glueData_.loadMegaICCache_ != nullptr) {
505         delete glueData_.loadMegaICCache_;
506         glueData_.loadMegaICCache_ = nullptr;
507     }
508     if (glueData_.storeMegaICCache_ != nullptr) {
509         delete glueData_.storeMegaICCache_;
510         glueData_.storeMegaICCache_ = nullptr;
511     }
512 }
513 
// Visits all strong GC roots owned by this thread: the pending exception,
// current env, hot-reload info, builtin entries, the interpreter stack,
// global handles, mega IC caches and the global constants table.
void JSThread::Iterate(RootVisitor &visitor)
{
    if (!glueData_.exception_.IsHole()) {
        visitor.VisitRoot(Root::ROOT_VM, ObjectSlot(ToUintPtr(&glueData_.exception_)));
    }
    if (!glueData_.currentEnv_.IsHole()) {
        visitor.VisitRoot(Root::ROOT_VM, ObjectSlot(ToUintPtr(&glueData_.currentEnv_)));
    }
    if (!hotReloadDependInfo_.IsUndefined()) {
        visitor.VisitRoot(Root::ROOT_VM, ObjectSlot(ToUintPtr(&hotReloadDependInfo_)));
    }
    visitor.VisitRangeRoot(Root::ROOT_VM,
        ObjectSlot(glueData_.builtinEntries_.Begin()), ObjectSlot(glueData_.builtinEntries_.End()));

    // visit stack roots
    FrameHandler frameHandler(this);
    frameHandler.Iterate(visitor);
    // visit tagged handle storage roots
    if (vm_->GetJSOptions().EnableGlobalLeakCheck()) {
        // Leak-check mode walks the debug storage and collects statistics.
        IterateHandleWithCheck(visitor);
    } else {
        OHOS_HITRACE(HITRACE_LEVEL_COMMERCIAL, "CMCGC::VisitRootGlobalRefHandle", "");
        size_t globalCount = 0;
        auto callback = [&visitor, &globalCount](Node *node) {
            JSTaggedValue value(node->GetObject());
            if (value.IsHeapObject()) {
                visitor.VisitRoot(Root::ROOT_HANDLE, ecmascript::ObjectSlot(node->GetObjectAddress()));
            }
            globalCount++;
        };
        globalStorage_->IterateUsageGlobal(callback);
        // Warn only once per process when the global-handle count crosses
        // the threshold; function-local statics keep the flag and limit.
        static bool hasCheckedGlobalCount = false;
        static const size_t WARN_GLOBAL_COUNT = 100000;
        if (!hasCheckedGlobalCount && globalCount >= WARN_GLOBAL_COUNT) {
            LOG_ECMA(WARN) << "Global reference count is " << globalCount << ",It exceed the upper limit 100000!";
            hasCheckedGlobalCount = true;
        }
    }

    IterateMegaIC(visitor);

    // The properties cache holds raw hclass pointers; it is invalidated
    // (cleared) rather than visited.
    if (glueData_.propertiesCache_ != nullptr) {
        glueData_.propertiesCache_->Clear();
    }

    if (glueData_.globalConst_ != nullptr) {
        glueData_.globalConst_->Iterate(visitor);
    }
}
563 
ClearCache()564 void JSThread::ClearCache()
565 {
566     JSHandle<GlobalEnv> env = GetGlobalEnv();
567     if (!env.GetTaggedValue().IsHole()) {
568         env->ClearCache(this);
569     }
570 }
571 
// Exposes the exception → JIT-code map to the given visitor.
void JSThread::IterateJitCodeMap(const JitCodeMapVisitor &jitCodeMapVisitor)
{
    jitCodeMapVisitor(jitCodeMaps_);
}
576 
// Leak-check variant of root iteration: visits handle and global-storage
// roots while gathering per-JSType statistics, and — when a leak-check
// session is being stopped — dumps suspected leaking globals to the
// stack-trace fd.
void JSThread::IterateHandleWithCheck(RootVisitor &visitor)
{
    size_t handleCount = vm_->IterateHandle(visitor);

    size_t globalCount = 0;
    // One counter slot per JSType value (TYPE_LAST inclusive).
    static const int JS_TYPE_SUM = static_cast<int>(JSType::TYPE_LAST) + 1;
    int typeCount[JS_TYPE_SUM] = { 0 };
    int primitiveCount = 0;
    // "Stop" means: the check option is on, the session is no longer in its
    // start phase, and a dump fd is available.
    bool isStopObjectLeakCheck = EnableGlobalObjectLeakCheck() && !IsStartGlobalLeakCheck() && stackTraceFd_ > 0;
    bool isStopPrimitiveLeakCheck = EnableGlobalPrimitiveLeakCheck() && !IsStartGlobalLeakCheck() && stackTraceFd_ > 0;
    std::ostringstream buffer;
    auto callback = [this, &visitor, &globalCount, &typeCount, &primitiveCount,
        isStopObjectLeakCheck, isStopPrimitiveLeakCheck, &buffer](DebugNode *node) {
        node->MarkCount();
        JSTaggedValue value(node->GetObject());
        if (value.IsHeapObject()) {
            visitor.VisitRoot(Root::ROOT_HANDLE, ecmascript::ObjectSlot(node->GetObjectAddress()));
            auto object = reinterpret_cast<TaggedObject *>(node->GetObject());
            typeCount[static_cast<int>(object->GetClass()->GetObjectType())]++;

            // Print global information about possible memory leaks.
            // You can print the global new stack within the range of the leaked global number.
            if (isStopObjectLeakCheck && node->GetGlobalNumber() > 0 && node->GetMarkCount() > 0) {
                buffer << "Global maybe leak object address:" << std::hex << object <<
                    ", type:" << JSHClass::DumpJSType(JSType(object->GetClass()->GetObjectType())) <<
                    ", node address:" << node << ", number:" << std::dec <<  node->GetGlobalNumber() <<
                    ", markCount:" << node->GetMarkCount();
                WriteToStackTraceFd(buffer);
            }
        } else {
            primitiveCount++;
            if (isStopPrimitiveLeakCheck && node->GetGlobalNumber() > 0 && node->GetMarkCount() > 0) {
                buffer << "Global maybe leak primitive:" << std::hex << value.GetRawData() <<
                    ", node address:" << node << ", number:" << std::dec <<  node->GetGlobalNumber() <<
                    ", markCount:" << node->GetMarkCount();
                WriteToStackTraceFd(buffer);
            }
        }
        globalCount++;
    };
    globalDebugStorage_->IterateUsageGlobal(callback);
    if (isStopObjectLeakCheck || isStopPrimitiveLeakCheck) {
        buffer << "Global leak check success!";
        WriteToStackTraceFd(buffer);
        CloseStackTraceFd();
    }
    // Determine whether memory leakage by checking handle and global count.
    LOG_ECMA(INFO) << "Iterate root handle count:" << handleCount << ", global handle count:" << globalCount;
    OPTIONAL_LOG(vm_, INFO) << "Global type Primitive count:" << primitiveCount;
    // Print global object type statistic.
    static const int MIN_COUNT_THRESHOLD = 50;
    for (int i = 0; i < JS_TYPE_SUM; i++) {
        if (typeCount[i] > MIN_COUNT_THRESHOLD) {
            OPTIONAL_LOG(vm_, INFO) << "Global type " << JSHClass::DumpJSType(JSType(i))
                                            << " count:" << typeCount[i];
        }
    }
}
635 
IterateWeakEcmaGlobalStorage(WeakVisitor & visitor)636 void JSThread::IterateWeakEcmaGlobalStorage(WeakVisitor &visitor)
637 {
638     auto callBack = [this, &visitor](WeakNode *node) {
639         JSTaggedValue value(node->GetObject());
640         if (!value.IsHeapObject()) {
641             return;
642         };
643         bool isAlive = visitor.VisitRoot(Root::ROOT_VM, ecmascript::ObjectSlot(node->GetObjectAddress()));
644         if (!isAlive) {
645             node->SetObject(JSTaggedValue::Undefined().GetRawData());
646             auto nativeFinalizeCallback = node->GetNativeFinalizeCallback();
647             if (nativeFinalizeCallback) {
648                 weakNodeNativeFinalizeCallbacks_.push_back(std::make_pair(nativeFinalizeCallback,
649                                                                           node->GetReference()));
650             }
651             auto freeGlobalCallBack = node->GetFreeGlobalCallback();
652             if (!freeGlobalCallBack) {
653                 // If no callback, dispose global immediately
654                 DisposeGlobalHandle(ToUintPtr(node));
655             } else {
656                 weakNodeFreeGlobalCallbacks_.push_back(std::make_pair(freeGlobalCallBack, node->GetReference()));
657             }
658         }
659     };
660 
661     if (!vm_->GetJSOptions().EnableGlobalLeakCheck()) {
662         globalStorage_->IterateWeakUsageGlobal(callBack);
663     } else {
664         globalDebugStorage_->IterateWeakUsageGlobal(callBack);
665     }
666 }
667 
// Sweeps weak global nodes with a forwarding WeakRootVisitor: a nullptr
// result means the referent died (slot reset, callbacks queued or run); a
// different pointer means the object moved and the slot is updated.
void JSThread::IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor, GCKind gcKind)
{
    auto callBack = [this, visitor, gcKind](WeakNode *node) {
        JSTaggedValue value(node->GetObject());
        if (!value.IsHeapObject()) {
            return;
        }
        auto object = value.GetTaggedObject();
        auto fwd = visitor(object);
        if (fwd == nullptr) {
            // Dead referent: reset the slot to undefined.
            node->SetObject(JSTaggedValue::Undefined().GetRawData());
            auto nativeFinalizeCallback = node->GetNativeFinalizeCallback();
            if (nativeFinalizeCallback) {
                weakNodeNativeFinalizeCallbacks_.push_back(std::make_pair(nativeFinalizeCallback,
                                                                          node->GetReference()));
            }
            auto freeGlobalCallBack = node->GetFreeGlobalCallback();
            if (!freeGlobalCallBack) {
                // If no callback, dispose global immediately
                DisposeGlobalHandle(ToUintPtr(node));
            } else if (gcKind == GCKind::SHARED_GC) {
                // For shared GC, free global should defer execute in its own thread
                weakNodeFreeGlobalCallbacks_.push_back(std::make_pair(freeGlobalCallBack, node->GetReference()));
            } else {
                node->CallFreeGlobalCallback();
            }
        } else if (fwd != object) {
            // Object moved: update the slot with the forwarded address.
            node->SetObject(JSTaggedValue(fwd).GetRawData());
        }
    };
    // Debug storage is in use when global leak checking is enabled.
    if (!vm_->GetJSOptions().EnableGlobalLeakCheck()) {
        globalStorage_->IterateWeakUsageGlobal(callBack);
    } else {
        globalDebugStorage_->IterateWeakUsageGlobal(callBack);
    }
}
706 
// Updates the weak exception keys of jitCodeMaps_ after GC: dead keys drop
// their entry (and delete the owned vector), moved keys are re-inserted
// under the forwarded address. Erasure uses the iterator returned by
// erase() so the map stays valid while iterating.
void JSThread::UpdateJitCodeMapReference(const WeakRootVisitor &visitor)
{
    auto it = jitCodeMaps_.begin();
    while (it != jitCodeMaps_.end()) {
        auto obj = reinterpret_cast<TaggedObject *>(it->first);
        auto fwd = visitor(obj);
        if (fwd == nullptr) {
            // Key object died: free the owned JitCodeVector and drop the entry.
            delete it->second;
            it = jitCodeMaps_.erase(it);
        } else if (fwd != obj) {
            // Key object moved: rehash under the new address, keep the vector.
            jitCodeMaps_.emplace(JSTaggedValue(fwd).GetRawData(), it->second);
            it = jitCodeMaps_.erase(it);
        } else {
            ++it;
        }
    }
}
724 
// Checks the *interpreter* stack: returns true (and raises a RangeError if
// none is pending) when sp has come within RESERVE_STACK_SIZE slots of the
// frame base. Skipped while cross-thread execution is enabled.
bool JSThread::DoStackOverflowCheck(const JSTaggedType *sp)
{
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    if (UNLIKELY(!IsCrossThreadExecutionEnable() && sp <= glueData_.frameBase_ + RESERVE_STACK_SIZE)) {
        vm_->CheckThread();
        LOG_ECMA(ERROR) << "Stack overflow! Remaining stack size is: " << (sp - glueData_.frameBase_);
        // Don't overwrite an already-pending exception.
        if (LIKELY(!HasPendingException())) {
            ObjectFactory *factory = vm_->GetFactory();
            // StackCheck::NO: building the error must not re-enter this check.
            JSHandle<JSObject> error = factory->GetJSError(base::ErrorType::RANGE_ERROR,
                                                           "Stack overflow!", StackCheck::NO);
            SetException(error.GetTaggedValue());
        }
        return true;
    }
    return false;
}
741 
// Checks the *native* stack: returns true (and raises a RangeError if none
// is pending) when the current stack position has crossed the configured
// limit. Skipped while cross-thread execution is enabled.
bool JSThread::DoStackLimitCheck()
{
    if (UNLIKELY(!IsCrossThreadExecutionEnable() && GetCurrentStackPosition() < GetStackLimit())) {
        vm_->CheckThread();
        LOG_ECMA(ERROR) << "Stack overflow! current:" << GetCurrentStackPosition() << " limit:" << GetStackLimit();
        // Don't overwrite an already-pending exception.
        if (LIKELY(!HasPendingException())) {
            ObjectFactory *factory = vm_->GetFactory();
            // StackCheck::NO: building the error must not re-enter this check.
            JSHandle<JSObject> error = factory->GetJSError(base::ErrorType::RANGE_ERROR,
                                                           "Stack overflow!", StackCheck::NO);
            SetException(error.GetTaggedValue());
        }
        return true;
    }
    return false;
}
757 
SetInitialBuiltinHClass(BuiltinTypeId type,JSHClass * builtinHClass,JSHClass * instanceHClass,JSHClass * prototypeHClass,JSHClass * prototypeOfPrototypeHClass,JSHClass * extraHClass)758 void JSThread::SetInitialBuiltinHClass(
759     BuiltinTypeId type, JSHClass *builtinHClass, JSHClass *instanceHClass,
760     JSHClass *prototypeHClass, JSHClass *prototypeOfPrototypeHClass, JSHClass *extraHClass)
761 {
762     size_t index = BuiltinHClassEntries::GetEntryIndex(type);
763     auto &entry = glueData_.builtinHClassEntries_.entries[index];
764     LOG_ECMA(DEBUG) << "JSThread::SetInitialBuiltinHClass: "
765                     << "Builtin = " << ToString(type)
766                     << ", builtinHClass = " << builtinHClass
767                     << ", instanceHClass = " << instanceHClass
768                     << ", prototypeHClass = " << prototypeHClass
769                     << ", prototypeOfPrototypeHClass = " << prototypeOfPrototypeHClass
770                     << ", extraHClass = " << extraHClass;
771     entry.builtinHClass = builtinHClass;
772     entry.instanceHClass = instanceHClass;
773     entry.prototypeHClass = prototypeHClass;
774     entry.prototypeOfPrototypeHClass = prototypeOfPrototypeHClass;
775     entry.extraHClass = extraHClass;
776 }
777 
SetInitialBuiltinGlobalHClass(JSHClass * builtinHClass,GlobalIndex globalIndex)778 void JSThread::SetInitialBuiltinGlobalHClass(
779     JSHClass *builtinHClass, GlobalIndex globalIndex)
780 {
781     auto &map = ctorHclassEntries_;
782     map[builtinHClass] = globalIndex;
783 }
784 
GetBuiltinHClass(BuiltinTypeId type) const785 JSHClass *JSThread::GetBuiltinHClass(BuiltinTypeId type) const
786 {
787     size_t index = BuiltinHClassEntries::GetEntryIndex(type);
788     return glueData_.builtinHClassEntries_.entries[index].builtinHClass;
789 }
790 
GetBuiltinInstanceHClass(BuiltinTypeId type) const791 JSHClass *JSThread::GetBuiltinInstanceHClass(BuiltinTypeId type) const
792 {
793     size_t index = BuiltinHClassEntries::GetEntryIndex(type);
794     return glueData_.builtinHClassEntries_.entries[index].instanceHClass;
795 }
796 
GetBuiltinExtraHClass(BuiltinTypeId type) const797 JSHClass *JSThread::GetBuiltinExtraHClass(BuiltinTypeId type) const
798 {
799     size_t index = BuiltinHClassEntries::GetEntryIndex(type);
800     return glueData_.builtinHClassEntries_.entries[index].extraHClass;
801 }
802 
GetBuiltinPrototypeHClass(BuiltinTypeId type) const803 JSHClass *JSThread::GetBuiltinPrototypeHClass(BuiltinTypeId type) const
804 {
805     size_t index = BuiltinHClassEntries::GetEntryIndex(type);
806     return glueData_.builtinHClassEntries_.entries[index].prototypeHClass;
807 }
808 
GetBuiltinPrototypeOfPrototypeHClass(BuiltinTypeId type) const809 JSHClass *JSThread::GetBuiltinPrototypeOfPrototypeHClass(BuiltinTypeId type) const
810 {
811     size_t index = BuiltinHClassEntries::GetEntryIndex(type);
812     return glueData_.builtinHClassEntries_.entries[index].prototypeOfPrototypeHClass;
813 }
814 
CheckSwitchDebuggerBCStub()815 void JSThread::CheckSwitchDebuggerBCStub()
816 {
817     auto isDebug = vm_->GetJsDebuggerManager()->IsDebugMode();
818     if (LIKELY(!isDebug)) {
819         if (glueData_.bcStubEntries_.Get(0) == glueData_.bcStubEntries_.Get(1)) {
820             for (size_t i = 0; i < BCStubEntries::BC_HANDLER_COUNT; i++) {
821                 auto stubEntry = glueData_.bcDebuggerStubEntries_.Get(i);
822                 auto debuggerStubEbtry = glueData_.bcStubEntries_.Get(i);
823                 glueData_.bcStubEntries_.Set(i, stubEntry);
824                 glueData_.bcDebuggerStubEntries_.Set(i, debuggerStubEbtry);
825             }
826         }
827     } else {
828         if (glueData_.bcDebuggerStubEntries_.Get(0) == glueData_.bcDebuggerStubEntries_.Get(1)) {
829             for (size_t i = 0; i < BCStubEntries::BC_HANDLER_COUNT; i++) {
830                 auto stubEntry = glueData_.bcStubEntries_.Get(i);
831                 auto debuggerStubEbtry = glueData_.bcDebuggerStubEntries_.Get(i);
832                 glueData_.bcDebuggerStubEntries_.Set(i, stubEntry);
833                 glueData_.bcStubEntries_.Set(i, debuggerStubEbtry);
834             }
835         }
836     }
837 }
838 
void JSThread::CheckOrSwitchPGOStubs()
{
    // Keep the bytecode stub table in sync with the PGO profiler switch:
    // install profile stubs when profiling is enabled, restore normal stubs
    // when it is disabled. A thread currently on STW-copy stubs is first
    // switched back to normal before installing profile stubs; when leaving
    // profiling, STW-copy stubs are re-installed (non-CMC GC only).
    bool isSwitch = false;
    bool isSwitchToNormal = false;
    if (IsPGOProfilerEnable()) {
        if (GetBCStubStatus() == BCStubStatus::NORMAL_BC_STUB) {
            SetBCStubStatus(BCStubStatus::PROFILE_BC_STUB);
            isSwitch = true;
        } else if (GetBCStubStatus() == BCStubStatus::STW_COPY_BC_STUB) {
            // Undo the STW-copy swap first so the profile swap below starts
            // from the normal table.
            SwitchStwCopyBCStubs(false);
            ASSERT(GetBCStubStatus() == BCStubStatus::NORMAL_BC_STUB);
            SetBCStubStatus(BCStubStatus::PROFILE_BC_STUB);
            isSwitch = true;
        }
    } else {
        if (GetBCStubStatus() == BCStubStatus::PROFILE_BC_STUB) {
            SetBCStubStatus(BCStubStatus::NORMAL_BC_STUB);
            isSwitch = true;
            isSwitchToNormal = true;
        }
    }
    if (isSwitch) {
        Address curAddress;
        // Exchange each profiler stub entry with its counterpart; the swap is
        // its own inverse, so the same code handles both directions.
#define SWITCH_PGO_STUB_ENTRY(fromName, toName, ...)                                                        \
        curAddress = GetBCStubEntry(BytecodeStubCSigns::ID_##fromName);                                     \
        SetBCStubEntry(BytecodeStubCSigns::ID_##fromName, GetBCStubEntry(BytecodeStubCSigns::ID_##toName)); \
        SetBCStubEntry(BytecodeStubCSigns::ID_##toName, curAddress);
        ASM_INTERPRETER_BC_PROFILER_STUB_LIST(SWITCH_PGO_STUB_ENTRY)
#undef SWITCH_PGO_STUB_ENTRY
    }
    if (isSwitchToNormal && !g_isEnableCMCGC) {
        SwitchStwCopyBCStubs(true);
    }
}
873 
void JSThread::SwitchJitProfileStubs(bool isEnablePgo)
{
    // Install JIT-profiling bytecode stubs. If full PGO profiling is being
    // enabled, delegate to the PGO switch instead (which also sets the flag).
    if (isEnablePgo) {
        SetPGOProfilerEnable(true);
        CheckOrSwitchPGOStubs();
        return;
    }
    bool isSwitch = false;
    if (GetBCStubStatus() == BCStubStatus::NORMAL_BC_STUB) {
        SetBCStubStatus(BCStubStatus::JIT_PROFILE_BC_STUB);
        isSwitch = true;
    } else if (GetBCStubStatus() == BCStubStatus::STW_COPY_BC_STUB) {
        // Undo the STW-copy swap first so the JIT-profile swap below starts
        // from the normal table.
        SwitchStwCopyBCStubs(false);
        ASSERT(GetBCStubStatus() == BCStubStatus::NORMAL_BC_STUB);
        SetBCStubStatus(BCStubStatus::JIT_PROFILE_BC_STUB);
        isSwitch = true;
    }
    if (isSwitch) {
        Address curAddress;
        // Exchange each JIT-profiler stub entry with its counterpart.
#define SWITCH_PGO_STUB_ENTRY(fromName, toName, ...)                                                        \
        curAddress = GetBCStubEntry(BytecodeStubCSigns::ID_##fromName);                                     \
        SetBCStubEntry(BytecodeStubCSigns::ID_##fromName, GetBCStubEntry(BytecodeStubCSigns::ID_##toName)); \
        SetBCStubEntry(BytecodeStubCSigns::ID_##toName, curAddress);
        ASM_INTERPRETER_BC_JIT_PROFILER_STUB_LIST(SWITCH_PGO_STUB_ENTRY)
#undef SWITCH_PGO_STUB_ENTRY
    }
}
901 
void JSThread::SwitchStwCopyBCStubs(bool isStwCopy)
{
    // Toggle between normal and STW-copy bytecode stubs. Only acts when the
    // current status matches the opposite of the requested mode, so repeated
    // calls in the same direction are no-ops.
    bool isSwitch = false;
    if (isStwCopy && GetBCStubStatus() == BCStubStatus::NORMAL_BC_STUB) {
        SetBCStubStatus(BCStubStatus::STW_COPY_BC_STUB);
        isSwitch = true;
    } else if (!isStwCopy && GetBCStubStatus() == BCStubStatus::STW_COPY_BC_STUB) {
        SetBCStubStatus(BCStubStatus::NORMAL_BC_STUB);
        isSwitch = true;
    }
    if (isSwitch) {
        Address curAddress;
        // Exchange each entry with its StwCopy counterpart; the swap is its
        // own inverse, so one loop serves both directions.
#define SWITCH_STW_COPY_STUB_ENTRY(base)                                                                    \
        curAddress = GetBCStubEntry(BytecodeStubCSigns::ID_##base);                                         \
        SetBCStubEntry(BytecodeStubCSigns::ID_##base,                                                       \
                       GetBCStubEntry(BytecodeStubCSigns::ID_##base##StwCopy));                             \
        SetBCStubEntry(BytecodeStubCSigns::ID_##base##StwCopy, curAddress);
        ASM_INTERPRETER_BC_STW_COPY_STUB_LIST(SWITCH_STW_COPY_STUB_ENTRY)
#undef SWITCH_STW_COPY_STUB_ENTRY
    }
}
923 
void JSThread::SwitchStwCopyCommonStubs(bool isStwCopy)
{
    // Toggle between normal and STW-copy common stubs; same swap protocol as
    // SwitchStwCopyBCStubs but over the fast-stub table.
    bool isSwitch = false;
    if (isStwCopy && GetCommonStubStatus() == CommonStubStatus::NORMAL_COMMON_STUB) {
        SetCommonStubStatus(CommonStubStatus::STW_COPY_COMMON_STUB);
        isSwitch = true;
    } else if (!isStwCopy && GetCommonStubStatus() == CommonStubStatus::STW_COPY_COMMON_STUB) {
        SetCommonStubStatus(CommonStubStatus::NORMAL_COMMON_STUB);
        isSwitch = true;
    }
    if (isSwitch) {
        Address curAddress;
        // Exchange each entry with its StwCopy counterpart (self-inverse swap).
#define SWITCH_STW_COPY_STUB_ENTRY(base)                                                                    \
        curAddress = GetFastStubEntry(CommonStubCSigns::base);                                              \
        SetFastStubEntry(CommonStubCSigns::base,                                                            \
                         GetFastStubEntry(CommonStubCSigns::base##StwCopy));                                \
        SetFastStubEntry(CommonStubCSigns::base##StwCopy, curAddress);
        COMMON_STW_COPY_STUB_LIST(SWITCH_STW_COPY_STUB_ENTRY)
#undef SWITCH_STW_COPY_STUB_ENTRY
    }
}
945 
void JSThread::SwitchStwCopyBuiltinsStubs(bool isStwCopy)
{
    // Toggle between normal and STW-copy builtins stubs; same swap protocol
    // as SwitchStwCopyBCStubs but over the builtins-stub table.
    bool isSwitch = false;
    if (isStwCopy && GetBuiltinsStubStatus() == BuiltinsStubStatus::NORMAL_BUILTINS_STUB) {
        SetBuiltinsStubStatus(BuiltinsStubStatus::STW_COPY_BUILTINS_STUB);
        isSwitch = true;
    } else if (!isStwCopy && GetBuiltinsStubStatus() == BuiltinsStubStatus::STW_COPY_BUILTINS_STUB) {
        SetBuiltinsStubStatus(BuiltinsStubStatus::NORMAL_BUILTINS_STUB);
        isSwitch = true;
    }
    if (isSwitch) {
        Address curAddress;
        // Exchange each entry with its StwCopy counterpart (self-inverse swap).
        // The _DYN form adapts list entries that carry a type prefix.
#define SWITCH_STW_COPY_STUB_ENTRY(base)                                                                    \
        curAddress = GetBuiltinStubEntry(BuiltinsStubCSigns::ID_##base);                                    \
        SetBuiltinStubEntry(BuiltinsStubCSigns::ID_##base,                                                  \
                            GetBuiltinStubEntry(BuiltinsStubCSigns::ID_##base##StwCopy));                   \
        SetBuiltinStubEntry(BuiltinsStubCSigns::ID_##base##StwCopy, curAddress);

#define SWITCH_STW_COPY_STUB_ENTRY_DYN(base, type, ...)                                                     \
        SWITCH_STW_COPY_STUB_ENTRY(type##base)

        BUILTINS_STW_COPY_STUB_LIST(SWITCH_STW_COPY_STUB_ENTRY, SWITCH_STW_COPY_STUB_ENTRY_DYN, \
            SWITCH_STW_COPY_STUB_ENTRY)
#undef SWITCH_STW_COPY_STUB_ENTRY_DYN
#undef SWITCH_STW_COPY_STUB_ENTRY
    }
}
973 
TerminateExecution()974 void JSThread::TerminateExecution()
975 {
976     // set the TERMINATE_ERROR to exception
977     ObjectFactory *factory = vm_->GetFactory();
978     JSHandle<JSObject> error = factory->GetJSError(ErrorType::TERMINATION_ERROR,
979         "Terminate execution!", StackCheck::NO);
980     SetException(error.GetTaggedValue());
981 }
982 
void JSThread::CheckAndPassActiveBarrier()
{
    // If a suspend-all barrier was armed for this thread, acknowledge it so
    // the suspending thread stops waiting on us. Non-CMC GC only.
    ASSERT(!g_isEnableCMCGC);
    ThreadStateAndFlags oldStateAndFlags;
    // Plain (non-atomic) snapshot of the packed state/flags word; the barrier
    // is re-checked under suspendLock_ inside PassSuspendBarrier().
    oldStateAndFlags.asNonvolatileInt = glueData_.stateAndFlags_.asInt;
    if ((oldStateAndFlags.asNonvolatileStruct.flags & ThreadFlag::ACTIVE_BARRIER) != 0) {
        PassSuspendBarrier();
    }
}
992 
PassSuspendBarrier()993 bool JSThread::PassSuspendBarrier()
994 {
995     // Use suspendLock_ to avoid data-race between suspend-all-thread and suspended-threads.
996     LockHolder lock(suspendLock_);
997     if (suspendBarrier_ != nullptr) {
998         suspendBarrier_->PassStrongly();
999         suspendBarrier_ = nullptr;
1000         ClearFlag(ThreadFlag::ACTIVE_BARRIER);
1001         return true;
1002     }
1003     return false;
1004 }
1005 
ShouldHandleMarkingFinishedInSafepoint()1006 bool JSThread::ShouldHandleMarkingFinishedInSafepoint()
1007 {
1008     auto heap = const_cast<Heap *>(vm_->GetHeap());
1009     return IsMarkFinished() && heap->GetConcurrentMarker()->IsTriggeredConcurrentMark() &&
1010            !heap->GetOnSerializeEvent() && !heap->InSensitiveStatus() && !heap->CheckIfNeedStopCollectionByStartup();
1011 }
1012 
bool JSThread::CheckSafepoint()
{
    // Safepoint poll: service termination, suspension, VM-control and JIT
    // install requests, then decide whether a GC should run. Returns true iff
    // a GC was triggered during this poll.
    ResetCheckSafePointStatus();

    if UNLIKELY(HasTerminationRequest()) {
        TerminateExecution();
        SetVMTerminated(true);
        SetTerminationRequest(false);
    }

    if UNLIKELY(HasSuspendRequest()) {
        WaitSuspension();
    }

    // vmThreadControl_ 's thread_ is current JSThread's this.
    if UNLIKELY(VMNeedSuspension()) {
        vmThreadControl_->SuspendVM();
    }
    if (HasInstallMachineCode()) {
        // Install freshly compiled JIT code under the JIT GC lock.
        Jit::JitGCLockHolder lock(this);
        vm_->GetJit()->InstallTasks(this);
        SetInstallMachineCode(false);
    }

#if defined(ECMASCRIPT_SUPPORT_CPUPROFILER)
    if UNLIKELY(needProfiling_.load() && !isProfiling_) {
        DFXJSNApi::StartCpuProfilerForFile(vm_, profileName_, CpuProfiler::INTERVAL_OF_INNER_START);
        SetNeedProfiling(false);
    }
#endif // ECMASCRIPT_SUPPORT_CPUPROFILER
    bool gcTriggered = false;
#ifndef NDEBUG
    // Debug builds can force a full GC at every safepoint for stress testing.
    if (vm_->GetJSOptions().EnableForceGC()) {
        if (g_isEnableCMCGC) {
            common::BaseRuntime::RequestGC(common::GC_REASON_USER, false,
                                           common::GC_TYPE_FULL);  // Trigger Full CMC here
        } else {
            vm_->CollectGarbage(TriggerGCType::FULL_GC);
        }
        gcTriggered = true;
    }
#endif
    auto heap = const_cast<Heap *>(vm_->GetHeap());
    // Handle exit app sensitive scene
    heap->HandleExitHighSensitiveEvent();

    // Do not trigger local gc during the shared gc processRset process.
    if UNLIKELY(IsProcessingLocalToSharedRset()) {
        return false;
    }
    // After concurrent mark finish, should trigger gc here to avoid create much floating garbage
    // except in serialize or high sensitive event
    if UNLIKELY(ShouldHandleMarkingFinishedInSafepoint()) {
        // Suppress OOM while finishing the mark so the collection itself
        // cannot throw; restore afterwards.
        heap->SetCanThrowOOMError(false);
        heap->GetConcurrentMarker()->HandleMarkingFinished();
        heap->SetCanThrowOOMError(true);
        gcTriggered = true;
    }
    return gcTriggered;
}
1073 
CheckJSTaggedType(JSTaggedType value) const1074 void JSThread::CheckJSTaggedType(JSTaggedType value) const
1075 {
1076     if (JSTaggedValue(value).IsHeapObject() &&
1077         !vm_->GetHeap()->IsAlive(reinterpret_cast<TaggedObject *>(value))) {
1078         LOG_FULL(FATAL) << "value:" << value << " is invalid!";
1079     }
1080 }
1081 
CpuProfilerCheckJSTaggedType(JSTaggedType value) const1082 bool JSThread::CpuProfilerCheckJSTaggedType(JSTaggedType value) const
1083 {
1084     if (JSTaggedValue(value).IsHeapObject() &&
1085         !vm_->GetHeap()->IsAlive(reinterpret_cast<TaggedObject *>(value))) {
1086         return false;
1087     }
1088     return true;
1089 }
1090 
1091 
GetAndClearCallSiteReturnAddr(uintptr_t callSiteSp)1092 uintptr_t JSThread::GetAndClearCallSiteReturnAddr(uintptr_t callSiteSp)
1093 {
1094     auto iter = callSiteSpToReturnAddrTable_.find(callSiteSp);
1095     ASSERT(iter != callSiteSpToReturnAddrTable_.end());
1096     uintptr_t returnAddr = iter->second;
1097     callSiteSpToReturnAddrTable_.erase(iter);
1098     return returnAddr;
1099 }
1100 
IsLegalAsmSp(uintptr_t sp) const1101 bool JSThread::IsLegalAsmSp(uintptr_t sp) const
1102 {
1103     uint64_t bottom = GetStackLimit() - EcmaParamConfiguration::GetDefaultReservedStackSize();
1104     uint64_t top = GetStackStart() + EcmaParamConfiguration::GetAllowedUpperStackDiff();
1105     return (bottom <= sp && sp <= top);
1106 }
1107 
IsLegalThreadSp(uintptr_t sp) const1108 bool JSThread::IsLegalThreadSp(uintptr_t sp) const
1109 {
1110     uintptr_t bottom = reinterpret_cast<uintptr_t>(glueData_.frameBase_);
1111     size_t maxStackSize = vm_->GetEcmaParamConfiguration().GetMaxStackSize();
1112     uintptr_t top = bottom + maxStackSize;
1113     return (bottom <= sp && sp <= top);
1114 }
1115 
IsLegalSp(uintptr_t sp) const1116 bool JSThread::IsLegalSp(uintptr_t sp) const
1117 {
1118     return IsLegalAsmSp(sp) || IsLegalThreadSp(sp);
1119 }
1120 
bool JSThread::IsMainThread()
{
    // On Linux-like targets the process's main thread has tid == pid;
    // platforms without the gettid syscall are treated as always-main.
#if !defined(PANDA_TARGET_WINDOWS) && !defined(PANDA_TARGET_MACOS) && !defined(PANDA_TARGET_IOS)
    return getpid() == syscall(SYS_gettid);
#else
    return true;
#endif
}
1129 
void JSThread::ClearVMCachedConstantPool()
{
    // Thin delegate: drop the VM's cached constant pool.
    vm_->ClearCachedConstantPool();
}
1134 
JSHandle<GlobalEnv> JSThread::GetGlobalEnv() const
{
    // currentEnv is GlobalEnv now
    // Build the handle directly from the address of the glue-data slot that
    // stores the current env; the slot itself serves as the handle's storage.
    return JSHandle<GlobalEnv>(ToUintPtr(&glueData_.currentEnv_));
}
1140 
JSHandle<DependentInfos> JSThread::GetDependentInfo() const
{
    // Build a handle directly from the address of the member field that holds
    // the hot-reload dependency record; the field acts as the handle slot.
    return JSHandle<DependentInfos>(ToUintPtr(&hotReloadDependInfo_));
}
1145 
void JSThread::SetDependentInfo(JSTaggedValue info)
{
    // Replace the hot-reload dependency record wholesale.
    hotReloadDependInfo_ = info;
}
1150 
GetOrCreateThreadDependentInfo()1151 JSHandle<DependentInfos> JSThread::GetOrCreateThreadDependentInfo()
1152 {
1153     if (hotReloadDependInfo_.IsUndefined()) {
1154         return GetEcmaVM()->GetFactory()->NewDependentInfos(0);
1155     }
1156     return GetDependentInfo();
1157 }
1158 
NotifyHotReloadDeoptimize()1159 void JSThread::NotifyHotReloadDeoptimize()
1160 {
1161     if (!hotReloadDependInfo_.IsHeapObject()) {
1162         return;
1163     }
1164     DependentInfos::TriggerLazyDeoptimization(GetDependentInfo(),
1165         this, DependentInfos::DependentState::HOTRELOAD_PATCHMAIN);
1166     hotReloadDependInfo_ = JSTaggedValue::Undefined();
1167 }
1168 
PropertiesCache *JSThread::GetPropertiesCache() const
{
    // Per-thread properties lookup cache stored in glue data.
    return glueData_.propertiesCache_;
}
1173 
MegaICCache *JSThread::GetLoadMegaICCache() const
{
    // Megamorphic inline cache for property loads, stored in glue data.
    return glueData_.loadMegaICCache_;
}
1178 
MegaICCache *JSThread::GetStoreMegaICCache() const
{
    // Megamorphic inline cache for property stores, stored in glue data.
    return glueData_.storeMegaICCache_;
}
1183 
IsReadyToUpdateDetector() const1184 bool JSThread::IsReadyToUpdateDetector() const
1185 {
1186     return !GetEnableLazyBuiltins() && !InGlobalEnvInitialize();
1187 }
1188 
GetOrCreateRegExpCacheArea()1189 Area *JSThread::GetOrCreateRegExpCacheArea()
1190 {
1191     if (regExpCacheArea_ == nullptr) {
1192         regExpCacheArea_ = nativeAreaAllocator_->AllocateArea(MAX_REGEXP_CACHE_SIZE);
1193     }
1194     return regExpCacheArea_;
1195 }
1196 
void JSThread::InitializeBuiltinObject(const JSHandle<GlobalEnv>& env, const std::string& key)
{
    // Cache the property box and hclass of one global builtin (looked up by
    // name) in glueData_.builtinEntries_, so compiled code can validate
    // builtin identity without a property lookup.
    BuiltinIndex& builtins = BuiltinIndex::GetInstance();
    auto index = builtins.GetBuiltinIndex(key);
    ASSERT(index != BuiltinIndex::NOT_FOUND);
    /*
        If using `auto globalObject = vm_->GetGlobalEnv()->GetGlobalObject()` here,
        it will cause incorrect result in multi-context environment. For example:

        ```ts
        let obj = {};
        print(obj instanceof Object); // instead of true, will print false
        ```
    */
    auto globalObject = env->GetGlobalObject();
    auto jsObject = JSHandle<JSObject>(this, globalObject);
    auto box = jsObject->GetGlobalPropertyBox(this, key);
    if (box == nullptr) {
        // The builtin is not present on this global object; leave the entry alone.
        return;
    }
    auto& entry = glueData_.builtinEntries_.builtin_[index];
    entry.box_ = JSTaggedValue::Cast(box);
    auto builtin = JSHandle<JSObject>(this, box->GetValue(this));
    auto hclass = builtin->GetJSHClass();
    entry.hClass_ = JSTaggedValue::Cast(hclass);
}
1223 
InitializeBuiltinObject(const JSHandle<GlobalEnv> & env)1224 void JSThread::InitializeBuiltinObject(const JSHandle<GlobalEnv>& env)
1225 {
1226     BuiltinIndex& builtins = BuiltinIndex::GetInstance();
1227     for (auto key: builtins.GetBuiltinKeys()) {
1228         InitializeBuiltinObject(env, key);
1229     }
1230 }
1231 
IsPropertyCacheCleared() const1232 bool JSThread::IsPropertyCacheCleared() const
1233 {
1234     if (!GetPropertiesCache()->IsCleared()) {
1235         return false;
1236     }
1237     return true;
1238 }
1239 
ThreadState JSThread::UpdateState(ThreadState newState)
{
    // Transition the thread's state machine and return the previous state.
    // Leaving RUNNING and entering RUNNING have dedicated paths because they
    // interact with suspend requests and barriers; any other transition is a
    // plain state store.
    ASSERT(!IsEnableCMCGC());
    ThreadState oldState = GetState();
    if (LIKELY(oldState != newState)) {
        if (oldState == ThreadState::RUNNING) {
            TransferFromRunningToSuspended(newState);
        } else if (newState == ThreadState::RUNNING) {
            TransferToRunning();
        } else {
            // Here can be some extra checks...
            StoreState(newState);
        }
    }
    return oldState;
}
1256 
void JSThread::SuspendThread(bool internalSuspend, SuspendBarrier* barrier)
{
    // Ask this thread to suspend at its next safepoint. Requests nest via
    // suspendCount_; only the first one raises SUSPEND_REQUEST. An optional
    // barrier is armed so the suspender can wait until the thread parks.
    // Non-CMC GC only.
    ASSERT(!g_isEnableCMCGC);
    LockHolder lock(suspendLock_);
    if (!internalSuspend) {
        // do smth here if we want to combine internal and external suspension
    }

    uint32_t old_count = suspendCount_++;
    if (old_count == 0) {
        // First request: make the safepoint poll observe the suspend flag.
        SetFlag(ThreadFlag::SUSPEND_REQUEST);
        SetCheckSafePointStatus();
    }

    if (barrier != nullptr) {
        // Only one barrier may be armed at a time.
        ASSERT(suspendBarrier_ == nullptr);
        suspendBarrier_ = barrier;
        SetFlag(ThreadFlag::ACTIVE_BARRIER);
        SetCheckSafePointStatus();
    }
}
1278 
void JSThread::ResumeThread(bool internalSuspend)
{
    // Undo one SuspendThread request; the last resume clears the suspend flag
    // and wakes the thread if it is parked in WaitSuspension. Non-CMC GC only.
    ASSERT(!g_isEnableCMCGC);
    LockHolder lock(suspendLock_);
    if (!internalSuspend) {
        // do smth here if we want to combine internal and external suspension
    }
    if (suspendCount_ > 0) {
        suspendCount_--;
        if (suspendCount_ == 0) {
            ClearFlag(ThreadFlag::SUSPEND_REQUEST);
            ResetCheckSafePointStatus();
        }
    }
    // Wake a possibly-waiting thread even if the count is still non-zero;
    // it re-checks the count under the lock.
    suspendCondVar_.Signal();
}
1295 
void JSThread::WaitSuspension()
{
    // Park this thread until all pending suspend requests are resumed.
    // Under CMC GC the thread holder implements the protocol; otherwise the
    // thread moves to IS_SUSPENDED, waits on the condvar with a timeout (so a
    // missed signal cannot hang it forever), then restores its old state.
    if (UNLIKELY(g_isEnableCMCGC)) {
        GetThreadHolder()->WaitSuspension();
    } else {
        constexpr int TIMEOUT = 100;
        ThreadState oldState = GetState();
        UpdateState(ThreadState::IS_SUSPENDED);
        {
            OHOS_HITRACE(HITRACE_LEVEL_COMMERCIAL, "SuspendTime::WaitSuspension", "");
            LockHolder lock(suspendLock_);
            while (suspendCount_ > 0) {
                suspendCondVar_.TimedWait(&suspendLock_, TIMEOUT);
                // we need to do smth if Runtime is terminating at this point
            }
        }
        UpdateState(oldState);
    }
}
1315 
ManagedCodeBegin()1316 void JSThread::ManagedCodeBegin()
1317 {
1318     ASSERT(!IsInManagedState());
1319     if (LIKELY(!g_isEnableCMCGC)) {
1320         UpdateState(ThreadState::RUNNING);
1321     } else {
1322         GetThreadHolder()->TransferToRunning();
1323     }
1324 }
1325 
ManagedCodeEnd()1326 void JSThread::ManagedCodeEnd()
1327 {
1328     ASSERT(IsInManagedState());
1329     if (LIKELY(!g_isEnableCMCGC)) {
1330         UpdateState(ThreadState::NATIVE);
1331     } else {
1332         GetThreadHolder()->TransferToNative();
1333     }
1334 }
1335 
void JSThread::TransferFromRunningToSuspended(ThreadState newState)
{
    // Leave RUNNING: publish the suspended state first, then acknowledge any
    // armed suspend barrier so the suspending thread can proceed.
    ASSERT(!g_isEnableCMCGC);
    ASSERT(currentThread == this);
    StoreSuspendedState(newState);
    CheckAndPassActiveBarrier();
}
1343 
UpdateStackInfo(void * stackInfo,StackInfoOpKind opKind)1344 void JSThread::UpdateStackInfo(void *stackInfo, StackInfoOpKind opKind)
1345 {
1346     switch (opKind) {
1347         case SwitchToSubStackInfo: {
1348             StackInfo *subStackInfo = reinterpret_cast<StackInfo*>(stackInfo);
1349             if (subStackInfo == nullptr) {
1350                 LOG_ECMA(ERROR) << "fatal error, subStack not exist";
1351                 break;
1352             }
1353             // process stackLimit
1354             mainStackInfo_.stackLimit = glueData_.stackLimit_;
1355             glueData_.stackLimit_ = subStackInfo->stackLimit;
1356             // process lastLeaveFrame
1357             mainStackInfo_.lastLeaveFrame = reinterpret_cast<uint64_t>(glueData_.leaveFrame_);
1358             glueData_.leaveFrame_ =
1359                 reinterpret_cast<uint64_t *>(subStackInfo->lastLeaveFrame);
1360             isInSubStack_ = true;
1361 
1362             LOG_ECMA(DEBUG) << "Switch to subStack: "
1363                             << ", stack limit: " << glueData_.stackLimit_
1364                             << ", stack lastLeaveFrame: " << glueData_.leaveFrame_;
1365             break;
1366         }
1367         case SwitchToMainStackInfo: {
1368             // process stackLimit
1369             glueData_.stackLimit_ = mainStackInfo_.stackLimit;
1370             // process lastLeaveFrame
1371             glueData_.leaveFrame_ = reinterpret_cast<uint64_t *>(mainStackInfo_.lastLeaveFrame);
1372             isInSubStack_ = false;
1373 
1374             LOG_ECMA(DEBUG) << "Switch to mainStack: "
1375                             << ", main stack limit: " << mainStackInfo_.stackLimit
1376                             << ", main stack lastLeaveFrame: " << mainStackInfo_.lastLeaveFrame;
1377             break;
1378         }
1379         default:
1380             LOG_ECMA(FATAL) << "this branch is unreachable";
1381             UNREACHABLE();
1382     }
1383 }
1384 
void JSThread::TransferToRunning()
{
    // Enter RUNNING (non-daemon threads). StoreRunningState blocks until any
    // pending suspend request or barrier has been served.
    ASSERT(!g_isEnableCMCGC);
    ASSERT(!IsDaemonThread());
    ASSERT(currentThread == this);
    StoreRunningState(ThreadState::RUNNING);
    // Invoke free weak global callback when thread switch to running
    if (!weakNodeFreeGlobalCallbacks_.empty()) {
        InvokeWeakNodeFreeGlobalCallBack();
    }
    // Retry a pending shared-limit full-mark request; the helper reports
    // whether the request remains pending.
    if (fullMarkRequest_) {
        fullMarkRequest_ = const_cast<Heap*>(vm_->GetHeap())->TryTriggerFullMarkBySharedLimit();
    }
}
1399 
void JSThread::TransferDaemonThreadToRunning()
{
    // Daemon-thread variant of TransferToRunning: only performs the state
    // transition, without weak-node callbacks or full-mark retries.
    ASSERT(!g_isEnableCMCGC);
    ASSERT(IsDaemonThread());
    ASSERT(currentThread == this);
    StoreRunningState(ThreadState::RUNNING);
}
1407 
void JSThread::StoreState(ThreadState newState)
{
    // Publish newState into the packed state/flags word with a CAS loop that
    // preserves whatever flags are concurrently set by other threads.
    ASSERT(!g_isEnableCMCGC);
    while (true) {
        ThreadStateAndFlags oldStateAndFlags;
        oldStateAndFlags.asNonvolatileInt = glueData_.stateAndFlags_.asInt;

        // Keep the current flags, replace only the state bits.
        ThreadStateAndFlags newStateAndFlags;
        newStateAndFlags.asNonvolatileStruct.flags = oldStateAndFlags.asNonvolatileStruct.flags;
        newStateAndFlags.asNonvolatileStruct.state = newState;

        // Release ordering publishes prior writes to threads that observe the
        // new state; weak CAS may fail spuriously, hence the retry loop.
        bool done = glueData_.stateAndFlags_.asAtomicInt.compare_exchange_weak(oldStateAndFlags.asNonvolatileInt,
                                                                               newStateAndFlags.asNonvolatileInt,
                                                                               std::memory_order_release);
        if (LIKELY(done)) {
            break;
        }
    }
}
1427 
void JSThread::StoreRunningState([[maybe_unused]] ThreadState newState)
{
    // Transition into RUNNING. Unlike StoreState, the CAS only succeeds while
    // no flags are set: an active barrier is acknowledged first, and a
    // pending suspend request parks this thread until it is resumed.
    ASSERT(!g_isEnableCMCGC);
    ASSERT(newState == ThreadState::RUNNING);
    while (true) {
        ThreadStateAndFlags oldStateAndFlags;
        oldStateAndFlags.asNonvolatileInt = glueData_.stateAndFlags_.asInt;
        ASSERT(oldStateAndFlags.asNonvolatileStruct.state != ThreadState::RUNNING);

        if (LIKELY(oldStateAndFlags.asNonvolatileStruct.flags == ThreadFlag::NO_FLAGS)) {
            // No pending flags: try to claim RUNNING atomically.
            ThreadStateAndFlags newStateAndFlags;
            newStateAndFlags.asNonvolatileStruct.flags = oldStateAndFlags.asNonvolatileStruct.flags;
            newStateAndFlags.asNonvolatileStruct.state = newState;

            if (glueData_.stateAndFlags_.asAtomicInt.compare_exchange_weak(oldStateAndFlags.asNonvolatileInt,
                                                                           newStateAndFlags.asNonvolatileInt,
                                                                           std::memory_order_release)) {
                break;
            }
        } else if ((oldStateAndFlags.asNonvolatileStruct.flags & ThreadFlag::ACTIVE_BARRIER) != 0) {
            // Serve the suspend-all barrier before entering RUNNING.
            PassSuspendBarrier();
        } else if ((oldStateAndFlags.asNonvolatileStruct.flags & ThreadFlag::SUSPEND_REQUEST) != 0) {
            // Park until all suspend requests are resumed, then retry.
            constexpr int TIMEOUT = 100;
            OHOS_HITRACE(HITRACE_LEVEL_COMMERCIAL, "SuspendTime::StoreRunningState", "");
            LockHolder lock(suspendLock_);
            while (suspendCount_ > 0) {
                suspendCondVar_.TimedWait(&suspendLock_, TIMEOUT);
            }
            ASSERT(!HasSuspendRequest());
        }
    }
}
1460 
inline void JSThread::StoreSuspendedState(ThreadState newState)
{
    // Store any non-RUNNING state; RUNNING must go through StoreRunningState
    // so barriers and suspend requests are honored.
    ASSERT(!g_isEnableCMCGC);
    ASSERT(newState != ThreadState::RUNNING);
    StoreState(newState);
}
1467 
void JSThread::PostFork()
{
    // Re-bind this JSThread to the (single) thread that survives fork():
    // refresh the cached thread id and make sure the thread is in NATIVE
    // state, registering it as currentThread if it was not already.
    SetThreadId();
    if (currentThread == nullptr) {
        currentThread = this;
        if (LIKELY(!g_isEnableCMCGC)) {
            ASSERT(GetState() == ThreadState::CREATED);
            UpdateState(ThreadState::NATIVE);
        } else {
            GetThreadHolder()->TransferToNative();
        }
    } else {
        // We tried to call fork in the same thread
        ASSERT(currentThread == this);
        if (LIKELY(!g_isEnableCMCGC)) {
            ASSERT(GetState() == ThreadState::NATIVE);
        } else {
            GetThreadHolder()->TransferToNative();
        }
    }
}
1489 #ifndef NDEBUG
IsInManagedState() const1490 bool JSThread::IsInManagedState() const
1491 {
1492     ASSERT(this == JSThread::GetCurrent());
1493     return IsInRunningState();
1494 }
1495 
GetMutatorLockState() const1496 MutatorLock::MutatorLockState JSThread::GetMutatorLockState() const
1497 {
1498     return mutatorLockState_;
1499 }
1500 
SetMutatorLockState(MutatorLock::MutatorLockState newState)1501 void JSThread::SetMutatorLockState(MutatorLock::MutatorLockState newState)
1502 {
1503     mutatorLockState_ = newState;
1504 }
1505 #endif
1506 
GetArrayInstanceHClass(ElementsKind kind,bool isPrototype) const1507 JSHClass *JSThread::GetArrayInstanceHClass(ElementsKind kind, bool isPrototype) const
1508 {
1509     JSHandle<GlobalEnv> env = GetGlobalEnv();
1510     return GetArrayInstanceHClass(env, kind, isPrototype);
1511 }
1512 
GetArrayInstanceHClass(JSHandle<GlobalEnv> env,ElementsKind kind,bool isPrototype) const1513 JSHClass *JSThread::GetArrayInstanceHClass(JSHandle<GlobalEnv> env, ElementsKind kind, bool isPrototype) const
1514 {
1515     GlobalEnvField index = glueData_.arrayHClassIndexes_.GetArrayInstanceHClassIndex(kind, isPrototype);
1516     auto exceptArrayHClass = env->GetGlobalEnvObjectByIndex(static_cast<size_t>(index)).GetTaggedValue();
1517     auto exceptRecvHClass = JSHClass::Cast(exceptArrayHClass.GetTaggedObject());
1518     ASSERT(exceptRecvHClass->IsJSArray());
1519     return exceptRecvHClass;
1520 }
1521 
GetModuleManager() const1522 ModuleManager *JSThread::GetModuleManager() const
1523 {
1524     JSHandle<GlobalEnv> globalEnv = GetGlobalEnv();
1525     JSHandle<JSNativePointer> nativePointer(globalEnv->GetModuleManagerNativePointer());
1526     ModuleManager *moduleManager = reinterpret_cast<ModuleManager *>(nativePointer->GetExternalPointer());
1527     return moduleManager;
1528 }
1529 
GetCurrentGlobalEnv(JSTaggedValue currentEnv)1530 JSTaggedValue JSThread::GetCurrentGlobalEnv(JSTaggedValue currentEnv)
1531 {
1532     auto globalEnv = BaseEnv::Cast(currentEnv.GetTaggedObject())->GetGlobalEnv(this);
1533     if (globalEnv.IsHole()) {
1534         return GetGlueGlobalEnv();
1535     }
1536     return globalEnv;
1537 }
1538 }  // namespace panda::ecmascript
1539