1 /*
2  * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef ECMASCRIPT_JS_THREAD_H
17 #define ECMASCRIPT_JS_THREAD_H
18 
19 #include <atomic>
20 #include <sstream>
21 #include <string>
22 #include <cstdint>
23 
24 #include "ecmascript/platform/ffrt.h"
25 #include "ecmascript/base/aligned_struct.h"
26 #include "ecmascript/builtin_entries.h"
27 #include "ecmascript/daemon/daemon_task.h"
28 #include "ecmascript/global_index.h"
29 #include "ecmascript/js_handle.h"
30 #include "ecmascript/js_object_resizing_strategy.h"
31 #include "ecmascript/js_tagged_value.h"
32 #include "ecmascript/js_thread_hclass_entries.h"
33 #include "ecmascript/js_thread_stub_entries.h"
34 #include "ecmascript/js_thread_elements_hclass_entries.h"
35 #include "ecmascript/log_wrapper.h"
36 #include "ecmascript/mem/visitor.h"
37 #include "ecmascript/mutator_lock.h"
38 #include "ecmascript/napi/include/jsnapi_expo.h"
39 #include "ecmascript/patch/patch_loader.h"
40 #include "common_components/heap/collector/gc_request.h"
41 #include "common_interfaces/base_runtime.h"
42 #include "common_interfaces/thread/base_thread.h"
43 #include "common_interfaces/thread/thread_holder.h"
44 #include "ecmascript/cross_vm/js_thread_hybrid.h"
45 
46 #if defined(ENABLE_FFRT_INTERFACES)
47 #include "ffrt.h"
48 #include "c/executor_task.h"
49 #endif
50 
51 namespace panda::ecmascript {
52 class DateUtils;
53 class EcmaVM;
54 class GlobalIndex;
55 class HeapRegionAllocator;
56 class PropertiesCache;
57 class MegaICCache;
58 class ModuleLogger;
59 class ModuleManager;
60 template<typename T>
61 class EcmaGlobalStorage;
62 class Node;
63 class DebugNode;
64 class VmThreadControl;
65 class GlobalEnvConstants;
66 enum class ElementsKind : uint8_t;
67 enum class NodeKind : uint8_t;
68 
69 class MachineCode;
70 class DependentInfos;
71 using JitCodeVector = std::vector<std::tuple<MachineCode*, std::string, uintptr_t>>;
72 using JitCodeMapVisitor = std::function<void(std::map<JSTaggedType, JitCodeVector*>&)>;
73 using OnErrorCallback = std::function<void(Local<ObjectRef> value, void *data)>;
74 using WeakClearCallback = void (*)(void *);
75 
76 enum class MarkStatus : uint8_t {
77     READY_TO_MARK,
78     MARKING,
79     MARK_FINISHED,
80 };
81 
82 enum class GCKind : uint8_t {
83     LOCAL_GC,
84     SHARED_GC
85 };
86 
87 enum class PGOProfilerStatus : uint8_t {
88     PGO_PROFILER_DISABLE,
89     PGO_PROFILER_ENABLE,
90 };
91 
92 enum class BCStubStatus: uint8_t {
93     NORMAL_BC_STUB,
94     PROFILE_BC_STUB,
95     JIT_PROFILE_BC_STUB,
96     STW_COPY_BC_STUB,
97 };
98 
99 enum class CommonStubStatus: uint8_t {
100     NORMAL_COMMON_STUB,
101     STW_COPY_COMMON_STUB,
102 };
103 
104 enum class BuiltinsStubStatus: uint8_t {
105     NORMAL_BUILTINS_STUB,
106     STW_COPY_BUILTINS_STUB,
107 };
108 
109 enum ThreadType : uint8_t {
110     JS_THREAD,
111     JIT_THREAD,
112     DAEMON_THREAD,
113 };
114 
115 
116 using BaseThread = common::BaseThread;
117 using BaseThreadType = common::BaseThreadType;
118 using ThreadHolder = common::ThreadHolder;
119 using ThreadFlag = common::ThreadFlag;
120 using ThreadState = common::ThreadState;
121 using ThreadStateAndFlags = common::ThreadStateAndFlags;
122 static constexpr uint32_t THREAD_STATE_OFFSET = common::THREAD_STATE_OFFSET;
123 static constexpr uint32_t THREAD_FLAGS_MASK = common::THREAD_FLAGS_MASK;
124 
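// Counting barrier used while suspending threads: PassStrongly() decrements the count (and, when
// PANDA_USE_FUTEX is defined, futex-wakes waiters once the last thread has passed); Wait() blocks
// until the count reaches zero.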
125 class SuspendBarrier {
126 public:
127     SuspendBarrier() : passBarrierCount_(0)
128     {
129     }
130 
131     explicit SuspendBarrier(int32_t count) : passBarrierCount_(count)
132     {
133     }
134 
135     void Wait();
136 
137     void PassStrongly()
138     {
139         [[maybe_unused]] int32_t oldCount = passBarrierCount_.fetch_sub(1, std::memory_order_seq_cst);
140 #if defined(PANDA_USE_FUTEX)
141         if (oldCount == 1) {
142             int32_t *addr = reinterpret_cast<int32_t*>(&passBarrierCount_);
143             futex(addr, FUTEX_WAKE_PRIVATE, INT_MAX, nullptr, nullptr, 0);
144         }
145 #endif
146     }
147 
148     void Initialize(int32_t count)
149     {
150         passBarrierCount_.store(count, std::memory_order_relaxed);
151     }
152 
153 private:
154     std::atomic<int32_t> passBarrierCount_;
155 };
156 
157 static constexpr uint32_t MAIN_THREAD_INDEX = 0;
158 
159 class JSThread {
160 public:
161     static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2;
162     static constexpr int CONCURRENT_MARKING_BITFIELD_MASK = 0x3;
163     static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_NUM = 1;
164     static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_MASK = 0x1;
165     static constexpr int READ_BARRIER_STATE_BITFIELD_MASK = 0x2;
166     static constexpr int CMC_GC_PHASE_BITFIELD_START = 8;
167     static constexpr int CMC_GC_PHASE_BITFIELD_NUM = 8;
168     static constexpr int CMC_GC_PHASE_BITFIELD_MASK =
169         (((1 << CMC_GC_PHASE_BITFIELD_NUM) - 1) << CMC_GC_PHASE_BITFIELD_START);
170     static constexpr int CMC_GC_REASON_BITFIELD_NUM = 32;
171     static constexpr int CHECK_SAFEPOINT_BITFIELD_NUM = 8;
172     static constexpr int PGO_PROFILER_BITFIELD_START = 16;
173     static constexpr int BOOL_BITFIELD_NUM = 1;
174     static constexpr int BCSTUBSTATUS_BITFIELD_NUM = 2;
175     static constexpr uint32_t RESERVE_STACK_SIZE = 128;
176     static constexpr size_t DEFAULT_MAX_SYSTEM_STACK_SIZE = 8_MB;
177     using MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>;
178     using SharedMarkStatusBits = BitField<SharedMarkStatus, 0, SHARED_CONCURRENT_MARKING_BITFIELD_NUM>; // 0
179     using ReadBarrierStateBit = SharedMarkStatusBits::NextFlag; // 1
180     using CMCGCPhaseBits = BitField<common::GCPhase, CMC_GC_PHASE_BITFIELD_START, CMC_GC_PHASE_BITFIELD_NUM>; // 8-15
181     using CMCGCReasonBits = CMCGCPhaseBits::NextField<common::GCReason, CMC_GC_REASON_BITFIELD_NUM>;
182     using CheckSafePointBit = BitField<bool, 0, BOOL_BITFIELD_NUM>;
183     using VMNeedSuspensionBit = BitField<bool, CHECK_SAFEPOINT_BITFIELD_NUM, BOOL_BITFIELD_NUM>;
184     using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag;
185     using InstallMachineCodeBit = VMHasSuspendedBit::NextFlag;
186     using PGOStatusBits = BitField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_START, BOOL_BITFIELD_NUM>;
187     using BCStubStatusBits = PGOStatusBits::NextField<BCStubStatus, BCSTUBSTATUS_BITFIELD_NUM>;
188     using CommonStubStatusBits = BCStubStatusBits::NextField<CommonStubStatus, BOOL_BITFIELD_NUM>;
189     using BuiltinsStubStatusBits = CommonStubStatusBits::NextField<BuiltinsStubStatus, BOOL_BITFIELD_NUM>;
190     using ThreadId = uint32_t;
191 
192     enum FrameDroppedState {
193         StateFalse = 0,
194         StateTrue,
195         StatePending
196     };
197 
198     enum StackInfoOpKind : uint32_t {
199         SwitchToSubStackInfo = 0,
200         SwitchToMainStackInfo,
201     };
202 
203     struct StackInfo {
204         uint64_t stackLimit;
205         uint64_t lastLeaveFrame;
206     };
207 
208     explicit JSThread(EcmaVM *vm);
209     // only used in jit thread
210     explicit JSThread(EcmaVM *vm, ThreadType threadType);
211     // only used in daemon thread
212     explicit JSThread(ThreadType threadType);
213 
214     PUBLIC_API ~JSThread();
215 
216     EcmaVM *GetEcmaVM() const
217     {
218         return vm_;
219     }
220 
221     static JSThread *Create(EcmaVM *vm);
222 
223     static JSThread *GetCurrent();
224 
225     int GetNestedLevel() const
226     {
227         return nestedLevel_;
228     }
229 
230     void SetNestedLevel(int level)
231     {
232         nestedLevel_ = level;
233     }
234 
235     void SetLastFp(JSTaggedType *fp)
236     {
237         glueData_.lastFp_ = fp;
238     }
239 
240     const JSTaggedType *GetLastFp() const
241     {
242         return glueData_.lastFp_;
243     }
244 
245     const JSTaggedType *GetCurrentSPFrame() const
246     {
247         return glueData_.currentFrame_;
248     }
249 
250     void SetCurrentSPFrame(JSTaggedType *sp)
251     {
252         glueData_.currentFrame_ = sp;
253     }
254 
255     const JSTaggedType *GetLastLeaveFrame() const
256     {
257         return glueData_.leaveFrame_;
258     }
259 
260     void SetLastLeaveFrame(JSTaggedType *sp)
261     {
262         glueData_.leaveFrame_ = sp;
263     }
264 
265     const JSTaggedType *GetCurrentFrame() const;
266 
267     void SetCurrentFrame(JSTaggedType *sp);
268 
269     const JSTaggedType *GetCurrentInterpretedFrame() const;
270 
271     bool DoStackOverflowCheck(const JSTaggedType *sp);
272 
273     bool DoStackLimitCheck();
274 
275     NativeAreaAllocator *GetNativeAreaAllocator() const
276     {
277         return nativeAreaAllocator_;
278     }
279 
280     HeapRegionAllocator *GetHeapRegionAllocator() const
281     {
282         return heapRegionAllocator_;
283     }
284 
285     void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
286     {
287         glueData_.newSpaceAllocationTopAddress_ = top;
288         glueData_.newSpaceAllocationEndAddress_ = end;
289     }
290 
291     void ReSetSOldSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
292     {
293         glueData_.sOldSpaceAllocationTopAddress_ = top;
294         glueData_.sOldSpaceAllocationEndAddress_ = end;
295     }
296 
297     void ReSetSNonMovableSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
298     {
299         glueData_.sNonMovableSpaceAllocationTopAddress_ = top;
300         glueData_.sNonMovableSpaceAllocationEndAddress_ = end;
301     }
302 
303     uintptr_t GetUnsharedConstpools() const
304     {
305         return glueData_.unsharedConstpools_;
306     }
307 
308     void SetUnsharedConstpools(uintptr_t unsharedConstpools)
309     {
310         glueData_.unsharedConstpools_ = unsharedConstpools;
311     }
312 
313     uintptr_t GetUnsharedConstpoolsArrayLen() const
314     {
315         return glueData_.unsharedConstpoolsArrayLen_;
316     }
317 
318     void SetUnsharedConstpoolsArrayLen(uint32_t unsharedConstpoolsArrayLen)
319     {
320         glueData_.unsharedConstpoolsArrayLen_ = unsharedConstpoolsArrayLen;
321     }
322 
323     void SetIsStartHeapSampling(bool isStart)
324     {
325         glueData_.isStartHeapSampling_ = isStart ? JSTaggedValue::True() : JSTaggedValue::False();
326     }
327 
328     void SetIsTracing(bool isTracing)
329     {
330         glueData_.isTracing_ = isTracing;
331     }
332 
333     void Iterate(RootVisitor &visitor);
334 
335     void IterateJitCodeMap(const JitCodeMapVisitor &updater);
336 
337     void IterateMegaIC(RootVisitor &v);
338     void ClearMegaIC();
339 
340     void IterateHandleWithCheck(RootVisitor &visitor);
341 
342     void ClearCache();
343 
344     void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const;
345     bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const;
346 
347     void PUBLIC_API SetException(JSTaggedValue exception);
348 
349     JSTaggedValue GetException() const
350     {
351         return glueData_.exception_;
352     }
353 
354     bool HasPendingException() const
355     {
356         return !glueData_.exception_.IsHole();
357     }
358 
359     void ClearException()
360     {
361         glueData_.exception_ = JSTaggedValue::Hole();
362     }
363 
364     const GlobalEnvConstants *GlobalConstants() const
365     {
366         return glueData_.globalConst_;
367     }
368 
369     void SetGlobalConstants(const GlobalEnvConstants *constants)
370     {
371         glueData_.globalConst_ = const_cast<GlobalEnvConstants*>(constants);
372     }
373 
374     BuiltinEntries* GetBuiltinEntriesPointer()
375     {
376         return &glueData_.builtinEntries_;
377     }
378 
379     const CMap<JSHClass *, GlobalIndex> &GetCtorHclassEntries() const
380     {
381         return ctorHclassEntries_;
382     }
383 
384     void AddToCallsiteSpToReturnAddrTable(uintptr_t callSiteSp, uintptr_t returnAddr)
385     {
386         ASSERT(callSiteSpToReturnAddrTable_.find(callSiteSp) == callSiteSpToReturnAddrTable_.end());
387         callSiteSpToReturnAddrTable_[callSiteSp] = returnAddr;
388     }
389 
390     uintptr_t GetCallSiteReturnAddr(uintptr_t callSiteSp)
391     {
392         ASSERT(callSiteSpToReturnAddrTable_.find(callSiteSp) != callSiteSpToReturnAddrTable_.end());
393         return callSiteSpToReturnAddrTable_[callSiteSp];
394     }
395 
396     uintptr_t GetAndClearCallSiteReturnAddr(uintptr_t callSiteSp);
397 
398     void SetInitialBuiltinHClass(
399         BuiltinTypeId type, JSHClass *builtinHClass, JSHClass *instanceHClass,
400                             JSHClass *prototypeHClass, JSHClass *prototypeOfPrototypeHClass = nullptr,
401                             JSHClass *extraHClass = nullptr);
402 
403     void SetInitialBuiltinGlobalHClass(JSHClass *builtinHClass, GlobalIndex globalIndex);
404 
405     JSHClass *GetBuiltinHClass(BuiltinTypeId type) const;
406 
407     JSHClass *GetBuiltinInstanceHClass(BuiltinTypeId type) const;
408     JSHClass *GetBuiltinExtraHClass(BuiltinTypeId type) const;
409 
410     JSHClass *GetArrayInstanceHClass(ElementsKind kind, bool isPrototype) const;
411     JSHClass *GetArrayInstanceHClass(JSHandle<GlobalEnv> env, ElementsKind kind, bool isPrototype) const;
412 
413     GlobalEnvField GetArrayInstanceHClassIndex(ElementsKind kind, bool isPrototype) const
414     {
415         return glueData_.arrayHClassIndexes_.GetArrayInstanceHClassIndex(kind, isPrototype);
416     }
417 
418     PUBLIC_API JSHClass *GetBuiltinPrototypeHClass(BuiltinTypeId type) const;
419     PUBLIC_API JSHClass *GetBuiltinPrototypeOfPrototypeHClass(BuiltinTypeId type) const;
420 
421     static size_t GetBuiltinHClassOffset(BuiltinTypeId, bool isArch32);
422 
423     static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId, bool isArch32);
424 
425     const BuiltinHClassEntries &GetBuiltinHClassEntries() const
426     {
427         return glueData_.builtinHClassEntries_;
428     }
429 
430     JSTaggedValue GetCurrentLexenv() const;
431     JSTaggedValue GetCurrentFunction() const;
432 
433     void RegisterRTInterface(size_t id, Address addr)
434     {
435         ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
436         glueData_.rtStubEntries_.Set(id, addr);
437     }
438 
439     Address GetRTInterface(size_t id) const
440     {
441         ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
442         return glueData_.rtStubEntries_.Get(id);
443     }
444 
445     Address GetFastStubEntry(uint32_t id) const
446     {
447         return glueData_.coStubEntries_.Get(id);
448     }
449 
450     void SetFastStubEntry(size_t id, Address entry)
451     {
452         glueData_.coStubEntries_.Set(id, entry);
453     }
454 
455     Address GetBuiltinStubEntry(uint32_t id) const
456     {
457         return glueData_.builtinStubEntries_.Get(id);
458     }
459 
460     void SetBuiltinStubEntry(size_t id, Address entry)
461     {
462         glueData_.builtinStubEntries_.Set(id, entry);
463     }
464 
465     Address GetBCStubEntry(uint32_t id) const
466     {
467         return glueData_.bcStubEntries_.Get(id);
468     }
469 
470     void SetBCStubEntry(size_t id, Address entry)
471     {
472         glueData_.bcStubEntries_.Set(id, entry);
473     }
474 
475     Address GetBaselineStubEntry(uint32_t id) const
476     {
477         return glueData_.baselineStubEntries_.Get(id);
478     }
479 
480     void SetBaselineStubEntry(size_t id, Address entry)
481     {
482         glueData_.baselineStubEntries_.Set(id, entry);
483     }
484 
485     void SetBCDebugStubEntry(size_t id, Address entry)
486     {
487         glueData_.bcDebuggerStubEntries_.Set(id, entry);
488     }
489 
490     Address *GetBytecodeHandler()
491     {
492         return glueData_.bcStubEntries_.GetAddr();
493     }
494 
495     void PUBLIC_API CheckSwitchDebuggerBCStub();
496     void CheckOrSwitchPGOStubs();
497     void SwitchJitProfileStubs(bool isEnablePgo);
498     void SwitchStwCopyBCStubs(bool isStwCopy);
499     void SwitchStwCopyCommonStubs(bool isStwCopy);
500     void SwitchStwCopyBuiltinsStubs(bool isStwCopy);
501 
502     ThreadId GetThreadId() const
503     {
504         return id_.load(std::memory_order_acquire);
505     }
506 
507     void PostFork();
508 
509     static ThreadId GetCurrentThreadId();
510 
511     void IterateWeakEcmaGlobalStorage(WeakVisitor &visitor);
512 
513     void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor, GCKind gcKind = GCKind::LOCAL_GC);
514 
515     void UpdateJitCodeMapReference(const WeakRootVisitor &visitor);
516 
517     PUBLIC_API PropertiesCache *GetPropertiesCache() const;
518     PUBLIC_API MegaICCache *GetLoadMegaICCache() const;
519     PUBLIC_API MegaICCache *GetStoreMegaICCache() const;
520 
521     MarkStatus GetMarkStatus() const
522     {
523         return MarkStatusBits::Decode(glueData_.gcStateBitField_);
524     }
525 
526     void SetMarkStatus(MarkStatus status)
527     {
528         MarkStatusBits::Set(status, &glueData_.gcStateBitField_);
529     }
530 
531     bool IsConcurrentMarkingOrFinished() const
532     {
533         return !IsReadyToConcurrentMark();
534     }
535 
536     bool IsReadyToConcurrentMark() const
537     {
538         auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
539         return status == MarkStatus::READY_TO_MARK;
540     }
541 
542     bool IsMarking() const
543     {
544         auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
545         return status == MarkStatus::MARKING;
546     }
547 
548     bool IsMarkFinished() const
549     {
550         auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
551         return status == MarkStatus::MARK_FINISHED;
552     }
553 
554     SharedMarkStatus GetSharedMarkStatus() const
555     {
556         return SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
557     }
558 
559     void SetSharedMarkStatus(SharedMarkStatus status)
560     {
561         SharedMarkStatusBits::Set(status, &glueData_.sharedGCStateBitField_);
562     }
563 
564     bool IsSharedConcurrentMarkingOrFinished() const
565     {
566         auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
567         return status == SharedMarkStatus::CONCURRENT_MARKING_OR_FINISHED;
568     }
569 
570     bool IsReadyToSharedConcurrentMark() const
571     {
572         auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
573         return status == SharedMarkStatus::READY_TO_CONCURRENT_MARK;
574     }
575 
576     bool NeedReadBarrier() const
577     {
578         return ReadBarrierStateBit::Decode(glueData_.sharedGCStateBitField_);
579     }
580 
581     void SetReadBarrierState(bool flag)
582     {
583         ReadBarrierStateBit::Set(flag, &glueData_.sharedGCStateBitField_);
584     }
585 
586     common::GCPhase GetCMCGCPhase() const
587     {
588         return CMCGCPhaseBits::Decode(glueData_.sharedGCStateBitField_);
589     }
590 
591     void SetCMCGCPhase(common::GCPhase gcPhase)
592     {
593         CMCGCPhaseBits::Set(gcPhase, &glueData_.sharedGCStateBitField_);
594     }
595 
596     common::GCReason GetCMCGCReason() const
597     {
598         return CMCGCReasonBits::Decode(glueData_.sharedGCStateBitField_);
599     }
600 
601     void SetCMCGCReason(common::GCReason gcReason)
602     {
603         CMCGCReasonBits::Set(gcReason, &glueData_.sharedGCStateBitField_);
604     }
605 
606     void SetPGOProfilerEnable(bool enable)
607     {
608         PGOProfilerStatus status =
609             enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE;
610         SetInterruptValue<PGOStatusBits>(status);
611     }
612 
613     bool IsPGOProfilerEnable() const
614     {
615         auto status = PGOStatusBits::Decode(glueData_.interruptVector_);
616         return status == PGOProfilerStatus::PGO_PROFILER_ENABLE;
617     }
618 
619     void SetBCStubStatus(BCStubStatus status)
620     {
621         SetInterruptValue<BCStubStatusBits>(status);
622     }
623 
624     BCStubStatus GetBCStubStatus() const
625     {
626         return BCStubStatusBits::Decode(glueData_.interruptVector_);
627     }
628 
629     void SetCommonStubStatus(CommonStubStatus status)
630     {
631         SetInterruptValue<CommonStubStatusBits>(status);
632     }
633 
634     CommonStubStatus GetCommonStubStatus() const
635     {
636         return CommonStubStatusBits::Decode(glueData_.interruptVector_);
637     }
638 
639     void SetBuiltinsStubStatus(BuiltinsStubStatus status)
640     {
641         SetInterruptValue<BuiltinsStubStatusBits>(status);
642     }
643 
644     BuiltinsStubStatus GetBuiltinsStubStatus() const
645     {
646         return BuiltinsStubStatusBits::Decode(glueData_.interruptVector_);
647     }
648 
649     bool ShouldHandleMarkingFinishedInSafepoint();
650 
651     bool CheckSafepoint();
652 
653     void CheckAndPassActiveBarrier();
654 
655     bool PassSuspendBarrier();
656 
657     void SetGetStackSignal(bool isParseStack)
658     {
659         getStackSignal_ = isParseStack;
660     }
661 
662     bool GetStackSignal() const
663     {
664         return getStackSignal_;
665     }
666 
667     void SetNeedProfiling(bool needProfiling)
668     {
669         needProfiling_.store(needProfiling);
670     }
671 
672     void SetIsProfiling(bool isProfiling)
673     {
674         isProfiling_ = isProfiling;
675     }
676 
677     bool GetIsProfiling() const
678     {
679         return isProfiling_;
680     }
681 
682     void SetGcState(bool gcState)
683     {
684         gcState_ = gcState;
685     }
686 
687     bool GetGcState() const
688     {
689         return gcState_;
690     }
691 
692     void SetRuntimeState(bool runtimeState)
693     {
694         runtimeState_ = runtimeState;
695     }
696 
697     bool GetRuntimeState() const
698     {
699         return runtimeState_;
700     }
701 
702     bool SetMainThread()
703     {
704         return isMainThread_ = true;
705     }
706 
707     bool IsMainThreadFast() const
708     {
709         return isMainThread_;
710     }
711 
712     void SetCpuProfileName(std::string &profileName)
713     {
714         profileName_ = profileName;
715     }
716 
717     void EnableAsmInterpreter()
718     {
719         isAsmInterpreter_ = true;
720     }
721 
722     bool IsAsmInterpreter() const
723     {
724         return isAsmInterpreter_;
725     }
726 
727     VmThreadControl *GetVmThreadControl() const
728     {
729         return vmThreadControl_;
730     }
731 
732     void SetEnableStackSourceFile(bool value)
733     {
734         enableStackSourceFile_ = value;
735     }
736 
737     bool GetEnableStackSourceFile() const
738     {
739         return enableStackSourceFile_;
740     }
741 
742     void SetEnableLazyBuiltins(bool value)
743     {
744         enableLazyBuiltins_ = value;
745     }
746 
747     bool GetEnableLazyBuiltins() const
748     {
749         return enableLazyBuiltins_;
750     }
751 
752     void SetInGlobalEnvInitialize(bool value)
753     {
754         inGlobalEnvInitialize_ = value;
755     }
756 
757     bool InGlobalEnvInitialize() const
758     {
759         return inGlobalEnvInitialize_;
760     }
761 
762     void SetReadyForGCIterating(bool flag)
763     {
764         readyForGCIterating_ = flag;
765     }
766 
767     bool ReadyForGCIterating() const
768     {
769         return readyForGCIterating_;
770     }
771 
772     void EnableUserUncaughtErrorHandler()
773     {
774         isUncaughtExceptionRegistered_ = true;
775     }
776 
777     void HandleUncaughtException();
778     void HandleUncaughtException(JSTaggedValue exception);
779 
780     void SetOnErrorCallback(OnErrorCallback callback, void* data)
781     {
782         onErrorCallback_ = callback;
783         onErrorData_ = data;
784     }
785 
786     OnErrorCallback GetOnErrorCallback()
787     {
788         return onErrorCallback_;
789     }
790 
791     void* GetOnErrorData()
792     {
793         return onErrorData_;
794     }
795 
796     static constexpr size_t GetGlueDataOffset()
797     {
798         return MEMBER_OFFSET(JSThread, glueData_);
799     }
800 
801     uintptr_t GetGlueAddr() const
802     {
803         return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset();
804     }
805 
806     static JSThread *GlueToJSThread(uintptr_t glue)
807     {
808         // very careful to modify here
809         return reinterpret_cast<JSThread *>(glue - GetGlueDataOffset());
810     }
811 
812     void SetCheckSafePointStatus()
813     {
814         ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
815         SetInterruptValue<CheckSafePointBit>(true);
816     }
817 
818     void ResetCheckSafePointStatus()
819     {
820         ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
821         SetInterruptValue<CheckSafePointBit>(false);
822     }
823 
824     void SetVMNeedSuspension(bool flag)
825     {
826         SetInterruptValue<VMNeedSuspensionBit>(flag);
827     }
828 
829     bool VMNeedSuspension()
830     {
831         return VMNeedSuspensionBit::Decode(glueData_.interruptVector_);
832     }
833 
834     void SetVMSuspended(bool flag)
835     {
836         SetInterruptValue<VMHasSuspendedBit>(flag);
837     }
838 
839     bool IsVMSuspended()
840     {
841         return VMHasSuspendedBit::Decode(glueData_.interruptVector_);
842     }
843 
844     bool HasTerminationRequest() const
845     {
846         return needTermination_;
847     }
848 
849     void SetTerminationRequest(bool flag)
850     {
851         needTermination_ = flag;
852     }
853 
854     void SetVMTerminated(bool flag)
855     {
856         hasTerminated_ = flag;
857     }
858 
859     bool HasTerminated() const
860     {
861         return hasTerminated_;
862     }
863 
864     void TerminateExecution();
865 
866     void SetInstallMachineCode(bool flag)
867     {
868         SetInterruptValue<InstallMachineCodeBit>(flag);
869     }
870 
871     bool HasInstallMachineCode() const
872     {
873         return InstallMachineCodeBit::Decode(glueData_.interruptVector_);
874     }
875 
876     static uintptr_t GetCurrentStackPosition()
877     {
878         return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
879     }
880 
881     bool IsLegalAsmSp(uintptr_t sp) const;
882 
883     bool IsLegalThreadSp(uintptr_t sp) const;
884 
885     bool IsLegalSp(uintptr_t sp) const;
886 
887     void SetCheckAndCallEnterState(bool state)
888     {
889         finalizationCheckState_ = state;
890     }
891 
892     bool GetCheckAndCallEnterState() const
893     {
894         return finalizationCheckState_;
895     }
896 
897     uint64_t GetStackStart() const
898     {
899         return glueData_.stackStart_;
900     }
901 
902     uint64_t GetStackLimit() const
903     {
904         return glueData_.stackLimit_;
905     }
906 
907     JSHandle<GlobalEnv> PUBLIC_API GetGlobalEnv() const;
908 
909     JSTaggedValue PUBLIC_API GetCurrentGlobalEnv(JSTaggedValue currentEnv);
910 
911     JSTaggedValue GetGlueGlobalEnv() const
912     {
913         // change to current
914         return glueData_.currentEnv_;
915     }
916 
917     void SetGlueGlobalEnv(JSTaggedValue env)
918     {
919         ASSERT(env != JSTaggedValue::Hole());
920         glueData_.currentEnv_ = env;
921     }
922 
923     inline uintptr_t NewGlobalHandle(JSTaggedType value)
924     {
925         return newGlobalHandle_(value);
926     }
927 
928     inline void DisposeGlobalHandle(uintptr_t nodeAddr)
929     {
930         disposeGlobalHandle_(nodeAddr);
931     }
932 
933     inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr,
934                              WeakClearCallback nativeFinalizeCallBack = nullptr)
935     {
936         return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
937     }
938 
939     inline uintptr_t ClearWeak(uintptr_t nodeAddr)
940     {
941         return clearWeak_(nodeAddr);
942     }
943 
944     inline bool IsWeak(uintptr_t addr) const
945     {
946         return isWeak_(addr);
947     }
948 
949     void EnableCrossThreadExecution()
950     {
951         glueData_.allowCrossThreadExecution_ = true;
952     }
953 
954     bool IsCrossThreadExecutionEnable() const
955     {
956         return glueData_.allowCrossThreadExecution_;
957     }
958 
959     bool IsFrameDropped()
960     {
961         return glueData_.isFrameDropped_;
962     }
963 
964     void SetFrameDroppedState()
965     {
966         glueData_.isFrameDropped_ = true;
967     }
968 
969     void ResetFrameDroppedState()
970     {
971         glueData_.isFrameDropped_ = false;
972     }
973 
974     bool IsEntryFrameDroppedTrue()
975     {
976         return glueData_.entryFrameDroppedState_ == FrameDroppedState::StateTrue;
977     }
978 
979     bool IsEntryFrameDroppedPending()
980     {
981         return glueData_.entryFrameDroppedState_ == FrameDroppedState::StatePending;
982     }
983 
984     void SetEntryFrameDroppedState()
985     {
986         glueData_.entryFrameDroppedState_ = FrameDroppedState::StateTrue;
987     }
988 
989     void ResetEntryFrameDroppedState()
990     {
991         glueData_.entryFrameDroppedState_ = FrameDroppedState::StateFalse;
992     }
993 
994     void PendingEntryFrameDroppedState()
995     {
996         glueData_.entryFrameDroppedState_ = FrameDroppedState::StatePending;
997     }
998 
999     bool IsDebugMode()
1000     {
1001         return glueData_.isDebugMode_;
1002     }
1003 
1004     void SetDebugModeState()
1005     {
1006         glueData_.isDebugMode_ = true;
1007     }
1008 
1009     void ResetDebugModeState()
1010     {
1011         glueData_.isDebugMode_ = false;
1012     }
1013 
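    // Updates a single bit field inside glueData_.interruptVector_ with a compare-and-swap loop,
    // so concurrent updates to the other interrupt bits are not lost.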
1014     template<typename T, typename V>
1015     void SetInterruptValue(V value)
1016     {
1017         volatile auto interruptValue =
1018             reinterpret_cast<volatile std::atomic<uint64_t> *>(&glueData_.interruptVector_);
1019         uint64_t oldValue = interruptValue->load(std::memory_order_relaxed);
1020         auto newValue = oldValue;
1021         do {
1022             newValue = oldValue;
1023             T::Set(value, &newValue);
1024         } while (!std::atomic_compare_exchange_strong_explicit(interruptValue, &oldValue, newValue,
1025                                                                std::memory_order_release,
1026                                                                std::memory_order_relaxed));
1027     }
1028 
1029     void InvokeWeakNodeFreeGlobalCallBack();
1030     void InvokeWeakNodeNativeFinalizeCallback();
1031     bool IsStartGlobalLeakCheck() const;
1032     bool EnableGlobalObjectLeakCheck() const;
1033     bool EnableGlobalPrimitiveLeakCheck() const;
1034     void WriteToStackTraceFd(std::ostringstream &buffer) const;
1035     void SetStackTraceFd(int32_t fd);
1036     void CloseStackTraceFd();
1037     uint32_t IncreaseGlobalNumberCount()
1038     {
1039         return ++globalNumberCount_;
1040     }
1041 
1042     void SetPropertiesGrowStep(uint32_t step)
1043     {
1044         glueData_.propertiesGrowStep_ = step;
1045     }
1046 
1047     uint32_t GetPropertiesGrowStep() const
1048     {
1049         return glueData_.propertiesGrowStep_;
1050     }
1051 
1052     void SetRandomStatePtr(uint64_t *ptr)
1053     {
1054         glueData_.randomStatePtr_ = reinterpret_cast<uintptr_t>(ptr);
1055     }
1056 
1057     void SetTaskInfo(uintptr_t taskInfo)
1058     {
1059         glueData_.taskInfo_ = taskInfo;
1060     }
1061 
1062     uintptr_t GetTaskInfo() const
1063     {
1064         return glueData_.taskInfo_;
1065     }
1066 
1067     void SetJitCodeMap(JSTaggedType exception,  MachineCode* machineCode, std::string &methodName, uintptr_t offset);
1068 
1069     std::map<JSTaggedType, JitCodeVector*> &GetJitCodeMaps()
1070     {
1071         return jitCodeMaps_;
1072     }
1073 
1074     bool IsEnableMutantArray() const
1075     {
1076         return glueData_.isEnableMutantArray_;
1077     }
1078 
1079     bool IsEnableElementsKind() const
1080     {
1081         return glueData_.IsEnableElementsKind_;
1082     }
1083 
1084     uint32_t PUBLIC_API IsEnableCMCGC() const
1085     {
1086         return glueData_.isEnableCMCGC_;
1087     }
1088 
1089     void SetEnableCMCGC(bool enableCMCGC)
1090     {
1091         glueData_.isEnableCMCGC_ = enableCMCGC;
1092     }
1093 
1094     uintptr_t GetAllocBuffer() const
1095     {
1096         return glueData_.allocBuffer_;
1097     }
1098 
1099     void OnHeapCreated(uintptr_t startAddr)
1100     {
1101         glueData_.heapStartAddr_ = startAddr;
1102         glueData_.heapCurrentEnd_ = 0;
1103     }
1104 
1105     void OnHeapExtended(uintptr_t newEnd)
1106     {
1107         glueData_.heapCurrentEnd_ = newEnd;
1108     }
1109 
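    // GlueData groups the per-thread fields that the interpreter and compiled stubs read directly
    // through the glue pointer. The Index enum and the Get*Offset helpers below must stay in sync
    // with the declaration order of the fields (checked by the static_assert on NumOfMembers).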
1110     struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
1111                                                  BCStubEntries,
1112                                                  base::AlignedBool,
1113                                                  base::AlignedPointer,
1114                                                  base::AlignedPointer,
1115                                                  base::AlignedPointer,
1116                                                  base::AlignedPointer,
1117                                                  base::AlignedPointer,
1118                                                  JSTaggedValue,
1119                                                  base::AlignedPointer,
1120                                                  base::AlignedPointer,
1121                                                  base::AlignedPointer,
1122                                                  base::AlignedPointer,
1123                                                  base::AlignedPointer,
1124                                                  base::AlignedPointer,
1125                                                  base::AlignedPointer,
1126                                                  base::AlignedPointer,
1127                                                  base::AlignedPointer,
1128                                                  base::AlignedPointer,
1129                                                  RTStubEntries,
1130                                                  COStubEntries,
1131                                                  BuiltinStubEntries,
1132                                                  BuiltinHClassEntries,
1133                                                  BCDebuggerStubEntries,
1134                                                  BaselineStubEntries,
1135                                                  base::AlignedUint64,
1136                                                  base::AlignedUint64,
1137                                                  base::AlignedPointer,
1138                                                  JSTaggedValue,
1139                                                  base::AlignedUint64,
1140                                                  base::AlignedUint64,
1141                                                  base::AlignedPointer,
1142                                                  base::AlignedUint64,
1143                                                  base::AlignedUint64,
1144                                                  JSTaggedValue,
1145                                                  base::AlignedBool,
1146                                                  base::AlignedBool,
1147                                                  base::AlignedUint32,
1148                                                  base::AlignedPointer,
1149                                                  BuiltinEntries,
1150                                                  base::AlignedBool,
1151                                                  base::AlignedUint32,
1152                                                  base::AlignedPointer,
1153                                                  base::AlignedPointer,
1154                                                  base::AlignedUint32,
1155                                                  base::AlignedBool,
1156                                                  base::AlignedBool,
1157                                                  base::AlignedPointer,
1158                                                  base::AlignedPointer,
1159                                                  base::AlignedPointer,
1160                                                  base::AlignedUint64,
1161                                                  base::AlignedUint64,
1162                                                  base::AlignedUint64,
1163                                                  ElementsHClassEntries,
1164                                                  base::AlignedPointer,
1165                                                  base::AlignedUint32,
1166                                                  base::AlignedBool> {
1167         enum class Index : size_t {
1168             BcStubEntriesIndex = 0,
1169             IsEnableCMCGCIndex,
1170             ThreadHolderIndex,
1171             HeapStartAddrIndex,
1172             HeapCurrentEndIndex,
1173             AllocBufferIndex,
1174             StateAndFlagsIndex,
1175             ExceptionIndex,
1176             CurrentFrameIndex,
1177             LeaveFrameIndex,
1178             LastFpIndex,
1179             BaseAddressIndex,
1180             NewSpaceAllocationTopAddressIndex,
1181             NewSpaceAllocationEndAddressIndex,
1182             SOldSpaceAllocationTopAddressIndex,
1183             SOldSpaceAllocationEndAddressIndex,
1184             SNonMovableSpaceAllocationTopAddressIndex,
1185             SNonMovableSpaceAllocationEndAddressIndex,
1186             RTStubEntriesIndex,
1187             COStubEntriesIndex,
1188             BuiltinsStubEntriesIndex,
1189             BuiltinHClassEntriesIndex,
1190             BcDebuggerStubEntriesIndex,
1191             BaselineStubEntriesIndex,
1192             GCStateBitFieldIndex,
1193             SharedGCStateBitFieldIndex,
1194             FrameBaseIndex,
1195             CurrentEnvIndex,
1196             StackStartIndex,
1197             StackLimitIndex,
1198             GlobalConstIndex,
1199             AllowCrossThreadExecutionIndex,
1200             InterruptVectorIndex,
1201             IsStartHeapSamplingIndex,
1202             IsDebugModeIndex,
1203             IsFrameDroppedIndex,
1204             PropertiesGrowStepIndex,
1205             EntryFrameDroppedStateIndex,
1206             BuiltinEntriesIndex,
1207             IsTracingIndex,
1208             UnsharedConstpoolsArrayLenIndex,
1209             UnsharedConstpoolsIndex,
1210             RandomStatePtrIndex,
1211             TaskInfoIndex,
1212             IsEnableMutantArrayIndex,
1213             IsEnableElementsKindIndex,
1214             LoadMegaICCacheIndex,
1215             StoreMegaICCacheIndex,
1216             PropertiesCacheIndex,
1217             megaUpdateCountIndex,
1218             megaProbesCountIndex,
1219             megaHitCountIndex,
1220             ArrayHClassIndexesIndex,
1221             moduleLoggerIndex,
1222             stageOfHotReloadIndex,
1223             isMultiContextTriggeredIndex,
1224             NumOfMembers
1225         };
1226         static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);
1227 
1228         static size_t GetThreadHolderOffset(bool isArch32)
1229         {
1230             return GetOffset<static_cast<size_t>(Index::ThreadHolderIndex)>(isArch32);
1231         }
1232 
1233         static size_t GetHeapStartAddrOffset(bool isArch32)
1234         {
1235             return GetOffset<static_cast<size_t>(Index::HeapStartAddrIndex)>(isArch32);
1236         }
1237 
1238         static size_t GetHeapCurrentEndOffset(bool isArch32)
1239         {
1240             return GetOffset<static_cast<size_t>(Index::HeapCurrentEndIndex)>(isArch32);
1241         }
1242 
1243         static size_t GetAllocBufferOffset(bool isArch32)
1244         {
1245             return GetOffset<static_cast<size_t>(Index::AllocBufferIndex)>(isArch32);
1246         }
1247 
1248         static size_t GetStateAndFlagsOffset(bool isArch32)
1249         {
1250             return GetOffset<static_cast<size_t>(Index::StateAndFlagsIndex)>(isArch32);
1251         }
1252 
1253         static size_t GetExceptionOffset(bool isArch32)
1254         {
1255             return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32);
1256         }
1257 
1258         static size_t GetBaseAddressOffset(bool isArch32)
1259         {
1260             return GetOffset<static_cast<size_t>(Index::BaseAddressIndex)>(isArch32);
1261         }
1262 
1263         static size_t GetGlobalConstOffset(bool isArch32)
1264         {
1265             return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
1266         }
1267 
1268         static size_t GetGCStateBitFieldOffset(bool isArch32)
1269         {
1270             return GetOffset<static_cast<size_t>(Index::GCStateBitFieldIndex)>(isArch32);
1271         }
1272 
1273         static size_t GetSharedGCStateBitFieldOffset(bool isArch32)
1274         {
1275             return GetOffset<static_cast<size_t>(Index::SharedGCStateBitFieldIndex)>(isArch32);
1276         }
1277 
1278         static size_t GetCurrentFrameOffset(bool isArch32)
1279         {
1280             return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32);
1281         }
1282 
1283         static size_t GetLeaveFrameOffset(bool isArch32)
1284         {
1285             return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32);
1286         }
1287 
1288         static size_t GetLastFpOffset(bool isArch32)
1289         {
1290             return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32);
1291         }
1292 
1293         static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32)
1294         {
1295             return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32);
1296         }
1297 
1298         static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32)
1299         {
1300             return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32);
1301         }
1302 
1303         static size_t GetSOldSpaceAllocationTopAddressOffset(bool isArch32)
1304         {
1305             return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationTopAddressIndex)>(isArch32);
1306         }
1307 
1308         static size_t GetSOldSpaceAllocationEndAddressOffset(bool isArch32)
1309         {
1310             return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationEndAddressIndex)>(isArch32);
1311         }
1312 
1313         static size_t GetSNonMovableSpaceAllocationTopAddressOffset(bool isArch32)
1314         {
1315             return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationTopAddressIndex)>(isArch32);
1316         }
1317 
1318         static size_t GetSNonMovableSpaceAllocationEndAddressOffset(bool isArch32)
1319         {
1320             return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationEndAddressIndex)>(isArch32);
1321         }
1322 
1323         static size_t GetBCStubEntriesOffset(bool isArch32)
1324         {
1325             return GetOffset<static_cast<size_t>(Index::BcStubEntriesIndex)>(isArch32);
1326         }
1327 
1328         static size_t GetRTStubEntriesOffset(bool isArch32)
1329         {
1330             return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
1331         }
1332 
1333         static size_t GetCOStubEntriesOffset(bool isArch32)
1334         {
1335             return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
1336         }
1337 
1338         static size_t GetBaselineStubEntriesOffset(bool isArch32)
1339         {
1340             return GetOffset<static_cast<size_t>(Index::BaselineStubEntriesIndex)>(isArch32);
1341         }
1342 
1343         static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
1344         {
1345             return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
1346         }
1347 
1348         static size_t GetBuiltinHClassEntriesOffset(bool isArch32)
1349         {
1350             return GetOffset<static_cast<size_t>(Index::BuiltinHClassEntriesIndex)>(isArch32);
1351         }
1352 
1353         static size_t GetBuiltinHClassOffset(BuiltinTypeId type, bool isArch32)
1354         {
1355             return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetBuiltinHClassOffset(type);
1356         }
1357 
1358         static size_t GetBuiltinInstanceHClassOffset(BuiltinTypeId type, bool isArch32)
1359         {
1360             return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetInstanceHClassOffset(type);
1361         }
1362 
1363         static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
1364         {
1365             return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetPrototypeHClassOffset(type);
1366         }
1367 
1368         static size_t GetBuiltinPrototypeOfPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
1369         {
1370             return GetBuiltinHClassEntriesOffset(isArch32) +
1371                    BuiltinHClassEntries::GetPrototypeOfPrototypeHClassOffset(type);
1372         }
1373 
1374         static size_t GetBuiltinExtraHClassOffset(BuiltinTypeId type, bool isArch32)
1375         {
1376             return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetExtraHClassOffset(type);
1377         }
1378 
1379         static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
1380         {
1381             return GetOffset<static_cast<size_t>(Index::BcDebuggerStubEntriesIndex)>(isArch32);
1382         }
1383 
1384         static size_t GetFrameBaseOffset(bool isArch32)
1385         {
1386             return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
1387         }
1388 
1389         static size_t GetStackLimitOffset(bool isArch32)
1390         {
1391             return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
1392         }
1393 
1394         static size_t GetCurrentEnvOffset(bool isArch32)
1395         {
1396             return GetOffset<static_cast<size_t>(Index::CurrentEnvIndex)>(isArch32);
1397         }
1398 
1399         static size_t GetAllowCrossThreadExecutionOffset(bool isArch32)
1400         {
1401             return GetOffset<static_cast<size_t>(Index::AllowCrossThreadExecutionIndex)>(isArch32);
1402         }
1403 
1404         static size_t GetInterruptVectorOffset(bool isArch32)
1405         {
1406             return GetOffset<static_cast<size_t>(Index::InterruptVectorIndex)>(isArch32);
1407         }
1408 
1409         static size_t GetIsStartHeapSamplingOffset(bool isArch32)
1410         {
1411             return GetOffset<static_cast<size_t>(Index::IsStartHeapSamplingIndex)>(isArch32);
1412         }
1413 
1414         static size_t GetIsDebugModeOffset(bool isArch32)
1415         {
1416             return GetOffset<static_cast<size_t>(Index::IsDebugModeIndex)>(isArch32);
1417         }
1418 
1419         static size_t GetIsFrameDroppedOffset(bool isArch32)
1420         {
1421             return GetOffset<static_cast<size_t>(Index::IsFrameDroppedIndex)>(isArch32);
1422         }
1423 
1424         static size_t GetPropertiesGrowStepOffset(bool isArch32)
1425         {
1426             return GetOffset<static_cast<size_t>(Index::PropertiesGrowStepIndex)>(isArch32);
1427         }
1428 
1429         static size_t GetEntryFrameDroppedStateOffset(bool isArch32)
1430         {
1431             return GetOffset<static_cast<size_t>(Index::EntryFrameDroppedStateIndex)>(isArch32);
1432         }
1433 
1434         static size_t GetBuiltinEntriesOffset(bool isArch32)
1435         {
1436             return GetOffset<static_cast<size_t>(Index::BuiltinEntriesIndex)>(isArch32);
1437         }
1438 
1439         static size_t GetIsTracingOffset(bool isArch32)
1440         {
1441             return GetOffset<static_cast<size_t>(Index::IsTracingIndex)>(isArch32);
1442         }
1443 
1444         static size_t GetUnSharedConstpoolsOffset(bool isArch32)
1445         {
1446             return GetOffset<static_cast<size_t>(Index::UnsharedConstpoolsIndex)>(isArch32);
1447         }
1448 
1449         static size_t GetUnSharedConstpoolsArrayLenOffset(bool isArch32)
1450         {
1451             return GetOffset<static_cast<size_t>(Index::UnsharedConstpoolsArrayLenIndex)>(isArch32);
1452         }
1453 
1454         static size_t GetRandomStatePtrOffset(bool isArch32)
1455         {
1456             return GetOffset<static_cast<size_t>(Index::RandomStatePtrIndex)>(isArch32);
1457         }
1458 
1459         static size_t GetTaskInfoOffset(bool isArch32)
1460         {
1461             return GetOffset<static_cast<size_t>(Index::TaskInfoIndex)>(isArch32);
1462         }
1463 
1464         static size_t GetIsEnableMutantArrayOffset(bool isArch32)
1465         {
1466             return GetOffset<static_cast<size_t>(Index::IsEnableMutantArrayIndex)>(isArch32);
1467         }
1468 
1469         static size_t GetIsEnableElementsKindOffset(bool isArch32)
1470         {
1471             return GetOffset<static_cast<size_t>(Index::IsEnableElementsKindIndex)>(isArch32);
1472         }
1473 
1474         static size_t GetLoadMegaICCacheOffset(bool isArch32)
1475         {
1476             return GetOffset<static_cast<size_t>(Index::LoadMegaICCacheIndex)>(isArch32);
1477         }
1478 
1479         static size_t GetStoreMegaICCacheOffset(bool isArch32)
1480         {
1481             return GetOffset<static_cast<size_t>(Index::StoreMegaICCacheIndex)>(isArch32);
1482         }
1483 
1484         static size_t GetPropertiesCacheOffset(bool isArch32)
1485         {
1486             return GetOffset<static_cast<size_t>(Index::PropertiesCacheIndex)>(isArch32);
1487         }
1488         static size_t GetMegaProbesCountOffset(bool isArch32)
1489         {
1490             return GetOffset<static_cast<size_t>(Index::megaProbesCountIndex)>(isArch32);
1491         }
1492 
1493         static size_t GetMegaHitCountOffset(bool isArch32)
1494         {
1495             return GetOffset<static_cast<size_t>(Index::megaHitCountIndex)>(isArch32);
1496         }
1497 
1498         static size_t GetIsEnableCMCGCOffset(bool isArch32)
1499         {
1500             return GetOffset<static_cast<size_t>(Index::IsEnableCMCGCIndex)>(isArch32);
1501         }
1502 
1503         static size_t GetArrayHClassIndexesIndexOffset(bool isArch32)
1504         {
1505             return GetOffset<static_cast<size_t>(Index::ArrayHClassIndexesIndex)>(isArch32);
1506         }
1507         static size_t GetModuleLoggerOffset(bool isArch32)
1508         {
1509             return GetOffset<static_cast<size_t>(Index::moduleLoggerIndex)>(
1510                 isArch32);
1511         }
1512         static size_t GetStageOfHotReloadOffset(bool isArch32)
1513         {
1514             return GetOffset<static_cast<size_t>(Index::stageOfHotReloadIndex)>(
1515                 isArch32);
1516         }
1517         static size_t GetIsMultiContextTriggeredOffset(bool isArch32)
1518         {
1519             return GetOffset<static_cast<size_t>(Index::isMultiContextTriggeredIndex)>(
1520                 isArch32);
1521         }
1522 
1523         alignas(EAS) BCStubEntries bcStubEntries_ {};
1524         alignas(EAS) uint32_t isEnableCMCGC_ {0};
1525         alignas(EAS) uintptr_t threadHolder_ {0};
1526         alignas(EAS) uintptr_t heapStartAddr_ {0};
1527         alignas(EAS) uintptr_t heapCurrentEnd_ {0};
1528         alignas(EAS) uintptr_t allocBuffer_ {0};
1529         alignas(EAS) ThreadStateAndFlags stateAndFlags_ {};
1530         alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};
1531         alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
1532         alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
1533         alignas(EAS) JSTaggedType *lastFp_ {nullptr};
1534         alignas(EAS) JSTaggedType baseAddress_ {0};
1535         alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
1536         alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
1537         alignas(EAS) const uintptr_t *sOldSpaceAllocationTopAddress_ {nullptr};
1538         alignas(EAS) const uintptr_t *sOldSpaceAllocationEndAddress_ {nullptr};
1539         alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationTopAddress_ {nullptr};
1540         alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationEndAddress_ {nullptr};
1541         alignas(EAS) RTStubEntries rtStubEntries_ {};
1542         alignas(EAS) COStubEntries coStubEntries_ {};
1543         alignas(EAS) BuiltinStubEntries builtinStubEntries_ {};
1544         alignas(EAS) BuiltinHClassEntries builtinHClassEntries_ {};
1545         alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_ {};
1546         alignas(EAS) BaselineStubEntries baselineStubEntries_ {};
1547         alignas(EAS) volatile uint64_t gcStateBitField_ {0ULL};
1548         alignas(EAS) volatile uint64_t sharedGCStateBitField_ {0ULL};
1549         alignas(EAS) JSTaggedType *frameBase_ {nullptr};
1550         alignas(EAS) JSTaggedValue currentEnv_ {JSTaggedValue::Hole()};
1551         alignas(EAS) uint64_t stackStart_ {0};
1552         alignas(EAS) uint64_t stackLimit_ {0};
1553         alignas(EAS) GlobalEnvConstants *globalConst_ {nullptr};
1554         alignas(EAS) bool allowCrossThreadExecution_ {false};
1555         alignas(EAS) volatile uint64_t interruptVector_ {0};
1556         alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
1557         alignas(EAS) bool isDebugMode_ {false};
1558         alignas(EAS) bool isFrameDropped_ {false};
1559         alignas(EAS) uint32_t propertiesGrowStep_ {JSObjectResizingStrategy::PROPERTIES_GROW_SIZE};
1560         alignas(EAS) uint64_t entryFrameDroppedState_ {FrameDroppedState::StateFalse};
1561         alignas(EAS) BuiltinEntries builtinEntries_ {};
1562         alignas(EAS) bool isTracing_ {false};
1563         alignas(EAS) uint32_t unsharedConstpoolsArrayLen_ {0};
1564         alignas(EAS) uintptr_t unsharedConstpools_ {0};
1565         alignas(EAS) uintptr_t randomStatePtr_ {0};
1566         alignas(EAS) uintptr_t taskInfo_ {0};
1567         alignas(EAS) bool isEnableMutantArray_ {false};
1568         alignas(EAS) bool IsEnableElementsKind_ {false};
1569         alignas(EAS) MegaICCache *loadMegaICCache_ {nullptr};
1570         alignas(EAS) MegaICCache *storeMegaICCache_ {nullptr};
1571         alignas(EAS) PropertiesCache *propertiesCache_ {nullptr};
1572         alignas(EAS) uint64_t megaUpdateCount_ {0};
1573         alignas(EAS) uint64_t megaProbesCount_ {0};
1574         alignas(EAS) uint64_t megaHitCount {0};
1575         alignas(EAS) ElementsHClassEntries arrayHClassIndexes_ {};
1576         alignas(EAS) ModuleLogger *moduleLogger_ {nullptr};
1577         alignas(EAS) StageOfHotReload stageOfHotReload_ {StageOfHotReload::INITIALIZE_STAGE_OF_HOTRELOAD};
1578         alignas(EAS) bool isMultiContextTriggered_ {false};
1579     };
1580     STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);
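    // The offset getters above exist so that JIT-compiled code and assembly stubs can address GlueData
    // slots relative to the glue (JSThread) pointer. A minimal, illustrative sketch only; "emitter",
    // "Ldrb", "dstReg" and "glueReg" are hypothetical names, not part of this header:
    //     size_t off = JSThread::GlueData::GetIsDebugModeOffset(false);   // 64-bit layout
    //     emitter.Ldrb(dstReg, MemoryOperand(glueReg, off));              // a stub loading isDebugMode_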
1581 
1582     JSTaggedValue GetSingleCharTable() const
1583     {
1584         ASSERT(glueData_.globalConst_->GetSingleCharTable() != JSTaggedValue::Hole());
1585         return glueData_.globalConst_->GetSingleCharTable();
1586     }
1587 
1588     ModuleLogger *GetModuleLogger() const
1589     {
1590         return glueData_.moduleLogger_;
1591     }
1592 
1593     void SetModuleLogger(ModuleLogger *moduleLogger)
1594     {
1595         glueData_.moduleLogger_ = moduleLogger;
1596     }
1597 
1598     StageOfHotReload GetStageOfHotReload() const
1599     {
1600         return glueData_.stageOfHotReload_;
1601     }
1602 
1603     void SetStageOfHotReload(StageOfHotReload stageOfHotReload)
1604     {
1605         if (stageOfHotReload == StageOfHotReload::LOAD_END_EXECUTE_PATCHMAIN) {
1606             NotifyHotReloadDeoptimize();
1607         }
1608         glueData_.stageOfHotReload_ = stageOfHotReload;
1609     }
1610 
1611     bool IsMultiContextTriggered() const
1612     {
1613         return glueData_.isMultiContextTriggered_;
1614     }
1615 
1616     void SetMultiContextTriggered(bool isMultiContextTriggered)
1617     {
1618         glueData_.isMultiContextTriggered_ = isMultiContextTriggered;
1619     }
1620 
1621     JSHandle<DependentInfos> GetDependentInfo() const;
1622 
1623     void SetDependentInfo(JSTaggedValue info);
1624 
1625     JSHandle<DependentInfos> GetOrCreateThreadDependentInfo();
1626 
1627     void NotifyHotReloadDeoptimize();
1628 
1629     ModuleManager *GetModuleManager() const;
1630 
1631     bool IsInSubStack() const
1632     {
1633         return isInSubStack_;
1634     }
1635 
1636     const StackInfo &GetMainStackInfo() const
1637     {
1638         return mainStackInfo_;
1639     }
1640 
1641     bool IsPropertyCacheCleared() const;
1642 
1643     void ClearVMCachedConstantPool();
1644 
1645     bool IsReadyToUpdateDetector() const;
1646     Area *GetOrCreateRegExpCacheArea();
1647 
1648     void InitializeBuiltinObject(const JSHandle<GlobalEnv>& env, const std::string& key);
1649     void InitializeBuiltinObject(const JSHandle<GlobalEnv>& env);
1650 
1651     bool FullMarkRequest() const
1652     {
1653         return fullMarkRequest_;
1654     }
1655 
1656     void SetFullMarkRequest()
1657     {
1658         fullMarkRequest_ = true;
1659     }
1660 
1661     void ResetFullMarkRequest()
1662     {
1663         fullMarkRequest_ = false;
1664     }
1665 
1666     void SetProcessingLocalToSharedRset(bool processing)
1667     {
1668         processingLocalToSharedRset_ = processing;
1669     }
1670 
1671     bool IsProcessingLocalToSharedRset() const
1672     {
1673         return processingLocalToSharedRset_;
1674     }
1675 
1676     inline bool IsThreadSafe() const
1677     {
1678         return IsMainThread() || HasSuspendRequest();
1679     }
1680 
1681     bool IsSuspended() const
1682     {
1683         ASSERT(!IsEnableCMCGC());
1684         bool f = ReadFlag(ThreadFlag::SUSPEND_REQUEST);
1685         bool s = (GetState() != ThreadState::RUNNING);
1686         return f && s;
1687     }
1688 
1689     inline bool HasSuspendRequest() const
1690     {
1691         if (LIKELY(!IsEnableCMCGC())) {
1692             return ReadFlag(ThreadFlag::SUSPEND_REQUEST);
1693         } else {
1694             return GetThreadHolder()->HasSuspendRequest();
1695         }
1696     }
1697 
1698     void CheckSafepointIfSuspended()
1699     {
1700         if (LIKELY(!IsEnableCMCGC())) {
1701             if (HasSuspendRequest()) {
1702                 WaitSuspension();
1703             }
1704         } else {
1705             GetThreadHolder()->CheckSafepointIfSuspended();
1706         }
1707     }
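    // Illustrative sketch (hypothetical caller): long-running runtime loops typically poll the safepoint
    // so that a GC-requested suspension is honored promptly. "workList" and "Process" are assumed names:
    //     for (auto &item : workList) {
    //         Process(item);
    //         thread->CheckSafepointIfSuspended();  // parks here if a suspend request is pending
    //     }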
1708 
1709     bool IsInSuspendedState() const
1710     {
1711         ASSERT(!IsEnableCMCGC());
1712         return GetState() == ThreadState::IS_SUSPENDED;
1713     }
1714 
1715     bool IsInRunningState() const
1716     {
1717         if (LIKELY(!IsEnableCMCGC())) {
1718             return GetState() == ThreadState::RUNNING;
1719         } else {
1720             return GetThreadHolder()->IsInRunningState();
1721         }
1722     }
1723 
1724     bool IsInRunningStateOrProfiling() const;
1725 
1726     ThreadHolder *GetThreadHolder() const
1727     {
1728         return reinterpret_cast<ThreadHolder *>(glueData_.threadHolder_);
1729     }
1730 
1731     // To be implemented.
1732     void Visit(common::CommonRootVisitor visitor)
1733     {
1734         visitor(nullptr);
1735     }
1736 
1737     void SetAllocBuffer(void* allocBuffer)
1738     {
1739         glueData_.allocBuffer_ = reinterpret_cast<uintptr_t>(allocBuffer);
1740     }
1741 
1742     ThreadState GetState() const
1743     {
1744         ASSERT(!IsEnableCMCGC());
1745         uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
1746         return static_cast<ThreadState>(stateAndFlags >> THREAD_STATE_OFFSET);
1747     }
1748 
1749     ThreadState PUBLIC_API UpdateState(ThreadState newState);
1750 
1751     // newState must be a non-running state.
1752     ThreadState PUBLIC_API TransferToNonRunning(ThreadState newState)
1753     {
1754         ASSERT(newState != ThreadState::RUNNING);
1755         ThreadState oldState = GetState();
1756         if (oldState == ThreadState::RUNNING) {
1757             TransferFromRunningToSuspended(newState);
1758         } else if (oldState != newState) {
1759             StoreState(newState);
1760         }
1761         return oldState;
1762     }
1763 
1764     // Transfers to RUNNING if the current state is non-running.
1765     ThreadState PUBLIC_API TransferToRunningIfNonRunning()
1766     {
1767         ThreadState oldState = GetState();
1768         if (LIKELY(oldState != ThreadState::RUNNING)) {
1769             TransferToRunning();
1770         }
1771         return oldState;
1772     }
1773 
1774     // newState must be a non-running state and the old state must be RUNNING.
1775     void PUBLIC_API TransferToNonRunningInRunning(ThreadState newState)
1776     {
1777         ASSERT(newState != ThreadState::RUNNING);
1778         ASSERT(GetState() == ThreadState::RUNNING);
1779         GetState();
1780         TransferFromRunningToSuspended(newState);
1781     }
1782 
1783     // The old state must be a non-running state.
1784     void PUBLIC_API TransferInNonRunning(ThreadState newState)
1785     {
1786         ASSERT(GetState() != ThreadState::RUNNING);
1787         GetState();
1788         if (newState == ThreadState::RUNNING) {
1789             TransferToRunning();
1790         } else {
1791             StoreState(newState);
1792         }
1793     }
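    // Illustrative sketch of how the transfer helpers compose (hypothetical caller; assumes the enum has a
    // non-running member such as ThreadState::WAIT, and "DoBlockingWork" is an assumed function):
    //     ThreadState old = thread->TransferToNonRunning(ThreadState::WAIT);  // leave RUNNING around a blocking call
    //     DoBlockingWork();
    //     thread->TransferInNonRunning(ThreadState::RUNNING);                 // re-enter managed execution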
1794 
1795     void SuspendThread(bool internalSuspend, SuspendBarrier* barrier = nullptr);
1796     void ResumeThread(bool internalSuspend);
1797     void WaitSuspension();
1798     static bool IsMainThread();
1799     PUBLIC_API void ManagedCodeBegin();
1800     PUBLIC_API void ManagedCodeEnd();
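    // Illustrative usage sketch: native code that enters managed (JS) execution brackets it with
    // ManagedCodeBegin()/ManagedCodeEnd(), which are expected to move the thread into and out of RUNNING:
    //     thread->ManagedCodeBegin();
    //     // ... allocate or touch JS heap objects ...
    //     thread->ManagedCodeEnd();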
1801 #ifndef NDEBUG
1802     bool IsInManagedState() const;
1803     MutatorLock::MutatorLockState GetMutatorLockState() const;
1804     void SetMutatorLockState(MutatorLock::MutatorLockState newState);
1805 #endif
1806     void SetWeakFinalizeTaskCallback(const WeakFinalizeTaskCallback &callback)
1807     {
1808         finalizeTaskCallback_ = callback;
1809     }
1810 
1811     bool ShouldIgnoreFinalizeCallback() const
1812     {
1813         return ignoreFinalizeCallback_;
1814     }
1815 
1816     void IgnoreFinalizeCallback()
1817     {
1818         ignoreFinalizeCallback_ = true;
1819         SetWeakFinalizeTaskCallback(nullptr);
1820     }
1821 
1822     uint64_t GetJobId()
1823     {
1824         if (jobId_ == UINT64_MAX) {
1825             jobId_ = 0;
1826         }
1827         return ++jobId_;
1828     }
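    // Illustrative note: GetJobId() returns 1, 2, 3, ... and wraps back to 1 after UINT64_MAX, so 0 is
    // never handed out and can serve as an "invalid job id" sentinel. For example:
    //     uint64_t first = thread->GetJobId();   // 1 on a fresh thread
    //     uint64_t second = thread->GetJobId();  // 2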
1829 
1830     void SetAsyncCleanTaskCallback(const NativePointerTaskCallback &callback)
1831     {
1832         asyncCleanTaskCb_ = callback;
1833     }
1834 
1835     NativePointerTaskCallback GetAsyncCleanTaskCallback() const
1836     {
1837         return asyncCleanTaskCb_;
1838     }
1839 
1840     static void RegisterThread(JSThread *jsThread);
1841 
1842     static void UnregisterThread(JSThread *jsThread);
1843 
1844     bool IsJSThread() const
1845     {
1846         return threadType_ == ThreadType::JS_THREAD;
1847     }
1848 
1849     bool IsJitThread() const
1850     {
1851         return threadType_ == ThreadType::JIT_THREAD;
1852     }
1853 
1854     bool IsDaemonThread() const
1855     {
1856         return threadType_ == ThreadType::DAEMON_THREAD;
1857     }
1858 
1859     // Daemon_Thread and JS_Thread differ slightly in state transition; for example, when transitioning to
1860     // RUNNING, JS_Thread may perform some local GC actions, while Daemon_Thread does not need to.
1861     void TransferDaemonThreadToRunning();
1862 
1863     RecursiveMutex *GetJitLock()
1864     {
1865         return &jitMutex_;
1866     }
1867 
1868     RecursiveMutex &GetProfileTypeAccessorLock()
1869     {
1870         return profileTypeAccessorLockMutex_;
1871     }
1872 
1873     void SetMachineCodeLowMemory(bool isLow)
1874     {
1875         machineCodeLowMemory_ = isLow;
1876     }
1877 
1878     bool IsMachineCodeLowMemory()
1879     {
1880         return machineCodeLowMemory_;
1881     }
1882 
1883     void *GetEnv() const
1884     {
1885         return env_;
1886     }
1887 
1888     void SetEnv(void *env)
1889     {
1890         env_ = env;
1891     }
1892 
1893     void SetIsInConcurrentScope(bool flag)
1894     {
1895         isInConcurrentScope_ = flag;
1896     }
1897 
1898     bool IsInConcurrentScope()
1899     {
1900         return isInConcurrentScope_;
1901     }
1902 
1903     void UpdateStackInfo(void *stackInfo, StackInfoOpKind opKind);
1904 
1905     DateUtils *GetDateUtils() const
1906     {
1907         return dateUtils_;
1908     }
1909 
1910     bool CheckMultiThread() const
1911     {
1912         return GetThreadId() != JSThread::GetCurrentThreadId() && !IsCrossThreadExecutionEnable();
1913     }
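    // Illustrative sketch (hypothetical guard): CheckMultiThread() is true when this JSThread is used from a
    // thread other than its owner while cross-thread execution is not enabled:
    //     if (thread->CheckMultiThread()) {
    //         LOG_ECMA(ERROR) << "JSThread accessed from a non-owner thread";  // assumed handling
    //     }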
1914 
1915 #ifndef NDEBUG
1916     inline void LaunchSuspendAll()
1917     {
1918         launchedSuspendAll_ = true;
1919     }
1920 
1921     inline bool HasLaunchedSuspendAll() const
1922     {
1923         return launchedSuspendAll_;
1924     }
1925 
1926     inline void CompleteSuspendAll()
1927     {
1928         launchedSuspendAll_ = false;
1929     }
1930 #endif
1931 
1932     uint64_t GetMegaProbeCount() const
1933     {
1934         return glueData_.megaProbesCount_;
1935     }
1936 
1937     uint64_t GetMegaHitCount() const
1938     {
1939         return glueData_.megaHitCount;
1940     }
1941 
1942     uint64_t GetMegaUpdateCount() const
1943     {
1944         return glueData_.megaUpdateCount_;
1945     }
1946 
1947     void IncMegaUpdateCount()
1948     {
1949         glueData_.megaUpdateCount_++;
1950     }
1951 
1952     void ClearMegaStat()
1953     {
1954         glueData_.megaHitCount = 0;
1955         glueData_.megaProbesCount_ = 0;
1956         glueData_.megaUpdateCount_ = 0;
1957     }
1958     void PrintMegaICStat()
1959     {
1960         const int precision = 2;
1961         const double percent = 100.0;
1962         LOG_ECMA(INFO)
1963             << "------------------------------------------------------------"
1964             << "---------------------------------------------------------";
1965         LOG_ECMA(INFO) << "MegaUpdateCount: " << GetMegaUpdateCount();
1966         LOG_ECMA(INFO) << "MegaHitCount: " << GetMegaHitCount();
1967         LOG_ECMA(INFO) << "MegaProbeCount: " << GetMegaProbeCount();
1968         LOG_ECMA(INFO) << "MegaHitRate: " << std::fixed
1969                        << std::setprecision(precision)
1970                        << (GetMegaProbeCount() > 0
1971                                ? static_cast<double>(GetMegaHitCount()) /
1972                                      GetMegaProbeCount() * percent
1973                                : 0.0)
1974                        << "%";
1975         LOG_ECMA(INFO)
1976             << "------------------------------------------------------------"
1977             << "---------------------------------------------------------";
1978         ClearMegaStat();
1979     }
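    // Illustrative arithmetic only: the hit rate printed above is hits relative to probes, e.g.
    //     uint64_t probes = 1000, hits = 925;
    //     double hitRate = probes > 0 ? 100.0 * static_cast<double>(hits) / probes : 0.0;  // 92.50%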
1980     JSTHREAD_PUBLIC_HYBRID_EXTENSION();
1981 protected:
1982     void SetThreadId()
1983     {
1984         id_.store(JSThread::GetCurrentThreadId(), std::memory_order_release);
1985     }
1986 
1987     // When EcmaVM::PreFork() is called, the std::thread backing Daemon_Thread has finished, but the Daemon_Thread
1988     // instance is still alive, so its ThreadId needs to be reset to 0.
1989     void ResetThreadId()
1990     {
1991         id_.store(0, std::memory_order_release);
1992     }
1993 private:
1994     NO_COPY_SEMANTIC(JSThread);
1995     NO_MOVE_SEMANTIC(JSThread);
1996     void SetGlobalConst(GlobalEnvConstants *globalConst)
1997     {
1998         glueData_.globalConst_ = globalConst;
1999     }
2000 
2001     void PUBLIC_API TransferFromRunningToSuspended(ThreadState newState);
2002 
2003     void PUBLIC_API TransferToRunning();
2004 
2005     void PUBLIC_API StoreState(ThreadState newState);
2006 
2007     void StoreRunningState(ThreadState newState);
2008 
2009     void StoreSuspendedState(ThreadState newState);
2010 
2011     void TryTriggerFullMarkBySharedLimit();
2012 
2013     bool ReadFlag(ThreadFlag flag) const
2014     {
2015         ASSERT(!IsEnableCMCGC());
2016         uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
2017         uint16_t flags = (stateAndFlags & THREAD_FLAGS_MASK);
2018         return (flags & static_cast<uint16_t>(flag)) != 0;
2019     }
2020 
2021     void SetFlag(ThreadFlag flag)
2022     {
2023         ASSERT(!IsEnableCMCGC());
2024         glueData_.stateAndFlags_.asAtomicInt.fetch_or(flag, std::memory_order_seq_cst);
2025     }
2026 
2027     void ClearFlag(ThreadFlag flag)
2028     {
2029         ASSERT(!IsEnableCMCGC());
2030         glueData_.stateAndFlags_.asAtomicInt.fetch_and(UINT32_MAX ^ flag, std::memory_order_seq_cst);
2031     }
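    // Illustrative note on the packing implied by the accessors above: stateAndFlags_ keeps the flag bits in
    // the low bits selected by THREAD_FLAGS_MASK and the ThreadState in the bits starting at THREAD_STATE_OFFSET:
    //     uint32_t packed = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
    //     auto state = static_cast<ThreadState>(packed >> THREAD_STATE_OFFSET);
    //     bool suspend = ((packed & THREAD_FLAGS_MASK) & static_cast<uint16_t>(ThreadFlag::SUSPEND_REQUEST)) != 0;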
2032 
2033     void DumpStack() DUMP_API_ATTR;
2034 
2035     GlueData glueData_;
2036     std::atomic<ThreadId> id_ {0};
2037     EcmaVM *vm_ {nullptr};
2038     void *env_ {nullptr};
2039     Area *regExpCacheArea_ {nullptr};
2040 
2041     // MM: handles, global-handles, and aot-stubs.
2042     int nestedLevel_ = 0;
2043     NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
2044     HeapRegionAllocator *heapRegionAllocator_ {nullptr};
2045     bool runningNativeFinalizeCallbacks_ {false};
2046     std::vector<std::pair<WeakClearCallback, void *>> weakNodeFreeGlobalCallbacks_ {};
2047     std::vector<std::pair<WeakClearCallback, void *>> weakNodeNativeFinalizeCallbacks_ {};
2048 
2049     EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
2050     EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
2051     int32_t stackTraceFd_ {-1};
2052     std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
2053     std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
2054     std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack_,
2055          WeakClearCallback nativeFinalizeCallBack)> setWeak_;
2056     std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
2057     std::function<bool(uintptr_t addr)> isWeak_;
2058     NativePointerTaskCallback asyncCleanTaskCb_ {nullptr};
2059     WeakFinalizeTaskCallback finalizeTaskCallback_ {nullptr};
2060     bool ignoreFinalizeCallback_ {false};
2061     uint32_t globalNumberCount_ {0};
2062 
2063     // Run-time state
2064     bool getStackSignal_ {false};
2065     bool runtimeState_ {false};
2066     bool isAsmInterpreter_ {false};
2067     VmThreadControl *vmThreadControl_ {nullptr};
2068     bool enableStackSourceFile_ {true};
2069     bool enableLazyBuiltins_ {false};
2070     bool inGlobalEnvInitialize_ {false};
2071     bool readyForGCIterating_ {false};
2072     bool isUncaughtExceptionRegistered_ {false};
2073     // CpuProfiler
2074     bool isProfiling_ {false};
2075     bool gcState_ {false};
2076     std::atomic_bool needProfiling_ {false};
2077     std::string profileName_ {""};
2078 
2079     // Error callback
2080     OnErrorCallback onErrorCallback_ {nullptr};
2081     void *onErrorData_ {nullptr};
2082 
2083     bool finalizationCheckState_ {false};
2084     // Shared heap
2085     bool isMainThread_ {false};
2086     bool fullMarkRequest_ {false};
2087     // Shared heap collect local heap Rset
2088     bool processingLocalToSharedRset_ {false};
2089 
2090     CMap<JSHClass *, GlobalIndex> ctorHclassEntries_;
2091 
2092     bool isInSubStack_ {false};
2093     StackInfo mainStackInfo_ { 0ULL, 0ULL };
2094 
2095     Mutex suspendLock_;
2096     int32_t suspendCount_ {0};
2097     ConditionVariable suspendCondVar_;
2098     SuspendBarrier *suspendBarrier_ {nullptr};
2099 
2100     uint64_t jobId_ {0};
2101 
2102     ThreadType threadType_ {ThreadType::JS_THREAD};
2103     RecursiveMutex jitMutex_;
2104     bool machineCodeLowMemory_ {false};
2105     RecursiveMutex profileTypeAccessorLockMutex_;
2106     DateUtils *dateUtils_ {nullptr};
2107 
2108 #ifndef NDEBUG
2109     MutatorLock::MutatorLockState mutatorLockState_ = MutatorLock::MutatorLockState::UNLOCKED;
2110     std::atomic<bool> launchedSuspendAll_ {false};
2111 #endif
2112     // Maps JsError objects to MachineCode objects, for JsError objects whose stack frames were generated by the JIT.
2113     // It is used to keep the MachineCode objects alive (for dumping) until the corresponding JsError object is freed.
2114     std::map<JSTaggedType, JitCodeVector*> jitCodeMaps_;
2115     std::unordered_map<uintptr_t, uintptr_t> callSiteSpToReturnAddrTable_;
2116 
2117     std::atomic<bool> needTermination_ {false};
2118     std::atomic<bool> hasTerminated_ {false};
2119 
2120     bool isInConcurrentScope_ {false};
2121     JSTaggedValue hotReloadDependInfo_ {JSTaggedValue::Undefined()};
2122     JSTHREAD_PRIVATE_HYBRID_EXTENSION();
2123 
2124     friend class GlobalHandleCollection;
2125     friend class EcmaVM;
2126     friend class JitVM;
2127     friend class DaemonThread;
2128 };
2129 class SaveEnv {
2130 public:
2131     explicit SaveEnv(JSThread* thread): thread_(thread)
2132     {
2133         env_ = JSHandle<JSTaggedValue>(thread_, thread->GetGlueGlobalEnv());
2134     }
2135 
2136     ~SaveEnv()
2137     {
2138         thread_->SetGlueGlobalEnv(env_.GetTaggedValue());
2139     }
2140 
2141 private:
2142     JSThread* const thread_;
2143     JSHandle<JSTaggedValue> env_;
2144 };
2145 
2146 class SaveAndSwitchEnv : public SaveEnv {
2147 public:
2148     SaveAndSwitchEnv(JSThread* thread, JSTaggedValue newEnv): SaveEnv(thread)
2149     {
2150         thread->SetGlueGlobalEnv(newEnv);
2151     }
2152 };
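// Illustrative RAII usage sketch (hypothetical caller): temporarily switch the glue global env for a scope;
// the previous env is restored when the guard goes out of scope. "newEnvValue" is an assumed value:
//     {
//         SaveAndSwitchEnv guard(thread, newEnvValue);
//         // ... code that must observe newEnvValue as the current global env ...
//     }   // the saved env is written back here by ~SaveEnv()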
2153 }  // namespace panda::ecmascript
2154 #endif  // ECMASCRIPT_JS_THREAD_H
2155