1 /*
2  * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef ECMASCRIPT_JS_THREAD_H
17 #define ECMASCRIPT_JS_THREAD_H
18 
19 #include <atomic>
20 #include <sstream>
21 #include <string>
22 #include <cstdint>
23 
24 #include "ecmascript/platform/ffrt.h"
25 #include "ecmascript/base/aligned_struct.h"
26 #include "ecmascript/builtin_entries.h"
27 #include "ecmascript/daemon/daemon_task.h"
28 #include "ecmascript/global_index.h"
29 #include "ecmascript/ic/mega_ic_cache.h"
30 #include "ecmascript/js_object_resizing_strategy.h"
31 #include "ecmascript/js_tagged_value.h"
32 #include "ecmascript/js_thread_hclass_entries.h"
33 #include "ecmascript/js_thread_stub_entries.h"
34 #include "ecmascript/js_thread_elements_hclass_entries.h"
35 #include "ecmascript/log_wrapper.h"
36 #include "ecmascript/mem/visitor.h"
37 #include "ecmascript/mutator_lock.h"
38 
39 #if defined(ENABLE_FFRT_INTERFACES)
40 #include "ffrt.h"
41 #include "c/executor_task.h"
42 #endif
43 
44 namespace panda::ecmascript {
45 class DateUtils;
46 class EcmaContext;
47 class EcmaVM;
48 class EcmaHandleScope;
49 class GlobalIndex;
50 class HeapRegionAllocator;
51 class PropertiesCache;
52 class MegaICCache;
53 template<typename T>
54 class EcmaGlobalStorage;
55 class Node;
56 class DebugNode;
57 class VmThreadControl;
58 class GlobalEnvConstants;
59 enum class ElementsKind : uint8_t;
60 
61 class MachineCode;
62 using JitCodeVector = std::vector<std::tuple<MachineCode*, std::string, uintptr_t>>;
63 using JitCodeMapVisitor = std::function<void(std::map<JSTaggedType, JitCodeVector*>&)>;
64 
65 using WeakClearCallback = void (*)(void *);
66 
67 enum class MarkStatus : uint8_t {
68     READY_TO_MARK,
69     MARKING,
70     MARK_FINISHED,
71 };
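// Local (non-shared) concurrent marking moves through READY_TO_MARK -> MARKING -> MARK_FINISHED;
// JSThread::IsReadyToConcurrentMark(), IsMarking() and IsMarkFinished() test these phases.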
72 
73 enum class GCKind : uint8_t {
74     LOCAL_GC,
75     SHARED_GC
76 };
77 
78 enum class PGOProfilerStatus : uint8_t {
79     PGO_PROFILER_DISABLE,
80     PGO_PROFILER_ENABLE,
81 };
82 
83 enum class BCStubStatus: uint8_t {
84     NORMAL_BC_STUB,
85     PROFILE_BC_STUB,
86     JIT_PROFILE_BC_STUB,
87 };
88 
89 enum ThreadType : uint8_t {
90     JS_THREAD,
91     JIT_THREAD,
92     DAEMON_THREAD,
93 };
94 
95 enum ThreadFlag : uint16_t {
96     NO_FLAGS = 0 << 0,
97     SUSPEND_REQUEST = 1 << 0,
98     ACTIVE_BARRIER = 1 << 1,
99 };
100 
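// The thread state and flags are packed into a single 32-bit word (see ThreadStateAndFlags below):
// the low 16 bits carry ThreadFlag bits and the high 16 bits carry the ThreadState value, so both
// can be read individually or compare-and-swapped together.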
101 static constexpr uint32_t THREAD_STATE_OFFSET = 16;
102 static constexpr uint32_t THREAD_FLAGS_MASK = (0x1 << THREAD_STATE_OFFSET) - 1;
103 enum class ThreadState : uint16_t {
104     CREATED = 0,
105     RUNNING = 1,
106     NATIVE = 2,
107     WAIT = 3,
108     IS_SUSPENDED = 4,
109     TERMINATED = 5,
110 };
111 
112 union ThreadStateAndFlags {
113     explicit ThreadStateAndFlags(uint32_t val = 0): asInt(val) {}
114     struct {
115         volatile uint16_t flags;
116         volatile ThreadState state;
117     } asStruct;
118     struct {
119         uint16_t flags;
120         ThreadState state;
121     } asNonvolatileStruct;
122     volatile uint32_t asInt;
123     uint32_t asNonvolatileInt;
124     std::atomic<uint32_t> asAtomicInt;
125 private:
126     NO_COPY_SEMANTIC(ThreadStateAndFlags);
127 };
128 
129 static constexpr uint32_t MAIN_THREAD_INDEX = 0;
130 
131 class JSThread {
132 public:
133     static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2;
134     static constexpr int CONCURRENT_MARKING_BITFIELD_MASK = 0x3;
135     static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_NUM = 1;
136     static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_MASK = 0x1;
137     static constexpr int CHECK_SAFEPOINT_BITFIELD_NUM = 8;
138     static constexpr int PGO_PROFILER_BITFIELD_START = 16;
139     static constexpr int BOOL_BITFIELD_NUM = 1;
140     static constexpr int BCSTUBSTATUS_BITFIELD_NUM = 2;
141     static constexpr uint32_t RESERVE_STACK_SIZE = 128;
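    // Bit-field layout: MarkStatusBits and SharedMarkStatusBits are kept in glueData_.gcStateBitField_
    // and glueData_.sharedGCStateBitField_ respectively, while CheckSafePointBit, VMNeedSuspensionBit,
    // VMHasSuspendedBit, InstallMachineCodeBit, PGOStatusBits and BCStubStatusBits all share the 64-bit
    // glueData_.interruptVector_ and are updated atomically via SetInterruptValue().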
142     using MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>;
143     using SharedMarkStatusBits = BitField<SharedMarkStatus, 0, SHARED_CONCURRENT_MARKING_BITFIELD_NUM>;
144     using CheckSafePointBit = BitField<bool, 0, BOOL_BITFIELD_NUM>;
145     using VMNeedSuspensionBit = BitField<bool, CHECK_SAFEPOINT_BITFIELD_NUM, BOOL_BITFIELD_NUM>;
146     using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag;
147     using InstallMachineCodeBit = VMHasSuspendedBit::NextFlag;
148     using PGOStatusBits = BitField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_START, BOOL_BITFIELD_NUM>;
149     using BCStubStatusBits = PGOStatusBits::NextField<BCStubStatus, BCSTUBSTATUS_BITFIELD_NUM>;
150     using ThreadId = uint32_t;
151 
152     enum FrameDroppedState {
153         StateFalse = 0,
154         StateTrue,
155         StatePending
156     };
157 
158     enum StackInfoOpKind : uint32_t {
159         SwitchToSubStackInfo = 0,
160         SwitchToMainStackInfo,
161     };
162 
163     struct StackInfo {
164         uint64_t stackLimit;
165         uint64_t lastLeaveFrame;
166     };
167 
168     explicit JSThread(EcmaVM *vm);
169     // only used in jit thread
170     explicit JSThread(EcmaVM *vm, ThreadType threadType);
171     // only used in daemon thread
172     explicit JSThread(ThreadType threadType);
173 
174     PUBLIC_API ~JSThread();
175 
176     EcmaVM *GetEcmaVM() const
177     {
178         return vm_;
179     }
180 
181     static JSThread *Create(EcmaVM *vm);
182     static JSThread *GetCurrent();
183 
184     int GetNestedLevel() const
185     {
186         return nestedLevel_;
187     }
188 
189     void SetNestedLevel(int level)
190     {
191         nestedLevel_ = level;
192     }
193 
194     void SetLastFp(JSTaggedType *fp)
195     {
196         glueData_.lastFp_ = fp;
197     }
198 
199     const JSTaggedType *GetLastFp() const
200     {
201         return glueData_.lastFp_;
202     }
203 
204     const JSTaggedType *GetCurrentSPFrame() const
205     {
206         return glueData_.currentFrame_;
207     }
208 
209     void SetCurrentSPFrame(JSTaggedType *sp)
210     {
211         glueData_.currentFrame_ = sp;
212     }
213 
214     const JSTaggedType *GetLastLeaveFrame() const
215     {
216         return glueData_.leaveFrame_;
217     }
218 
219     void SetLastLeaveFrame(JSTaggedType *sp)
220     {
221         glueData_.leaveFrame_ = sp;
222     }
223 
224     const JSTaggedType *GetCurrentFrame() const;
225 
226     void SetCurrentFrame(JSTaggedType *sp);
227 
228     const JSTaggedType *GetCurrentInterpretedFrame() const;
229 
230     bool DoStackOverflowCheck(const JSTaggedType *sp);
231 
232     bool DoStackLimitCheck();
233 
234     NativeAreaAllocator *GetNativeAreaAllocator() const
235     {
236         return nativeAreaAllocator_;
237     }
238 
239     HeapRegionAllocator *GetHeapRegionAllocator() const
240     {
241         return heapRegionAllocator_;
242     }
243 
244     void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
245     {
246         glueData_.newSpaceAllocationTopAddress_ = top;
247         glueData_.newSpaceAllocationEndAddress_ = end;
248     }
249 
250     void ReSetSOldSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
251     {
252         glueData_.sOldSpaceAllocationTopAddress_ = top;
253         glueData_.sOldSpaceAllocationEndAddress_ = end;
254     }
255 
256     void ReSetSNonMovableSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end)
257     {
258         glueData_.sNonMovableSpaceAllocationTopAddress_ = top;
259         glueData_.sNonMovableSpaceAllocationEndAddress_ = end;
260     }
261 
262     uintptr_t GetUnsharedConstpools() const
263     {
264         return glueData_.unsharedConstpools_;
265     }
266 
267     void SetUnsharedConstpools(uintptr_t unsharedConstpools)
268     {
269         glueData_.unsharedConstpools_ = unsharedConstpools;
270     }
271 
272     uintptr_t GetUnsharedConstpoolsArrayLen() const
273     {
274         return glueData_.unsharedConstpoolsArrayLen_;
275     }
276 
277     void SetUnsharedConstpoolsArrayLen(uint32_t unsharedConstpoolsArrayLen)
278     {
279         glueData_.unsharedConstpoolsArrayLen_ = unsharedConstpoolsArrayLen;
280     }
281 
282     void SetIsStartHeapSampling(bool isStart)
283     {
284         glueData_.isStartHeapSampling_ = isStart ? JSTaggedValue::True() : JSTaggedValue::False();
285     }
286 
287     void SetIsTracing(bool isTracing)
288     {
289         glueData_.isTracing_ = isTracing;
290     }
291 
292     void Iterate(RootVisitor &visitor);
293 
294     void IterateJitCodeMap(const JitCodeMapVisitor &updater);
295 
296     void IterateHandleWithCheck(RootVisitor &visitor);
297 
298     uintptr_t* PUBLIC_API ExpandHandleStorage();
299     void PUBLIC_API ShrinkHandleStorage(int prevIndex);
300     void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const;
301     bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const;
302 
303     void PUBLIC_API SetException(JSTaggedValue exception);
304 
305     JSTaggedValue GetException() const
306     {
307         return glueData_.exception_;
308     }
309 
310     bool HasPendingException() const
311     {
312         return !glueData_.exception_.IsHole();
313     }
314 
315     void ClearException();
316 
317     void SetGlobalObject(JSTaggedValue globalObject)
318     {
319         glueData_.globalObject_ = globalObject;
320     }
321 
322     const GlobalEnv *GetGlobalEnv() const
323     {
324         return glueData_.glueGlobalEnv_;
325     }
326 
327     const GlobalEnvConstants *GlobalConstants() const
328     {
329         return glueData_.globalConst_;
330     }
331 
332     void SetGlobalConstants(const GlobalEnvConstants *constants)
333     {
334         glueData_.globalConst_ = const_cast<GlobalEnvConstants*>(constants);
335     }
336 
337     BuiltinEntries* GetBuiltinEntriesPointer()
338     {
339         return &glueData_.builtinEntries_;
340     }
341 
342     const CMap<JSHClass *, GlobalIndex> &GetCtorHclassEntries() const
343     {
344         return ctorHclassEntries_;
345     }
346 
347     void NotifyArrayPrototypeChangedGuardians(JSHandle<JSObject> receiver);
348 
349     bool IsArrayPrototypeChangedGuardiansInvalid() const
350     {
351         return !glueData_.arrayPrototypeChangedGuardians_;
352     }
353 
354     void ResetGuardians();
355 
356     void SetInitialBuiltinHClass(
357         BuiltinTypeId type, JSHClass *builtinHClass, JSHClass *instanceHClass,
358                             JSHClass *prototypeHClass, JSHClass *prototypeOfPrototypeHClass = nullptr,
359                             JSHClass *extraHClass = nullptr);
360 
361     void SetInitialBuiltinGlobalHClass(JSHClass *builtinHClass, GlobalIndex globalIndex);
362 
363     JSHClass *GetBuiltinHClass(BuiltinTypeId type) const;
364 
365     JSHClass *GetBuiltinInstanceHClass(BuiltinTypeId type) const;
366     JSHClass *GetBuiltinExtraHClass(BuiltinTypeId type) const;
367 
368     JSHClass* GetArrayInstanceHClass(ElementsKind kind, bool isPrototype) const
369     {
370         ConstantIndex index = glueData_.arrayHClassIndexes_.GetArrayInstanceHClassIndex(kind, isPrototype);
371         auto exceptArrayHClass = GlobalConstants()->GetGlobalConstantObject(static_cast<size_t>(index));
372         auto exceptRecvHClass = JSHClass::Cast(exceptArrayHClass.GetTaggedObject());
373         ASSERT(exceptRecvHClass->IsJSArray());
374         return exceptRecvHClass;
375     }
376 
377     ConstantIndex GetArrayInstanceHClassIndex(ElementsKind kind, bool isPrototype) const
378     {
379         return glueData_.arrayHClassIndexes_.GetArrayInstanceHClassIndex(kind, isPrototype);
380     }
381 
382     PUBLIC_API JSHClass *GetBuiltinPrototypeHClass(BuiltinTypeId type) const;
383     PUBLIC_API JSHClass *GetBuiltinPrototypeOfPrototypeHClass(BuiltinTypeId type) const;
384 
385     static size_t GetBuiltinHClassOffset(BuiltinTypeId, bool isArch32);
386 
387     static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId, bool isArch32);
388 
389     const BuiltinHClassEntries &GetBuiltinHClassEntries() const
390     {
391         return glueData_.builtinHClassEntries_;
392     }
393 
394     JSTaggedValue GetCurrentLexenv() const;
395     JSTaggedValue GetCurrentFunction() const;
396 
397     void RegisterRTInterface(size_t id, Address addr)
398     {
399         ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
400         glueData_.rtStubEntries_.Set(id, addr);
401     }
402 
403     Address GetRTInterface(size_t id) const
404     {
405         ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
406         return glueData_.rtStubEntries_.Get(id);
407     }
408 
409     Address GetFastStubEntry(uint32_t id) const
410     {
411         return glueData_.coStubEntries_.Get(id);
412     }
413 
414     void SetFastStubEntry(size_t id, Address entry)
415     {
416         glueData_.coStubEntries_.Set(id, entry);
417     }
418 
419     Address GetBuiltinStubEntry(uint32_t id) const
420     {
421         return glueData_.builtinStubEntries_.Get(id);
422     }
423 
424     void SetBuiltinStubEntry(size_t id, Address entry)
425     {
426         glueData_.builtinStubEntries_.Set(id, entry);
427     }
428 
429     Address GetBCStubEntry(uint32_t id) const
430     {
431         return glueData_.bcStubEntries_.Get(id);
432     }
433 
434     void SetBCStubEntry(size_t id, Address entry)
435     {
436         glueData_.bcStubEntries_.Set(id, entry);
437     }
438 
439     Address GetBaselineStubEntry(uint32_t id) const
440     {
441         return glueData_.baselineStubEntries_.Get(id);
442     }
443 
444     void SetBaselineStubEntry(size_t id, Address entry)
445     {
446         glueData_.baselineStubEntries_.Set(id, entry);
447     }
448 
449     void SetBCDebugStubEntry(size_t id, Address entry)
450     {
451         glueData_.bcDebuggerStubEntries_.Set(id, entry);
452     }
453 
454     Address *GetBytecodeHandler()
455     {
456         return glueData_.bcStubEntries_.GetAddr();
457     }
458 
459     void PUBLIC_API CheckSwitchDebuggerBCStub();
460     void CheckOrSwitchPGOStubs();
461     void SwitchJitProfileStubs(bool isEnablePgo);
462 
463     ThreadId GetThreadId() const
464     {
465         return id_.load(std::memory_order_acquire);
466     }
467 
468     void PostFork();
469 
470     static ThreadId GetCurrentThreadId();
471 
472     void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor, GCKind gcKind = GCKind::LOCAL_GC);
473 
474     void UpdateJitCodeMapReference(const WeakRootVisitor &visitor);
475 
476     PUBLIC_API PropertiesCache *GetPropertiesCache() const;
477     PUBLIC_API MegaICCache *GetLoadMegaICCache() const;
478     PUBLIC_API MegaICCache *GetStoreMegaICCache() const;
479 
480     MarkStatus GetMarkStatus() const
481     {
482         return MarkStatusBits::Decode(glueData_.gcStateBitField_);
483     }
484 
485     void SetMarkStatus(MarkStatus status)
486     {
487         MarkStatusBits::Set(status, &glueData_.gcStateBitField_);
488     }
489 
490     bool IsConcurrentMarkingOrFinished() const
491     {
492         return !IsReadyToConcurrentMark();
493     }
494 
495     bool IsReadyToConcurrentMark() const
496     {
497         auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
498         return status == MarkStatus::READY_TO_MARK;
499     }
500 
501     bool IsMarking() const
502     {
503         auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
504         return status == MarkStatus::MARKING;
505     }
506 
507     bool IsMarkFinished() const
508     {
509         auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
510         return status == MarkStatus::MARK_FINISHED;
511     }
512 
513     SharedMarkStatus GetSharedMarkStatus() const
514     {
515         return SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
516     }
517 
518     void SetSharedMarkStatus(SharedMarkStatus status)
519     {
520         SharedMarkStatusBits::Set(status, &glueData_.sharedGCStateBitField_);
521     }
522 
523     bool IsSharedConcurrentMarkingOrFinished() const
524     {
525         auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
526         return status == SharedMarkStatus::CONCURRENT_MARKING_OR_FINISHED;
527     }
528 
529     bool IsReadyToSharedConcurrentMark() const
530     {
531         auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_);
532         return status == SharedMarkStatus::READY_TO_CONCURRENT_MARK;
533     }
534 
535     void SetPGOProfilerEnable(bool enable)
536     {
537         PGOProfilerStatus status =
538             enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE;
539         SetInterruptValue<PGOStatusBits>(status);
540     }
541 
542     bool IsPGOProfilerEnable() const
543     {
544         auto status = PGOStatusBits::Decode(glueData_.interruptVector_);
545         return status == PGOProfilerStatus::PGO_PROFILER_ENABLE;
546     }
547 
548     void SetBCStubStatus(BCStubStatus status)
549     {
550         SetInterruptValue<BCStubStatusBits>(status);
551     }
552 
553     BCStubStatus GetBCStubStatus() const
554     {
555         return BCStubStatusBits::Decode(glueData_.interruptVector_);
556     }
557 
558     bool ShouldHandleMarkingFinishedInSafepoint();
559 
560     bool CheckSafepoint();
561 
562     void CheckAndPassActiveBarrier();
563 
564     bool PassSuspendBarrier();
565 
566     void SetGetStackSignal(bool isParseStack)
567     {
568         getStackSignal_ = isParseStack;
569     }
570 
571     bool GetStackSignal() const
572     {
573         return getStackSignal_;
574     }
575 
576     void SetNeedProfiling(bool needProfiling)
577     {
578         needProfiling_.store(needProfiling);
579     }
580 
581     void SetIsProfiling(bool isProfiling)
582     {
583         isProfiling_ = isProfiling;
584     }
585 
586     bool GetIsProfiling() const
587     {
588         return isProfiling_;
589     }
590 
591     void SetGcState(bool gcState)
592     {
593         gcState_ = gcState;
594     }
595 
596     bool GetGcState() const
597     {
598         return gcState_;
599     }
600 
601     void SetRuntimeState(bool runtimeState)
602     {
603         runtimeState_ = runtimeState;
604     }
605 
606     bool GetRuntimeState() const
607     {
608         return runtimeState_;
609     }
610 
611     bool SetMainThread()
612     {
613         return isMainThread_ = true;
614     }
615 
616     bool IsMainThreadFast() const
617     {
618         return isMainThread_;
619     }
620 
621     void SetCpuProfileName(std::string &profileName)
622     {
623         profileName_ = profileName;
624     }
625 
626     void EnableAsmInterpreter()
627     {
628         isAsmInterpreter_ = true;
629     }
630 
631     bool IsAsmInterpreter() const
632     {
633         return isAsmInterpreter_;
634     }
635 
636     VmThreadControl *GetVmThreadControl() const
637     {
638         return vmThreadControl_;
639     }
640 
641     void SetEnableStackSourceFile(bool value)
642     {
643         enableStackSourceFile_ = value;
644     }
645 
646     bool GetEnableStackSourceFile() const
647     {
648         return enableStackSourceFile_;
649     }
650 
651     void SetEnableLazyBuiltins(bool value)
652     {
653         enableLazyBuiltins_ = value;
654     }
655 
656     bool GetEnableLazyBuiltins() const
657     {
658         return enableLazyBuiltins_;
659     }
660 
661     void SetReadyForGCIterating(bool flag)
662     {
663         readyForGCIterating_ = flag;
664     }
665 
666     bool ReadyForGCIterating() const
667     {
668         return readyForGCIterating_;
669     }
670 
671     static constexpr size_t GetGlueDataOffset()
672     {
673         return MEMBER_OFFSET(JSThread, glueData_);
674     }
675 
676     uintptr_t GetGlueAddr() const
677     {
678         return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset();
679     }
680 
681     static JSThread *GlueToJSThread(uintptr_t glue)
682     {
683         // Be very careful when modifying this offset arithmetic.
684         return reinterpret_cast<JSThread *>(glue - GetGlueDataOffset());
685     }
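    // The glue address is simply `reinterpret_cast<uintptr_t>(thread) + GetGlueDataOffset()`, so
    // GlueToJSThread(thread->GetGlueAddr()) recovers the original JSThread pointer.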
686 
687     void SetCheckSafePointStatus()
688     {
689         ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
690         SetInterruptValue<CheckSafePointBit>(true);
691     }
692 
ResetCheckSafePointStatus()693     void ResetCheckSafePointStatus()
694     {
695         ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
696         SetInterruptValue<CheckSafePointBit>(false);
697     }
698 
SetVMNeedSuspension(bool flag)699     void SetVMNeedSuspension(bool flag)
700     {
701         SetInterruptValue<VMNeedSuspensionBit>(flag);
702     }
703 
VMNeedSuspension()704     bool VMNeedSuspension()
705     {
706         return VMNeedSuspensionBit::Decode(glueData_.interruptVector_);
707     }
708 
SetVMSuspended(bool flag)709     void SetVMSuspended(bool flag)
710     {
711         SetInterruptValue<VMHasSuspendedBit>(flag);
712     }
713 
IsVMSuspended()714     bool IsVMSuspended()
715     {
716         return VMHasSuspendedBit::Decode(glueData_.interruptVector_);
717     }
718 
HasTerminationRequest()719     bool HasTerminationRequest() const
720     {
721         return needTermination_;
722     }
723 
SetTerminationRequest(bool flag)724     void SetTerminationRequest(bool flag)
725     {
726         needTermination_ = flag;
727     }
728 
SetVMTerminated(bool flag)729     void SetVMTerminated(bool flag)
730     {
731         hasTerminated_ = flag;
732     }
733 
HasTerminated()734     bool HasTerminated() const
735     {
736         return hasTerminated_;
737     }
738 
739     void TerminateExecution();
740 
SetInstallMachineCode(bool flag)741     void SetInstallMachineCode(bool flag)
742     {
743         SetInterruptValue<InstallMachineCodeBit>(flag);
744     }
745 
HasInstallMachineCode()746     bool HasInstallMachineCode() const
747     {
748         return InstallMachineCodeBit::Decode(glueData_.interruptVector_);
749     }
750 
GetCurrentStackPosition()751     static uintptr_t GetCurrentStackPosition()
752     {
753         return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
754     }
755 
756     bool IsLegalAsmSp(uintptr_t sp) const;
757 
758     bool IsLegalThreadSp(uintptr_t sp) const;
759 
760     bool IsLegalSp(uintptr_t sp) const;
761 
SetCheckAndCallEnterState(bool state)762     void SetCheckAndCallEnterState(bool state)
763     {
764         finalizationCheckState_ = state;
765     }
766 
GetCheckAndCallEnterState()767     bool GetCheckAndCallEnterState() const
768     {
769         return finalizationCheckState_;
770     }
771 
GetStackStart()772     uint64_t GetStackStart() const
773     {
774         return glueData_.stackStart_;
775     }
776 
GetStackLimit()777     uint64_t GetStackLimit() const
778     {
779         return glueData_.stackLimit_;
780     }
781 
GetGlueGlobalEnv()782     GlobalEnv *GetGlueGlobalEnv()
783     {
784         return glueData_.glueGlobalEnv_;
785     }
786 
SetGlueGlobalEnv(GlobalEnv * global)787     void SetGlueGlobalEnv(GlobalEnv *global)
788     {
789         ASSERT(global != nullptr);
790         glueData_.glueGlobalEnv_ = global;
791     }
792 
NewGlobalHandle(JSTaggedType value)793     inline uintptr_t NewGlobalHandle(JSTaggedType value)
794     {
795         return newGlobalHandle_(value);
796     }
797 
DisposeGlobalHandle(uintptr_t nodeAddr)798     inline void DisposeGlobalHandle(uintptr_t nodeAddr)
799     {
800         disposeGlobalHandle_(nodeAddr);
801     }
802 
803     inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr,
804                              WeakClearCallback nativeFinalizeCallBack = nullptr)
805     {
806         return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
807     }
808 
ClearWeak(uintptr_t nodeAddr)809     inline uintptr_t ClearWeak(uintptr_t nodeAddr)
810     {
811         return clearWeak_(nodeAddr);
812     }
813 
IsWeak(uintptr_t addr)814     inline bool IsWeak(uintptr_t addr) const
815     {
816         return isWeak_(addr);
817     }
818 
EnableCrossThreadExecution()819     void EnableCrossThreadExecution()
820     {
821         glueData_.allowCrossThreadExecution_ = true;
822     }
823 
IsCrossThreadExecutionEnable()824     bool IsCrossThreadExecutionEnable() const
825     {
826         return glueData_.allowCrossThreadExecution_;
827     }
828 
IsFrameDropped()829     bool IsFrameDropped()
830     {
831         return glueData_.isFrameDropped_;
832     }
833 
SetFrameDroppedState()834     void SetFrameDroppedState()
835     {
836         glueData_.isFrameDropped_ = true;
837     }
838 
ResetFrameDroppedState()839     void ResetFrameDroppedState()
840     {
841         glueData_.isFrameDropped_ = false;
842     }
843 
IsEntryFrameDroppedTrue()844     bool IsEntryFrameDroppedTrue()
845     {
846         return glueData_.entryFrameDroppedState_ == FrameDroppedState::StateTrue;
847     }
848 
IsEntryFrameDroppedPending()849     bool IsEntryFrameDroppedPending()
850     {
851         return glueData_.entryFrameDroppedState_ == FrameDroppedState::StatePending;
852     }
853 
SetEntryFrameDroppedState()854     void SetEntryFrameDroppedState()
855     {
856         glueData_.entryFrameDroppedState_ = FrameDroppedState::StateTrue;
857     }
858 
ResetEntryFrameDroppedState()859     void ResetEntryFrameDroppedState()
860     {
861         glueData_.entryFrameDroppedState_ = FrameDroppedState::StateFalse;
862     }
863 
PendingEntryFrameDroppedState()864     void PendingEntryFrameDroppedState()
865     {
866         glueData_.entryFrameDroppedState_ = FrameDroppedState::StatePending;
867     }
868 
IsDebugMode()869     bool IsDebugMode()
870     {
871         return glueData_.isDebugMode_;
872     }
873 
SetDebugModeState()874     void SetDebugModeState()
875     {
876         glueData_.isDebugMode_ = true;
877     }
878 
ResetDebugModeState()879     void ResetDebugModeState()
880     {
881         glueData_.isDebugMode_ = false;
882     }
883 
884     template<typename T, typename V>
885     void SetInterruptValue(V value)
886     {
887         volatile auto interruptValue =
888             reinterpret_cast<volatile std::atomic<uint64_t> *>(&glueData_.interruptVector_);
889         uint64_t oldValue = interruptValue->load(std::memory_order_relaxed);
890         auto newValue = oldValue;
891         do {
892             newValue = oldValue;
893             T::Set(value, &newValue);
894         } while (!std::atomic_compare_exchange_strong_explicit(interruptValue, &oldValue, newValue,
895                                                                std::memory_order_release,
896                                                                std::memory_order_relaxed));
897     }
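    // For example, SetInterruptValue<CheckSafePointBit>(true) flips only that bit of interruptVector_;
    // the CAS loop above preserves all other interrupt bits even if another thread updates the vector
    // concurrently.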
898 
899     void InvokeWeakNodeFreeGlobalCallBack();
900     void InvokeWeakNodeNativeFinalizeCallback();
901     bool IsStartGlobalLeakCheck() const;
902     bool EnableGlobalObjectLeakCheck() const;
903     bool EnableGlobalPrimitiveLeakCheck() const;
904     void WriteToStackTraceFd(std::ostringstream &buffer) const;
905     void SetStackTraceFd(int32_t fd);
906     void CloseStackTraceFd();
IncreaseGlobalNumberCount()907     uint32_t IncreaseGlobalNumberCount()
908     {
909         return ++globalNumberCount_;
910     }
911 
SetPropertiesGrowStep(uint32_t step)912     void SetPropertiesGrowStep(uint32_t step)
913     {
914         glueData_.propertiesGrowStep_ = step;
915     }
916 
GetPropertiesGrowStep()917     uint32_t GetPropertiesGrowStep() const
918     {
919         return glueData_.propertiesGrowStep_;
920     }
921 
SetRandomStatePtr(uint64_t * ptr)922     void SetRandomStatePtr(uint64_t *ptr)
923     {
924         glueData_.randomStatePtr_ = reinterpret_cast<uintptr_t>(ptr);
925     }
926 
SetTaskInfo(uintptr_t taskInfo)927     void SetTaskInfo(uintptr_t taskInfo)
928     {
929         glueData_.taskInfo_ = taskInfo;
930     }
931 
GetTaskInfo()932     uintptr_t GetTaskInfo() const
933     {
934         return glueData_.taskInfo_;
935     }
936 
937     void SetJitCodeMap(JSTaggedType exception,  MachineCode* machineCode, std::string &methodName, uintptr_t offset);
938 
GetJitCodeMaps()939     std::map<JSTaggedType, JitCodeVector*> &GetJitCodeMaps()
940     {
941         return jitCodeMaps_;
942     }
943 
IsEnableMutantArray()944     bool IsEnableMutantArray() const
945     {
946         return glueData_.isEnableMutantArray_;
947     }
948 
IsEnableElementsKind()949     bool IsEnableElementsKind() const
950     {
951         return glueData_.IsEnableElementsKind_;
952     }
953 
954     struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
955                                                  BCStubEntries,
956                                                  JSTaggedValue,
957                                                  JSTaggedValue,
958                                                  base::AlignedBool,
959                                                  base::AlignedPointer,
960                                                  base::AlignedPointer,
961                                                  base::AlignedPointer,
962                                                  base::AlignedPointer,
963                                                  base::AlignedPointer,
964                                                  base::AlignedPointer,
965                                                  base::AlignedPointer,
966                                                  base::AlignedPointer,
967                                                  base::AlignedPointer,
968                                                  RTStubEntries,
969                                                  COStubEntries,
970                                                  BuiltinStubEntries,
971                                                  BuiltinHClassEntries,
972                                                  BCDebuggerStubEntries,
973                                                  BaselineStubEntries,
974                                                  base::AlignedUint64,
975                                                  base::AlignedUint64,
976                                                  base::AlignedPointer,
977                                                  base::AlignedUint64,
978                                                  base::AlignedUint64,
979                                                  base::AlignedPointer,
980                                                  base::AlignedPointer,
981                                                  base::AlignedUint64,
982                                                  base::AlignedUint64,
983                                                  JSTaggedValue,
984                                                  base::AlignedBool,
985                                                  base::AlignedBool,
986                                                  base::AlignedUint32,
987                                                  JSTaggedValue,
988                                                  base::AlignedPointer,
989                                                  BuiltinEntries,
990                                                  base::AlignedBool,
991                                                  base::AlignedUint32,
992                                                  base::AlignedPointer,
993                                                  base::AlignedPointer,
994                                                  base::AlignedPointer,
995                                                  base::AlignedUint32,
996                                                  base::AlignedBool,
997                                                  base::AlignedBool,
998                                                  ElementsHClassEntries> {
999         enum class Index : size_t {
1000             BcStubEntriesIndex = 0,
1001             ExceptionIndex,
1002             GlobalObjIndex,
1003             ArrayElementsGuardiansIndex,
1004             CurrentFrameIndex,
1005             LeaveFrameIndex,
1006             LastFpIndex,
1007             NewSpaceAllocationTopAddressIndex,
1008             NewSpaceAllocationEndAddressIndex,
1009             SOldSpaceAllocationTopAddressIndex,
1010             SOldSpaceAllocationEndAddressIndex,
1011             SNonMovableSpaceAllocationTopAddressIndex,
1012             SNonMovableSpaceAllocationEndAddressIndex,
1013             RTStubEntriesIndex,
1014             COStubEntriesIndex,
1015             BuiltinsStubEntriesIndex,
1016             BuiltinHClassEntriesIndex,
1017             BcDebuggerStubEntriesIndex,
1018             BaselineStubEntriesIndex,
1019             GCStateBitFieldIndex,
1020             SharedGCStateBitFieldIndex,
1021             FrameBaseIndex,
1022             StackStartIndex,
1023             StackLimitIndex,
1024             GlueGlobalEnvIndex,
1025             GlobalConstIndex,
1026             AllowCrossThreadExecutionIndex,
1027             InterruptVectorIndex,
1028             IsStartHeapSamplingIndex,
1029             IsDebugModeIndex,
1030             IsFrameDroppedIndex,
1031             PropertiesGrowStepIndex,
1032             EntryFrameDroppedStateIndex,
1033             CurrentContextIndex,
1034             BuiltinEntriesIndex,
1035             IsTracingIndex,
1036             UnsharedConstpoolsArrayLenIndex,
1037             UnsharedConstpoolsIndex,
1038             RandomStatePtrIndex,
1039             StateAndFlagsIndex,
1040             TaskInfoIndex,
1041             IsEnableMutantArrayIndex,
1042             IsEnableElementsKindIndex,
1043             ArrayHClassIndexesIndex,
1044             NumOfMembers
1045         };
1046         static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);
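        // Note: the Index order above, the type list of the AlignedStruct base class and the member
        // declaration order at the end of this struct are expected to stay in sync; the GetOffset<Index>
        // computations below rely on that correspondence.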
1047 
GetExceptionOffsetGlueData1048         static size_t GetExceptionOffset(bool isArch32)
1049         {
1050             return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32);
1051         }
1052 
GetGlobalObjOffsetGlueData1053         static size_t GetGlobalObjOffset(bool isArch32)
1054         {
1055             return GetOffset<static_cast<size_t>(Index::GlobalObjIndex)>(isArch32);
1056         }
1057 
GetArrayElementsGuardiansOffsetGlueData1058         static size_t GetArrayElementsGuardiansOffset(bool isArch32)
1059         {
1060             return GetOffset<static_cast<size_t>(Index::ArrayElementsGuardiansIndex)>(isArch32);
1061         }
1062 
GetGlobalConstOffsetGlueData1063         static size_t GetGlobalConstOffset(bool isArch32)
1064         {
1065             return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
1066         }
1067 
GetGCStateBitFieldOffsetGlueData1068         static size_t GetGCStateBitFieldOffset(bool isArch32)
1069         {
1070             return GetOffset<static_cast<size_t>(Index::GCStateBitFieldIndex)>(isArch32);
1071         }
1072 
GetSharedGCStateBitFieldOffsetGlueData1073         static size_t GetSharedGCStateBitFieldOffset(bool isArch32)
1074         {
1075             return GetOffset<static_cast<size_t>(Index::SharedGCStateBitFieldIndex)>(isArch32);
1076         }
1077 
GetCurrentFrameOffsetGlueData1078         static size_t GetCurrentFrameOffset(bool isArch32)
1079         {
1080             return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32);
1081         }
1082 
GetLeaveFrameOffsetGlueData1083         static size_t GetLeaveFrameOffset(bool isArch32)
1084         {
1085             return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32);
1086         }
1087 
GetLastFpOffsetGlueData1088         static size_t GetLastFpOffset(bool isArch32)
1089         {
1090             return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32);
1091         }
1092 
GetNewSpaceAllocationTopAddressOffsetGlueData1093         static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32)
1094         {
1095             return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32);
1096         }
1097 
GetNewSpaceAllocationEndAddressOffsetGlueData1098         static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32)
1099         {
1100             return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32);
1101         }
1102 
GetSOldSpaceAllocationTopAddressOffsetGlueData1103         static size_t GetSOldSpaceAllocationTopAddressOffset(bool isArch32)
1104         {
1105             return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationTopAddressIndex)>(isArch32);
1106         }
1107 
GetSOldSpaceAllocationEndAddressOffsetGlueData1108         static size_t GetSOldSpaceAllocationEndAddressOffset(bool isArch32)
1109         {
1110             return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationEndAddressIndex)>(isArch32);
1111         }
1112 
GetSNonMovableSpaceAllocationTopAddressOffsetGlueData1113         static size_t GetSNonMovableSpaceAllocationTopAddressOffset(bool isArch32)
1114         {
1115             return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationTopAddressIndex)>(isArch32);
1116         }
1117 
GetSNonMovableSpaceAllocationEndAddressOffsetGlueData1118         static size_t GetSNonMovableSpaceAllocationEndAddressOffset(bool isArch32)
1119         {
1120             return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationEndAddressIndex)>(isArch32);
1121         }
1122 
GetBCStubEntriesOffsetGlueData1123         static size_t GetBCStubEntriesOffset(bool isArch32)
1124         {
1125             return GetOffset<static_cast<size_t>(Index::BcStubEntriesIndex)>(isArch32);
1126         }
1127 
GetRTStubEntriesOffsetGlueData1128         static size_t GetRTStubEntriesOffset(bool isArch32)
1129         {
1130             return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
1131         }
1132 
GetCOStubEntriesOffsetGlueData1133         static size_t GetCOStubEntriesOffset(bool isArch32)
1134         {
1135             return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
1136         }
1137 
GetBaselineStubEntriesOffsetGlueData1138         static size_t GetBaselineStubEntriesOffset(bool isArch32)
1139         {
1140             return GetOffset<static_cast<size_t>(Index::BaselineStubEntriesIndex)>(isArch32);
1141         }
1142 
GetBuiltinsStubEntriesOffsetGlueData1143         static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
1144         {
1145             return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
1146         }
1147 
GetBuiltinHClassEntriesOffsetGlueData1148         static size_t GetBuiltinHClassEntriesOffset(bool isArch32)
1149         {
1150             return GetOffset<static_cast<size_t>(Index::BuiltinHClassEntriesIndex)>(isArch32);
1151         }
1152 
GetBuiltinHClassOffsetGlueData1153         static size_t GetBuiltinHClassOffset(BuiltinTypeId type, bool isArch32)
1154         {
1155             return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetBuiltinHClassOffset(type);
1156         }
1157 
GetBuiltinInstanceHClassOffsetGlueData1158         static size_t GetBuiltinInstanceHClassOffset(BuiltinTypeId type, bool isArch32)
1159         {
1160             return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetInstanceHClassOffset(type);
1161         }
1162 
GetBuiltinPrototypeHClassOffsetGlueData1163         static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
1164         {
1165             return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetPrototypeHClassOffset(type);
1166         }
1167 
GetBuiltinPrototypeOfPrototypeHClassOffsetGlueData1168         static size_t GetBuiltinPrototypeOfPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
1169         {
1170             return GetBuiltinHClassEntriesOffset(isArch32) +
1171                    BuiltinHClassEntries::GetPrototypeOfPrototypeHClassOffset(type);
1172         }
1173 
GetBuiltinExtraHClassOffsetGlueData1174         static size_t GetBuiltinExtraHClassOffset(BuiltinTypeId type, bool isArch32)
1175         {
1176             return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetExtraHClassOffset(type);
1177         }
1178 
GetBCDebuggerStubEntriesOffsetGlueData1179         static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
1180         {
1181             return GetOffset<static_cast<size_t>(Index::BcDebuggerStubEntriesIndex)>(isArch32);
1182         }
1183 
GetFrameBaseOffsetGlueData1184         static size_t GetFrameBaseOffset(bool isArch32)
1185         {
1186             return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
1187         }
1188 
GetStackLimitOffsetGlueData1189         static size_t GetStackLimitOffset(bool isArch32)
1190         {
1191             return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
1192         }
1193 
GetGlueGlobalEnvOffsetGlueData1194         static size_t GetGlueGlobalEnvOffset(bool isArch32)
1195         {
1196             return GetOffset<static_cast<size_t>(Index::GlueGlobalEnvIndex)>(isArch32);
1197         }
1198 
GetAllowCrossThreadExecutionOffsetGlueData1199         static size_t GetAllowCrossThreadExecutionOffset(bool isArch32)
1200         {
1201             return GetOffset<static_cast<size_t>(Index::AllowCrossThreadExecutionIndex)>(isArch32);
1202         }
1203 
GetInterruptVectorOffsetGlueData1204         static size_t GetInterruptVectorOffset(bool isArch32)
1205         {
1206             return GetOffset<static_cast<size_t>(Index::InterruptVectorIndex)>(isArch32);
1207         }
1208 
GetIsStartHeapSamplingOffsetGlueData1209         static size_t GetIsStartHeapSamplingOffset(bool isArch32)
1210         {
1211             return GetOffset<static_cast<size_t>(Index::IsStartHeapSamplingIndex)>(isArch32);
1212         }
1213 
GetIsDebugModeOffsetGlueData1214         static size_t GetIsDebugModeOffset(bool isArch32)
1215         {
1216             return GetOffset<static_cast<size_t>(Index::IsDebugModeIndex)>(isArch32);
1217         }
1218 
GetIsFrameDroppedOffsetGlueData1219         static size_t GetIsFrameDroppedOffset(bool isArch32)
1220         {
1221             return GetOffset<static_cast<size_t>(Index::IsFrameDroppedIndex)>(isArch32);
1222         }
1223 
GetPropertiesGrowStepOffsetGlueData1224         static size_t GetPropertiesGrowStepOffset(bool isArch32)
1225         {
1226             return GetOffset<static_cast<size_t>(Index::PropertiesGrowStepIndex)>(isArch32);
1227         }
1228 
GetEntryFrameDroppedStateOffsetGlueData1229         static size_t GetEntryFrameDroppedStateOffset(bool isArch32)
1230         {
1231             return GetOffset<static_cast<size_t>(Index::EntryFrameDroppedStateIndex)>(isArch32);
1232         }
1233 
GetCurrentContextOffsetGlueData1234         static size_t GetCurrentContextOffset(bool isArch32)
1235         {
1236             return GetOffset<static_cast<size_t>(Index::CurrentContextIndex)>(isArch32);
1237         }
1238 
GetBuiltinEntriesOffsetGlueData1239         static size_t GetBuiltinEntriesOffset(bool isArch32)
1240         {
1241             return GetOffset<static_cast<size_t>(Index::BuiltinEntriesIndex)>(isArch32);
1242         }
1243 
GetIsTracingOffsetGlueData1244         static size_t GetIsTracingOffset(bool isArch32)
1245         {
1246             return GetOffset<static_cast<size_t>(Index::IsTracingIndex)>(isArch32);
1247         }
1248 
GetUnSharedConstpoolsOffsetGlueData1249         static size_t GetUnSharedConstpoolsOffset(bool isArch32)
1250         {
1251             return GetOffset<static_cast<size_t>(Index::UnsharedConstpoolsIndex)>(isArch32);
1252         }
1253 
GetUnSharedConstpoolsArrayLenOffsetGlueData1254         static size_t GetUnSharedConstpoolsArrayLenOffset(bool isArch32)
1255         {
1256             return GetOffset<static_cast<size_t>(Index::UnsharedConstpoolsArrayLenIndex)>(isArch32);
1257         }
1258 
GetStateAndFlagsOffsetGlueData1259         static size_t GetStateAndFlagsOffset(bool isArch32)
1260         {
1261             return GetOffset<static_cast<size_t>(Index::StateAndFlagsIndex)>(isArch32);
1262         }
1263 
GetRandomStatePtrOffsetGlueData1264         static size_t GetRandomStatePtrOffset(bool isArch32)
1265         {
1266             return GetOffset<static_cast<size_t>(Index::RandomStatePtrIndex)>(isArch32);
1267         }
1268 
GetTaskInfoOffsetGlueData1269         static size_t GetTaskInfoOffset(bool isArch32)
1270         {
1271             return GetOffset<static_cast<size_t>(Index::TaskInfoIndex)>(isArch32);
1272         }
1273 
GetIsEnableMutantArrayOffsetGlueData1274         static size_t GetIsEnableMutantArrayOffset(bool isArch32)
1275         {
1276             return GetOffset<static_cast<size_t>(Index::IsEnableMutantArrayIndex)>(isArch32);
1277         }
1278 
GetIsEnableElementsKindOffsetGlueData1279         static size_t GetIsEnableElementsKindOffset(bool isArch32)
1280         {
1281             return GetOffset<static_cast<size_t>(Index::IsEnableElementsKindIndex)>(isArch32);
1282         }
1283 
GetArrayHClassIndexesIndexOffsetGlueData1284         static size_t GetArrayHClassIndexesIndexOffset(bool isArch32)
1285         {
1286             return GetOffset<static_cast<size_t>(Index::ArrayHClassIndexesIndex)>(isArch32);
1287         }
1288 
1289         alignas(EAS) BCStubEntries bcStubEntries_ {};
1290         alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};
1291         alignas(EAS) JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
1292         alignas(EAS) bool arrayPrototypeChangedGuardians_ {true};
1293         alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
1294         alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
1295         alignas(EAS) JSTaggedType *lastFp_ {nullptr};
1296         alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
1297         alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
1298         alignas(EAS) const uintptr_t *sOldSpaceAllocationTopAddress_ {nullptr};
1299         alignas(EAS) const uintptr_t *sOldSpaceAllocationEndAddress_ {nullptr};
1300         alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationTopAddress_ {nullptr};
1301         alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationEndAddress_ {nullptr};
1302         alignas(EAS) RTStubEntries rtStubEntries_ {};
1303         alignas(EAS) COStubEntries coStubEntries_ {};
1304         alignas(EAS) BuiltinStubEntries builtinStubEntries_ {};
1305         alignas(EAS) BuiltinHClassEntries builtinHClassEntries_ {};
1306         alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_ {};
1307         alignas(EAS) BaselineStubEntries baselineStubEntries_ {};
1308         alignas(EAS) volatile uint64_t gcStateBitField_ {0ULL};
1309         alignas(EAS) volatile uint64_t sharedGCStateBitField_ {0ULL};
1310         alignas(EAS) JSTaggedType *frameBase_ {nullptr};
1311         alignas(EAS) uint64_t stackStart_ {0};
1312         alignas(EAS) uint64_t stackLimit_ {0};
1313         alignas(EAS) GlobalEnv *glueGlobalEnv_ {nullptr};
1314         alignas(EAS) GlobalEnvConstants *globalConst_ {nullptr};
1315         alignas(EAS) bool allowCrossThreadExecution_ {false};
1316         alignas(EAS) volatile uint64_t interruptVector_ {0};
1317         alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
1318         alignas(EAS) bool isDebugMode_ {false};
1319         alignas(EAS) bool isFrameDropped_ {false};
1320         alignas(EAS) uint32_t propertiesGrowStep_ {JSObjectResizingStrategy::PROPERTIES_GROW_SIZE};
1321         alignas(EAS) uint64_t entryFrameDroppedState_ {FrameDroppedState::StateFalse};
1322         alignas(EAS) EcmaContext *currentContext_ {nullptr};
1323         alignas(EAS) BuiltinEntries builtinEntries_ {};
1324         alignas(EAS) bool isTracing_ {false};
1325         alignas(EAS) uint32_t unsharedConstpoolsArrayLen_ {0};
1326         alignas(EAS) uintptr_t unsharedConstpools_ {0};
1327         alignas(EAS) uintptr_t randomStatePtr_ {0};
1328         alignas(EAS) ThreadStateAndFlags stateAndFlags_ {};
1329         alignas(EAS) uintptr_t taskInfo_ {0};
1330         alignas(EAS) bool isEnableMutantArray_ {false};
1331         alignas(EAS) bool IsEnableElementsKind_ {false};
1332         alignas(EAS) ElementsHClassEntries arrayHClassIndexes_ {};
1333     };
1334     STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);
1335 
1336     void PushContext(EcmaContext *context);
1337     void PopContext();
1338 
GetCurrentEcmaContext()1339     EcmaContext *GetCurrentEcmaContext() const
1340     {
1341         return glueData_.currentContext_;
1342     }
1343 
GetSingleCharTable()1344     JSTaggedValue GetSingleCharTable() const
1345     {
1346         ASSERT(glueData_.globalConst_->GetSingleCharTable() != JSTaggedValue::Hole());
1347         return glueData_.globalConst_->GetSingleCharTable();
1348     }
1349 
1350     void SwitchCurrentContext(EcmaContext *currentContext, bool isInIterate = false);
1351 
GetEcmaContexts()1352     CVector<EcmaContext *> GetEcmaContexts()
1353     {
1354         return contexts_;
1355     }
1356 
IsInSubStack()1357     bool IsInSubStack() const
1358     {
1359         return isInSubStack_;
1360     }
1361 
GetMainStackInfo()1362     const StackInfo &GetMainStackInfo() const
1363     {
1364         return mainStackInfo_;
1365     }
1366 
1367     bool IsPropertyCacheCleared() const;
1368 
1369     bool EraseContext(EcmaContext *context);
1370     void ClearContextCachedConstantPool();
1371 
1372     const GlobalEnvConstants *GetFirstGlobalConst() const;
1373     bool IsAllContextsInitialized() const;
1374     bool IsReadyToUpdateDetector() const;
1375     Area *GetOrCreateRegExpCache();
1376 
1377     void InitializeBuiltinObject(const std::string& key);
1378     void InitializeBuiltinObject();
1379 
FullMarkRequest()1380     bool FullMarkRequest() const
1381     {
1382         return fullMarkRequest_;
1383     }
1384 
SetFullMarkRequest()1385     void SetFullMarkRequest()
1386     {
1387         fullMarkRequest_ = true;
1388     }
1389 
ResetFullMarkRequest()1390     void ResetFullMarkRequest()
1391     {
1392         fullMarkRequest_ = false;
1393     }
1394 
SetProcessingLocalToSharedRset(bool processing)1395     void SetProcessingLocalToSharedRset(bool processing)
1396     {
1397         processingLocalToSharedRset_ = processing;
1398     }
1399 
IsProcessingLocalToSharedRset()1400     bool IsProcessingLocalToSharedRset() const
1401     {
1402         return processingLocalToSharedRset_;
1403     }
1404 
IsThreadSafe()1405     inline bool IsThreadSafe() const
1406     {
1407         return IsMainThread() || HasSuspendRequest();
1408     }
1409 
IsSuspended()1410     bool IsSuspended() const
1411     {
1412         bool f = ReadFlag(ThreadFlag::SUSPEND_REQUEST);
1413         bool s = (GetState() != ThreadState::RUNNING);
1414         return f && s;
1415     }
1416 
HasSuspendRequest()1417     inline bool HasSuspendRequest() const
1418     {
1419         return ReadFlag(ThreadFlag::SUSPEND_REQUEST);
1420     }
1421 
CheckSafepointIfSuspended()1422     void CheckSafepointIfSuspended()
1423     {
1424         if (HasSuspendRequest()) {
1425             WaitSuspension();
1426         }
1427     }
1428 
IsInSuspendedState()1429     bool IsInSuspendedState() const
1430     {
1431         return GetState() == ThreadState::IS_SUSPENDED;
1432     }
1433 
IsInRunningState()1434     bool IsInRunningState() const
1435     {
1436         return GetState() == ThreadState::RUNNING;
1437     }
1438 
1439     bool IsInRunningStateOrProfiling() const;
1440 
1441     ThreadState GetState() const
1442     {
1443         uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
1444         return static_cast<enum ThreadState>(stateAndFlags >> THREAD_STATE_OFFSET);
1445     }
1446     void PUBLIC_API UpdateState(ThreadState newState);
1447     void SuspendThread(bool internalSuspend, SuspendBarrier* barrier = nullptr);
1448     void ResumeThread(bool internalSuspend);
1449     void WaitSuspension();
1450     static bool IsMainThread();
1451     PUBLIC_API void ManagedCodeBegin();
1452     PUBLIC_API void ManagedCodeEnd();
1453 #ifndef NDEBUG
1454     bool IsInManagedState() const;
1455     MutatorLock::MutatorLockState GetMutatorLockState() const;
1456     void SetMutatorLockState(MutatorLock::MutatorLockState newState);
1457 #endif
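    // Usage sketch (illustrative only; how the JSThread pointer is obtained depends on the caller):
    //     JSThread *thread = ...;        // e.g. from the owning EcmaVM
    //     thread->ManagedCodeBegin();    // enter the RUNNING state before touching JS objects
    //     /* allocate or read JSTaggedValues; GC and safepoints may occur here */
    //     thread->ManagedCodeEnd();      // leave the RUNNING state again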
SetWeakFinalizeTaskCallback(const WeakFinalizeTaskCallback & callback)1458     void SetWeakFinalizeTaskCallback(const WeakFinalizeTaskCallback &callback)
1459     {
1460         finalizeTaskCallback_ = callback;
1461     }
1462 
1463     uint64_t GetJobId()
1464     {
1465         if (jobId_ == UINT64_MAX) {
1466             jobId_ = 0;
1467         }
1468         return ++jobId_;
1469     }
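    // Job ids are 1-based; when the counter reaches UINT64_MAX it wraps back to 1 rather than to 0.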
1470 
SetAsyncCleanTaskCallback(const NativePointerTaskCallback & callback)1471     void SetAsyncCleanTaskCallback(const NativePointerTaskCallback &callback)
1472     {
1473         asyncCleanTaskCb_ = callback;
1474     }
1475 
GetAsyncCleanTaskCallback()1476     NativePointerTaskCallback GetAsyncCleanTaskCallback() const
1477     {
1478         return asyncCleanTaskCb_;
1479     }
1480 
1481     static void RegisterThread(JSThread *jsThread);
1482 
1483     static void UnregisterThread(JSThread *jsThread);
1484 
IsJSThread()1485     bool IsJSThread() const
1486     {
1487         return threadType_ == ThreadType::JS_THREAD;
1488     }
1489 
IsJitThread()1490     bool IsJitThread() const
1491     {
1492         return threadType_ == ThreadType::JIT_THREAD;
1493     }
1494 
IsDaemonThread()1495     bool IsDaemonThread() const
1496     {
1497         return threadType_ == ThreadType::DAEMON_THREAD;
1498     }
1499 
    // Daemon_Thread and JS_Thread transition between states differently: for example, when transitioning
    // to RUNNING, JS_Thread may need to take some local GC actions, whereas Daemon_Thread does not.
    void TransferDaemonThreadToRunning();

    RecursiveMutex *GetJitLock()
    {
        return &jitMutex_;
    }

    RecursiveMutex &GetProfileTypeAccessorLock()
    {
        return profileTypeAccessorLockMutex_;
    }

    void SetMachineCodeLowMemory(bool isLow)
    {
        machineCodeLowMemory_ = isLow;
    }

    bool IsMachineCodeLowMemory()
    {
        return machineCodeLowMemory_;
    }

    void *GetEnv() const
    {
        return env_;
    }

    void SetEnv(void *env)
    {
        env_ = env;
    }

    void SetIsInConcurrentScope(bool flag)
    {
        isInConcurrentScope_ = flag;
    }

    bool IsInConcurrentScope()
    {
        return isInConcurrentScope_;
    }

    void UpdateStackInfo(void *stackInfo, StackInfoOpKind opKind);

    DateUtils *GetDateUtils() const
    {
        return dateUtils_;
    }

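    // Returns true when the caller runs on a different OS thread than the one bound to this JSThread and
    // cross-thread execution has not been explicitly enabled, i.e. the access would be an illegal
    // multi-thread use of the VM.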
    bool CheckMultiThread() const
    {
        return GetThreadId() != JSThread::GetCurrentThreadId() && !IsCrossThreadExecutionEnable();
    }

#ifndef NDEBUG
    inline void LaunchSuspendAll()
    {
        launchedSuspendAll_ = true;
    }

    inline bool HasLaunchedSuspendAll() const
    {
        return launchedSuspendAll_;
    }

    inline void CompleteSuspendAll()
    {
        launchedSuspendAll_ = false;
    }
#endif

protected:
    void SetThreadId()
    {
        id_.store(JSThread::GetCurrentThreadId(), std::memory_order_release);
    }

    // When EcmaVM::PreFork() is called, the std::thread backing the Daemon_Thread has finished, but the
    // Daemon_Thread instance is still alive, so its ThreadId needs to be reset to 0.
    void ResetThreadId()
    {
        id_.store(0, std::memory_order_release);
    }
private:
    NO_COPY_SEMANTIC(JSThread);
    NO_MOVE_SEMANTIC(JSThread);
    void SetGlobalConst(GlobalEnvConstants *globalConst)
    {
        glueData_.globalConst_ = globalConst;
    }
    void SetCurrentEcmaContext(EcmaContext *context)
    {
        glueData_.currentContext_ = context;
    }

    void TransferFromRunningToSuspended(ThreadState newState);

    void TransferToRunning();

    inline void StoreState(ThreadState newState);

    void StoreRunningState(ThreadState newState);

    void StoreSuspendedState(ThreadState newState);

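    // The flag helpers below perform lock-free read-modify-writes on the shared stateAndFlags_ word:
    // SetFlag() ORs the flag bit in and ClearFlag() ANDs it out, so a concurrent state update in the
    // other half of the word is never lost, which a plain load/modify/store sequence could not guarantee.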
    bool ReadFlag(ThreadFlag flag) const
    {
        uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
        uint16_t flags = (stateAndFlags & THREAD_FLAGS_MASK);
        return (flags & static_cast<uint16_t>(flag)) != 0;
    }

    void SetFlag(ThreadFlag flag)
    {
        glueData_.stateAndFlags_.asAtomicInt.fetch_or(flag, std::memory_order_seq_cst);
    }

    void ClearFlag(ThreadFlag flag)
    {
        glueData_.stateAndFlags_.asAtomicInt.fetch_and(UINT32_MAX ^ flag, std::memory_order_seq_cst);
    }

    void DumpStack() DUMP_API_ATTR;

    static size_t GetAsmStackLimit();

    static constexpr size_t DEFAULT_MAX_SYSTEM_STACK_SIZE = 8_MB;

    GlueData glueData_;
    std::atomic<ThreadId> id_ {0};
    EcmaVM *vm_ {nullptr};
    void *env_ {nullptr};
    Area *regExpCache_ {nullptr};

    // MM: handles, global-handles, and aot-stubs.
    int nestedLevel_ = 0;
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
    bool runningNativeFinalizeCallbacks_ {false};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeFreeGlobalCallbacks_ {};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeNativeFinalizeCallbacks_ {};

    EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
    EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
    int32_t stackTraceFd_ {-1};
    std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
    std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
    std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack,
         WeakClearCallback nativeFinalizeCallBack)> setWeak_;
    std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
    std::function<bool(uintptr_t addr)> isWeak_;
    NativePointerTaskCallback asyncCleanTaskCb_ {nullptr};
    WeakFinalizeTaskCallback finalizeTaskCallback_ {nullptr};
    uint32_t globalNumberCount_ {0};

    // Run-time state
    bool getStackSignal_ {false};
    bool runtimeState_ {false};
    bool isAsmInterpreter_ {false};
    VmThreadControl *vmThreadControl_ {nullptr};
    bool enableStackSourceFile_ {true};
    bool enableLazyBuiltins_ {false};
    bool readyForGCIterating_ {false};
    // CpuProfiler
    bool isProfiling_ {false};
    bool gcState_ {false};
    std::atomic_bool needProfiling_ {false};
    std::string profileName_ {""};

    bool finalizationCheckState_ {false};
    // Shared heap
    bool isMainThread_ {false};
    bool fullMarkRequest_ {false};
    // Set while the shared heap GC is processing the local heap's local-to-shared Rset
    bool processingLocalToSharedRset_ {false};

    CMap<JSHClass *, GlobalIndex> ctorHclassEntries_;

    CVector<EcmaContext *> contexts_;
    bool isInSubStack_ {false};
    StackInfo mainStackInfo_ { 0ULL, 0ULL };
    EcmaContext *currentContext_ {nullptr};

    Mutex suspendLock_;
    int32_t suspendCount_ {0};
    ConditionVariable suspendCondVar_;
    SuspendBarrier *suspendBarrier_ {nullptr};

    uint64_t jobId_ {0};

    ThreadType threadType_ {ThreadType::JS_THREAD};
    RecursiveMutex jitMutex_;
    bool machineCodeLowMemory_ {false};
    RecursiveMutex profileTypeAccessorLockMutex_;
    DateUtils *dateUtils_ {nullptr};

#ifndef NDEBUG
    MutatorLock::MutatorLockState mutatorLockState_ = MutatorLock::MutatorLockState::UNLOCKED;
    std::atomic<bool> launchedSuspendAll_ {false};
#endif
    // Maps each JsError object whose stack trace contains JIT-generated frames to its MachineCode objects.
    // Used to keep those MachineCode objects alive (for dumping) until the JsError object is freed.
    std::map<JSTaggedType, JitCodeVector*> jitCodeMaps_;

    std::atomic<bool> needTermination_ {false};
    std::atomic<bool> hasTerminated_ {false};

    bool isInConcurrentScope_ {false};

    friend class GlobalHandleCollection;
    friend class EcmaVM;
    friend class EcmaContext;
    friend class JitVM;
};
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_JS_THREAD_H