1 /* 2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd. 3 * Licensed under the Apache License, Version 2.0 (the "License"); 4 * you may not use this file except in compliance with the License. 5 * You may obtain a copy of the License at 6 * 7 * http://www.apache.org/licenses/LICENSE-2.0 8 * 9 * Unless required by applicable law or agreed to in writing, software 10 * distributed under the License is distributed on an "AS IS" BASIS, 11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 * See the License for the specific language governing permissions and 13 * limitations under the License. 14 */ 15 16 #ifndef ECMASCRIPT_JS_THREAD_H 17 #define ECMASCRIPT_JS_THREAD_H 18 19 #include <atomic> 20 #include <sstream> 21 #include <string> 22 #include <cstdint> 23 24 #include "ecmascript/base/aligned_struct.h" 25 #include "ecmascript/builtin_entries.h" 26 #include "ecmascript/daemon/daemon_task.h" 27 #include "ecmascript/global_index.h" 28 #include "ecmascript/js_object_resizing_strategy.h" 29 #include "ecmascript/js_tagged_value.h" 30 #include "ecmascript/js_thread_hclass_entries.h" 31 #include "ecmascript/js_thread_stub_entries.h" 32 #include "ecmascript/log_wrapper.h" 33 #include "ecmascript/mem/visitor.h" 34 #include "ecmascript/mutator_lock.h" 35 36 #if defined(ENABLE_FFRT_INTERFACES) 37 #include "ffrt.h" 38 #include "c/executor_task.h" 39 #endif 40 41 namespace panda::ecmascript { 42 class DateUtils; 43 class EcmaContext; 44 class EcmaVM; 45 class EcmaHandleScope; 46 class GlobalIndex; 47 class HeapRegionAllocator; 48 class PropertiesCache; 49 template<typename T> 50 class EcmaGlobalStorage; 51 class Node; 52 class DebugNode; 53 class VmThreadControl; 54 class GlobalEnvConstants; 55 enum class ElementsKind : uint8_t; 56 57 class MachineCode; 58 using JitCodeVector = std::vector<std::tuple<MachineCode*, std::string, uintptr_t>>; 59 using JitCodeMapVisitor = std::function<void(std::map<JSTaggedType, JitCodeVector*>&)>; 60 61 using 
WeakClearCallback = void (*)(void *); 62 63 enum class MarkStatus : uint8_t { 64 READY_TO_MARK, 65 MARKING, 66 MARK_FINISHED, 67 }; 68 69 enum class GCKind : uint8_t { 70 LOCAL_GC, 71 SHARED_GC 72 }; 73 74 enum class PGOProfilerStatus : uint8_t { 75 PGO_PROFILER_DISABLE, 76 PGO_PROFILER_ENABLE, 77 }; 78 79 enum class BCStubStatus: uint8_t { 80 NORMAL_BC_STUB, 81 PROFILE_BC_STUB, 82 JIT_PROFILE_BC_STUB, 83 }; 84 85 enum class StableArrayChangeKind { PROTO, NOT_PROTO }; 86 87 enum ThreadType : uint8_t { 88 JS_THREAD, 89 JIT_THREAD, 90 DAEMON_THREAD, 91 }; 92 93 enum ThreadFlag : uint16_t { 94 NO_FLAGS = 0 << 0, 95 SUSPEND_REQUEST = 1 << 0, 96 ACTIVE_BARRIER = 1 << 1, 97 }; 98 99 static constexpr uint32_t THREAD_STATE_OFFSET = 16; 100 static constexpr uint32_t THREAD_FLAGS_MASK = (0x1 << THREAD_STATE_OFFSET) - 1; 101 enum class ThreadState : uint16_t { 102 CREATED = 0, 103 RUNNING = 1, 104 NATIVE = 2, 105 WAIT = 3, 106 IS_SUSPENDED = 4, 107 TERMINATED = 5, 108 }; 109 110 union ThreadStateAndFlags { asInt(val)111 explicit ThreadStateAndFlags(uint32_t val = 0): asInt(val) {} 112 struct { 113 volatile uint16_t flags; 114 volatile ThreadState state; 115 } asStruct; 116 volatile uint32_t asInt; 117 uint32_t asNonvolatileInt; 118 std::atomic<uint32_t> asAtomicInt; 119 private: 120 NO_COPY_SEMANTIC(ThreadStateAndFlags); 121 }; 122 123 static constexpr uint32_t MAIN_THREAD_INDEX = 0; 124 125 class JSThread { 126 public: 127 static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2; 128 static constexpr int CONCURRENT_MARKING_BITFIELD_MASK = 0x3; 129 static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_NUM = 1; 130 static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_MASK = 0x1; 131 static constexpr int CHECK_SAFEPOINT_BITFIELD_NUM = 8; 132 static constexpr int PGO_PROFILER_BITFIELD_START = 16; 133 static constexpr int BOOL_BITFIELD_NUM = 1; 134 static constexpr int BCSTUBSTATUS_BITFIELD_NUM = 2; 135 static constexpr uint32_t RESERVE_STACK_SIZE = 128; 136 using 
MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>; 137 using SharedMarkStatusBits = BitField<SharedMarkStatus, 0, SHARED_CONCURRENT_MARKING_BITFIELD_NUM>; 138 using CheckSafePointBit = BitField<bool, 0, BOOL_BITFIELD_NUM>; 139 using VMNeedSuspensionBit = BitField<bool, CHECK_SAFEPOINT_BITFIELD_NUM, BOOL_BITFIELD_NUM>; 140 using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag; 141 using InstallMachineCodeBit = VMHasSuspendedBit::NextFlag; 142 using PGOStatusBits = BitField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_START, BOOL_BITFIELD_NUM>; 143 using BCStubStatusBits = PGOStatusBits::NextField<BCStubStatus, BCSTUBSTATUS_BITFIELD_NUM>; 144 using ThreadId = uint32_t; 145 146 enum FrameDroppedState { 147 StateFalse = 0, 148 StateTrue, 149 StatePending 150 }; 151 152 explicit JSThread(EcmaVM *vm); 153 // only used in jit thread 154 explicit JSThread(EcmaVM *vm, ThreadType threadType); 155 // only used in daemon thread 156 explicit JSThread(ThreadType threadType); 157 158 PUBLIC_API ~JSThread(); 159 GetEcmaVM()160 EcmaVM *GetEcmaVM() const 161 { 162 return vm_; 163 } 164 165 static JSThread *Create(EcmaVM *vm); 166 static JSThread *GetCurrent(); 167 GetNestedLevel()168 int GetNestedLevel() const 169 { 170 return nestedLevel_; 171 } 172 SetNestedLevel(int level)173 void SetNestedLevel(int level) 174 { 175 nestedLevel_ = level; 176 } 177 SetLastFp(JSTaggedType * fp)178 void SetLastFp(JSTaggedType *fp) 179 { 180 glueData_.lastFp_ = fp; 181 } 182 GetLastFp()183 const JSTaggedType *GetLastFp() const 184 { 185 return glueData_.lastFp_; 186 } 187 GetCurrentSPFrame()188 const JSTaggedType *GetCurrentSPFrame() const 189 { 190 return glueData_.currentFrame_; 191 } 192 SetCurrentSPFrame(JSTaggedType * sp)193 void SetCurrentSPFrame(JSTaggedType *sp) 194 { 195 glueData_.currentFrame_ = sp; 196 } 197 GetLastLeaveFrame()198 const JSTaggedType *GetLastLeaveFrame() const 199 { 200 return glueData_.leaveFrame_; 201 } 202 SetLastLeaveFrame(JSTaggedType * sp)203 
void SetLastLeaveFrame(JSTaggedType *sp) 204 { 205 glueData_.leaveFrame_ = sp; 206 } 207 208 const JSTaggedType *GetCurrentFrame() const; 209 210 void SetCurrentFrame(JSTaggedType *sp); 211 212 const JSTaggedType *GetCurrentInterpretedFrame() const; 213 214 bool DoStackOverflowCheck(const JSTaggedType *sp); 215 216 bool DoStackLimitCheck(); 217 GetNativeAreaAllocator()218 NativeAreaAllocator *GetNativeAreaAllocator() const 219 { 220 return nativeAreaAllocator_; 221 } 222 GetHeapRegionAllocator()223 HeapRegionAllocator *GetHeapRegionAllocator() const 224 { 225 return heapRegionAllocator_; 226 } 227 ReSetNewSpaceAllocationAddress(const uintptr_t * top,const uintptr_t * end)228 void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end) 229 { 230 glueData_.newSpaceAllocationTopAddress_ = top; 231 glueData_.newSpaceAllocationEndAddress_ = end; 232 } 233 ReSetSOldSpaceAllocationAddress(const uintptr_t * top,const uintptr_t * end)234 void ReSetSOldSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end) 235 { 236 glueData_.sOldSpaceAllocationTopAddress_ = top; 237 glueData_.sOldSpaceAllocationEndAddress_ = end; 238 } 239 ReSetSNonMovableSpaceAllocationAddress(const uintptr_t * top,const uintptr_t * end)240 void ReSetSNonMovableSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end) 241 { 242 glueData_.sNonMovableSpaceAllocationTopAddress_ = top; 243 glueData_.sNonMovableSpaceAllocationEndAddress_ = end; 244 } 245 GetUnsharedConstpools()246 uintptr_t GetUnsharedConstpools() const 247 { 248 return glueData_.unsharedConstpools_; 249 } 250 SetUnsharedConstpools(uintptr_t unsharedConstpools)251 void SetUnsharedConstpools(uintptr_t unsharedConstpools) 252 { 253 glueData_.unsharedConstpools_ = unsharedConstpools; 254 } 255 SetIsStartHeapSampling(bool isStart)256 void SetIsStartHeapSampling(bool isStart) 257 { 258 glueData_.isStartHeapSampling_ = isStart ? 
JSTaggedValue::True() : JSTaggedValue::False(); 259 } 260 SetIsTracing(bool isTracing)261 void SetIsTracing(bool isTracing) 262 { 263 glueData_.isTracing_ = isTracing; 264 } 265 266 void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor, 267 const RootBaseAndDerivedVisitor &derivedVisitor); 268 269 void IterateJitCodeMap(const JitCodeMapVisitor &updater); 270 271 void IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor); 272 273 uintptr_t* PUBLIC_API ExpandHandleStorage(); 274 void PUBLIC_API ShrinkHandleStorage(int prevIndex); 275 void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const; 276 bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const; 277 278 void PUBLIC_API SetException(JSTaggedValue exception); 279 GetException()280 JSTaggedValue GetException() const 281 { 282 return glueData_.exception_; 283 } 284 HasPendingException()285 bool HasPendingException() const 286 { 287 return !glueData_.exception_.IsHole(); 288 } 289 290 void ClearException(); 291 SetGlobalObject(JSTaggedValue globalObject)292 void SetGlobalObject(JSTaggedValue globalObject) 293 { 294 glueData_.globalObject_ = globalObject; 295 } 296 GetGlobalEnv()297 const GlobalEnv *GetGlobalEnv() const 298 { 299 return glueData_.glueGlobalEnv_; 300 } 301 GlobalConstants()302 const GlobalEnvConstants *GlobalConstants() const 303 { 304 return glueData_.globalConst_; 305 } 306 SetGlobalConstants(const GlobalEnvConstants * constants)307 void SetGlobalConstants(const GlobalEnvConstants *constants) 308 { 309 glueData_.globalConst_ = const_cast<GlobalEnvConstants*>(constants); 310 } 311 GetBuiltinEntries()312 const BuiltinEntries GetBuiltinEntries() const 313 { 314 return glueData_.builtinEntries_; 315 } 316 GetBuiltinEntriesPointer()317 BuiltinEntries* GetBuiltinEntriesPointer() 318 { 319 return &glueData_.builtinEntries_; 320 } 321 GetArrayHClassIndexMap()322 const CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> 
&GetArrayHClassIndexMap() const 323 { 324 return arrayHClassIndexMap_; 325 } 326 GetCtorHclassEntries()327 const CMap<JSHClass *, GlobalIndex> &GetCtorHclassEntries() const 328 { 329 return ctorHclassEntries_; 330 } 331 332 void NotifyStableArrayElementsGuardians(JSHandle<JSObject> receiver, StableArrayChangeKind changeKind); 333 IsStableArrayElementsGuardiansInvalid()334 bool IsStableArrayElementsGuardiansInvalid() const 335 { 336 return !glueData_.stableArrayElementsGuardians_; 337 } 338 339 void ResetGuardians(); 340 341 void SetInitialBuiltinHClass( 342 BuiltinTypeId type, JSHClass *builtinHClass, JSHClass *instanceHClass, 343 JSHClass *prototypeHClass, JSHClass *prototypeOfPrototypeHClass = nullptr, 344 JSHClass *extraHClass = nullptr); 345 346 void SetInitialBuiltinGlobalHClass(JSHClass *builtinHClass, GlobalIndex globalIndex); 347 348 JSHClass *GetBuiltinHClass(BuiltinTypeId type) const; 349 350 JSHClass *GetBuiltinInstanceHClass(BuiltinTypeId type) const; 351 JSHClass *GetBuiltinExtraHClass(BuiltinTypeId type) const; 352 JSHClass *GetArrayInstanceHClass(ElementsKind kind, bool isPrototype) const; 353 354 PUBLIC_API JSHClass *GetBuiltinPrototypeHClass(BuiltinTypeId type) const; 355 PUBLIC_API JSHClass *GetBuiltinPrototypeOfPrototypeHClass(BuiltinTypeId type) const; 356 357 static size_t GetBuiltinHClassOffset(BuiltinTypeId, bool isArch32); 358 359 static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId, bool isArch32); 360 GetBuiltinHClassEntries()361 const BuiltinHClassEntries &GetBuiltinHClassEntries() const 362 { 363 return glueData_.builtinHClassEntries_; 364 } 365 366 JSTaggedValue GetCurrentLexenv() const; 367 JSTaggedValue GetCurrentFunction() const; 368 RegisterRTInterface(size_t id,Address addr)369 void RegisterRTInterface(size_t id, Address addr) 370 { 371 ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS); 372 glueData_.rtStubEntries_.Set(id, addr); 373 } 374 GetRTInterface(size_t id)375 Address GetRTInterface(size_t id) const 376 { 377 
ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS); 378 return glueData_.rtStubEntries_.Get(id); 379 } 380 GetFastStubEntry(uint32_t id)381 Address GetFastStubEntry(uint32_t id) const 382 { 383 return glueData_.coStubEntries_.Get(id); 384 } 385 SetFastStubEntry(size_t id,Address entry)386 void SetFastStubEntry(size_t id, Address entry) 387 { 388 glueData_.coStubEntries_.Set(id, entry); 389 } 390 GetBuiltinStubEntry(uint32_t id)391 Address GetBuiltinStubEntry(uint32_t id) const 392 { 393 return glueData_.builtinStubEntries_.Get(id); 394 } 395 SetBuiltinStubEntry(size_t id,Address entry)396 void SetBuiltinStubEntry(size_t id, Address entry) 397 { 398 glueData_.builtinStubEntries_.Set(id, entry); 399 } 400 GetBCStubEntry(uint32_t id)401 Address GetBCStubEntry(uint32_t id) const 402 { 403 return glueData_.bcStubEntries_.Get(id); 404 } 405 SetBCStubEntry(size_t id,Address entry)406 void SetBCStubEntry(size_t id, Address entry) 407 { 408 glueData_.bcStubEntries_.Set(id, entry); 409 } 410 GetBaselineStubEntry(uint32_t id)411 Address GetBaselineStubEntry(uint32_t id) const 412 { 413 return glueData_.baselineStubEntries_.Get(id); 414 } 415 SetBaselineStubEntry(size_t id,Address entry)416 void SetBaselineStubEntry(size_t id, Address entry) 417 { 418 glueData_.baselineStubEntries_.Set(id, entry); 419 } 420 SetBCDebugStubEntry(size_t id,Address entry)421 void SetBCDebugStubEntry(size_t id, Address entry) 422 { 423 glueData_.bcDebuggerStubEntries_.Set(id, entry); 424 } 425 GetBytecodeHandler()426 Address *GetBytecodeHandler() 427 { 428 return glueData_.bcStubEntries_.GetAddr(); 429 } 430 431 void PUBLIC_API CheckSwitchDebuggerBCStub(); 432 void CheckOrSwitchPGOStubs(); 433 void SwitchJitProfileStubs(bool isEnablePgo); 434 GetThreadId()435 ThreadId GetThreadId() const 436 { 437 return id_.load(std::memory_order_acquire); 438 } 439 440 void PostFork(); 441 GetCurrentThreadId()442 static ThreadId GetCurrentThreadId() 443 { 444 #if defined(ENABLE_FFRT_INTERFACES) 445 
JSThread::ThreadId id = ffrt_this_task_get_id(); 446 if (id != 0) { 447 return id; 448 } else { 449 return os::thread::GetCurrentThreadId(); 450 } 451 #else 452 return os::thread::GetCurrentThreadId(); 453 #endif 454 } 455 456 void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor, GCKind gcKind = GCKind::LOCAL_GC); 457 458 void UpdateJitCodeMapReference(const WeakRootVisitor &visitor); 459 460 PUBLIC_API PropertiesCache *GetPropertiesCache() const; 461 GetMarkStatus()462 MarkStatus GetMarkStatus() const 463 { 464 return MarkStatusBits::Decode(glueData_.gcStateBitField_); 465 } 466 SetMarkStatus(MarkStatus status)467 void SetMarkStatus(MarkStatus status) 468 { 469 MarkStatusBits::Set(status, &glueData_.gcStateBitField_); 470 } 471 IsConcurrentMarkingOrFinished()472 bool IsConcurrentMarkingOrFinished() const 473 { 474 return !IsReadyToConcurrentMark(); 475 } 476 IsReadyToConcurrentMark()477 bool IsReadyToConcurrentMark() const 478 { 479 auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_); 480 return status == MarkStatus::READY_TO_MARK; 481 } 482 IsMarking()483 bool IsMarking() const 484 { 485 auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_); 486 return status == MarkStatus::MARKING; 487 } 488 IsMarkFinished()489 bool IsMarkFinished() const 490 { 491 auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_); 492 return status == MarkStatus::MARK_FINISHED; 493 } 494 GetSharedMarkStatus()495 SharedMarkStatus GetSharedMarkStatus() const 496 { 497 return SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_); 498 } 499 SetSharedMarkStatus(SharedMarkStatus status)500 void SetSharedMarkStatus(SharedMarkStatus status) 501 { 502 SharedMarkStatusBits::Set(status, &glueData_.sharedGCStateBitField_); 503 } 504 IsSharedConcurrentMarkingOrFinished()505 bool IsSharedConcurrentMarkingOrFinished() const 506 { 507 auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_); 508 return status == 
SharedMarkStatus::CONCURRENT_MARKING_OR_FINISHED; 509 } 510 IsReadyToSharedConcurrentMark()511 bool IsReadyToSharedConcurrentMark() const 512 { 513 auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_); 514 return status == SharedMarkStatus::READY_TO_CONCURRENT_MARK; 515 } 516 SetPGOProfilerEnable(bool enable)517 void SetPGOProfilerEnable(bool enable) 518 { 519 PGOProfilerStatus status = 520 enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE; 521 SetInterruptValue<PGOStatusBits>(status); 522 } 523 IsPGOProfilerEnable()524 bool IsPGOProfilerEnable() const 525 { 526 auto status = PGOStatusBits::Decode(glueData_.interruptVector_); 527 return status == PGOProfilerStatus::PGO_PROFILER_ENABLE; 528 } 529 SetBCStubStatus(BCStubStatus status)530 void SetBCStubStatus(BCStubStatus status) 531 { 532 SetInterruptValue<BCStubStatusBits>(status); 533 } 534 GetBCStubStatus()535 BCStubStatus GetBCStubStatus() const 536 { 537 return BCStubStatusBits::Decode(glueData_.interruptVector_); 538 } 539 540 bool CheckSafepoint(); 541 542 void CheckAndPassActiveBarrier(); 543 544 bool PassSuspendBarrier(); 545 SetGetStackSignal(bool isParseStack)546 void SetGetStackSignal(bool isParseStack) 547 { 548 getStackSignal_ = isParseStack; 549 } 550 GetStackSignal()551 bool GetStackSignal() const 552 { 553 return getStackSignal_; 554 } 555 SetNeedProfiling(bool needProfiling)556 void SetNeedProfiling(bool needProfiling) 557 { 558 needProfiling_.store(needProfiling); 559 } 560 SetIsProfiling(bool isProfiling)561 void SetIsProfiling(bool isProfiling) 562 { 563 isProfiling_ = isProfiling; 564 } 565 GetIsProfiling()566 bool GetIsProfiling() const 567 { 568 return isProfiling_; 569 } 570 SetGcState(bool gcState)571 void SetGcState(bool gcState) 572 { 573 gcState_ = gcState; 574 } 575 GetGcState()576 bool GetGcState() const 577 { 578 return gcState_; 579 } 580 SetRuntimeState(bool runtimeState)581 void SetRuntimeState(bool runtimeState) 582 { 
583 runtimeState_ = runtimeState; 584 } 585 GetRuntimeState()586 bool GetRuntimeState() const 587 { 588 return runtimeState_; 589 } 590 SetMainThread()591 bool SetMainThread() 592 { 593 return isMainThread_ = true; 594 } 595 IsMainThreadFast()596 bool IsMainThreadFast() const 597 { 598 return isMainThread_; 599 } 600 SetCpuProfileName(std::string & profileName)601 void SetCpuProfileName(std::string &profileName) 602 { 603 profileName_ = profileName; 604 } 605 EnableAsmInterpreter()606 void EnableAsmInterpreter() 607 { 608 isAsmInterpreter_ = true; 609 } 610 IsAsmInterpreter()611 bool IsAsmInterpreter() const 612 { 613 return isAsmInterpreter_; 614 } 615 GetVmThreadControl()616 VmThreadControl *GetVmThreadControl() const 617 { 618 return vmThreadControl_; 619 } 620 SetEnableStackSourceFile(bool value)621 void SetEnableStackSourceFile(bool value) 622 { 623 enableStackSourceFile_ = value; 624 } 625 GetEnableStackSourceFile()626 bool GetEnableStackSourceFile() const 627 { 628 return enableStackSourceFile_; 629 } 630 SetEnableLazyBuiltins(bool value)631 void SetEnableLazyBuiltins(bool value) 632 { 633 enableLazyBuiltins_ = value; 634 } 635 GetEnableLazyBuiltins()636 bool GetEnableLazyBuiltins() const 637 { 638 return enableLazyBuiltins_; 639 } 640 SetReadyForGCIterating(bool flag)641 void SetReadyForGCIterating(bool flag) 642 { 643 readyForGCIterating_ = flag; 644 } 645 ReadyForGCIterating()646 bool ReadyForGCIterating() const 647 { 648 return readyForGCIterating_; 649 } 650 GetGlueDataOffset()651 static constexpr size_t GetGlueDataOffset() 652 { 653 return MEMBER_OFFSET(JSThread, glueData_); 654 } 655 GetGlueAddr()656 uintptr_t GetGlueAddr() const 657 { 658 return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset(); 659 } 660 GlueToJSThread(uintptr_t glue)661 static JSThread *GlueToJSThread(uintptr_t glue) 662 { 663 // very careful to modify here 664 return reinterpret_cast<JSThread *>(glue - GetGlueDataOffset()); 665 } 666 SetCheckSafePointStatus()667 void 
SetCheckSafePointStatus() 668 { 669 ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1); 670 SetInterruptValue<CheckSafePointBit>(true); 671 } 672 ResetCheckSafePointStatus()673 void ResetCheckSafePointStatus() 674 { 675 ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1); 676 SetInterruptValue<CheckSafePointBit>(false); 677 } 678 SetVMNeedSuspension(bool flag)679 void SetVMNeedSuspension(bool flag) 680 { 681 SetInterruptValue<VMNeedSuspensionBit>(flag); 682 } 683 VMNeedSuspension()684 bool VMNeedSuspension() 685 { 686 return VMNeedSuspensionBit::Decode(glueData_.interruptVector_); 687 } 688 SetVMSuspended(bool flag)689 void SetVMSuspended(bool flag) 690 { 691 SetInterruptValue<VMHasSuspendedBit>(flag); 692 } 693 IsVMSuspended()694 bool IsVMSuspended() 695 { 696 return VMHasSuspendedBit::Decode(glueData_.interruptVector_); 697 } 698 HasTerminationRequest()699 bool HasTerminationRequest() const 700 { 701 return needTermination_; 702 } 703 SetTerminationRequest(bool flag)704 void SetTerminationRequest(bool flag) 705 { 706 needTermination_ = flag; 707 } 708 SetVMTerminated(bool flag)709 void SetVMTerminated(bool flag) 710 { 711 hasTerminated_ = flag; 712 } 713 HasTerminated()714 bool HasTerminated() const 715 { 716 return hasTerminated_; 717 } 718 719 void TerminateExecution(); 720 SetInstallMachineCode(bool flag)721 void SetInstallMachineCode(bool flag) 722 { 723 SetInterruptValue<InstallMachineCodeBit>(flag); 724 } 725 HasInstallMachineCode()726 bool HasInstallMachineCode() const 727 { 728 return InstallMachineCodeBit::Decode(glueData_.interruptVector_); 729 } 730 GetCurrentStackPosition()731 static uintptr_t GetCurrentStackPosition() 732 { 733 return reinterpret_cast<uintptr_t>(__builtin_frame_address(0)); 734 } 735 736 bool IsLegalAsmSp(uintptr_t sp) const; 737 738 bool IsLegalThreadSp(uintptr_t sp) const; 739 740 bool IsLegalSp(uintptr_t sp) const; 741 SetCheckAndCallEnterState(bool state)742 void SetCheckAndCallEnterState(bool 
state) 743 { 744 finalizationCheckState_ = state; 745 } 746 GetCheckAndCallEnterState()747 bool GetCheckAndCallEnterState() const 748 { 749 return finalizationCheckState_; 750 } 751 GetStackStart()752 uint64_t GetStackStart() const 753 { 754 return glueData_.stackStart_; 755 } 756 GetStackLimit()757 uint64_t GetStackLimit() const 758 { 759 return glueData_.stackLimit_; 760 } 761 GetGlueGlobalEnv()762 GlobalEnv *GetGlueGlobalEnv() 763 { 764 return glueData_.glueGlobalEnv_; 765 } 766 SetGlueGlobalEnv(GlobalEnv * global)767 void SetGlueGlobalEnv(GlobalEnv *global) 768 { 769 ASSERT(global != nullptr); 770 glueData_.glueGlobalEnv_ = global; 771 } 772 NewGlobalHandle(JSTaggedType value)773 inline uintptr_t NewGlobalHandle(JSTaggedType value) 774 { 775 return newGlobalHandle_(value); 776 } 777 DisposeGlobalHandle(uintptr_t nodeAddr)778 inline void DisposeGlobalHandle(uintptr_t nodeAddr) 779 { 780 disposeGlobalHandle_(nodeAddr); 781 } 782 783 inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr, 784 WeakClearCallback nativeFinalizeCallBack = nullptr) 785 { 786 return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack); 787 } 788 ClearWeak(uintptr_t nodeAddr)789 inline uintptr_t ClearWeak(uintptr_t nodeAddr) 790 { 791 return clearWeak_(nodeAddr); 792 } 793 IsWeak(uintptr_t addr)794 inline bool IsWeak(uintptr_t addr) const 795 { 796 return isWeak_(addr); 797 } 798 EnableCrossThreadExecution()799 void EnableCrossThreadExecution() 800 { 801 glueData_.allowCrossThreadExecution_ = true; 802 } 803 IsCrossThreadExecutionEnable()804 bool IsCrossThreadExecutionEnable() const 805 { 806 return glueData_.allowCrossThreadExecution_; 807 } 808 IsFrameDropped()809 bool IsFrameDropped() 810 { 811 return glueData_.isFrameDropped_; 812 } 813 SetFrameDroppedState()814 void SetFrameDroppedState() 815 { 816 glueData_.isFrameDropped_ = true; 817 } 818 ResetFrameDroppedState()819 void ResetFrameDroppedState() 820 { 821 
glueData_.isFrameDropped_ = false; 822 } 823 IsEntryFrameDroppedTrue()824 bool IsEntryFrameDroppedTrue() 825 { 826 return glueData_.entryFrameDroppedState_ == FrameDroppedState::StateTrue; 827 } 828 IsEntryFrameDroppedPending()829 bool IsEntryFrameDroppedPending() 830 { 831 return glueData_.entryFrameDroppedState_ == FrameDroppedState::StatePending; 832 } 833 SetEntryFrameDroppedState()834 void SetEntryFrameDroppedState() 835 { 836 glueData_.entryFrameDroppedState_ = FrameDroppedState::StateTrue; 837 } 838 ResetEntryFrameDroppedState()839 void ResetEntryFrameDroppedState() 840 { 841 glueData_.entryFrameDroppedState_ = FrameDroppedState::StateFalse; 842 } 843 PendingEntryFrameDroppedState()844 void PendingEntryFrameDroppedState() 845 { 846 glueData_.entryFrameDroppedState_ = FrameDroppedState::StatePending; 847 } 848 IsDebugMode()849 bool IsDebugMode() 850 { 851 return glueData_.isDebugMode_; 852 } 853 SetDebugModeState()854 void SetDebugModeState() 855 { 856 glueData_.isDebugMode_ = true; 857 } 858 ResetDebugModeState()859 void ResetDebugModeState() 860 { 861 glueData_.isDebugMode_ = false; 862 } 863 864 template<typename T, typename V> SetInterruptValue(V value)865 void SetInterruptValue(V value) 866 { 867 volatile auto interruptValue = 868 reinterpret_cast<volatile std::atomic<uint64_t> *>(&glueData_.interruptVector_); 869 uint64_t oldValue = interruptValue->load(std::memory_order_relaxed); 870 auto newValue = oldValue; 871 do { 872 newValue = oldValue; 873 T::Set(value, &newValue); 874 } while (!std::atomic_compare_exchange_strong_explicit(interruptValue, &oldValue, newValue, 875 std::memory_order_release, 876 std::memory_order_relaxed)); 877 } 878 879 void InvokeWeakNodeFreeGlobalCallBack(); 880 void InvokeSharedNativePointerCallbacks(); 881 void InvokeWeakNodeNativeFinalizeCallback(); 882 bool IsStartGlobalLeakCheck() const; 883 bool EnableGlobalObjectLeakCheck() const; 884 bool EnableGlobalPrimitiveLeakCheck() const; 885 void 
WriteToStackTraceFd(std::ostringstream &buffer) const; 886 void SetStackTraceFd(int32_t fd); 887 void CloseStackTraceFd(); IncreaseGlobalNumberCount()888 uint32_t IncreaseGlobalNumberCount() 889 { 890 return ++globalNumberCount_; 891 } 892 SetPropertiesGrowStep(uint32_t step)893 void SetPropertiesGrowStep(uint32_t step) 894 { 895 glueData_.propertiesGrowStep_ = step; 896 } 897 GetPropertiesGrowStep()898 uint32_t GetPropertiesGrowStep() const 899 { 900 return glueData_.propertiesGrowStep_; 901 } 902 SetRandomStatePtr(uint64_t * ptr)903 void SetRandomStatePtr(uint64_t *ptr) 904 { 905 glueData_.randomStatePtr_ = reinterpret_cast<uintptr_t>(ptr); 906 } 907 SetTaskInfo(uintptr_t taskInfo)908 void SetTaskInfo(uintptr_t taskInfo) 909 { 910 glueData_.taskInfo_ = taskInfo; 911 } 912 GetTaskInfo()913 uintptr_t GetTaskInfo() const 914 { 915 return glueData_.taskInfo_; 916 } 917 918 void SetJitCodeMap(JSTaggedType exception, MachineCode* machineCode, std::string &methodName, uintptr_t offset); 919 GetJitCodeMaps()920 std::map<JSTaggedType, JitCodeVector*> &GetJitCodeMaps() 921 { 922 return jitCodeMaps_; 923 } 924 925 struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(), 926 BCStubEntries, 927 JSTaggedValue, 928 JSTaggedValue, 929 base::AlignedBool, 930 base::AlignedPointer, 931 base::AlignedPointer, 932 base::AlignedPointer, 933 base::AlignedPointer, 934 base::AlignedPointer, 935 base::AlignedPointer, 936 base::AlignedPointer, 937 base::AlignedPointer, 938 base::AlignedPointer, 939 RTStubEntries, 940 COStubEntries, 941 BuiltinStubEntries, 942 BuiltinHClassEntries, 943 BCDebuggerStubEntries, 944 BaselineStubEntries, 945 base::AlignedUint64, 946 base::AlignedUint64, 947 base::AlignedPointer, 948 base::AlignedUint64, 949 base::AlignedUint64, 950 base::AlignedPointer, 951 base::AlignedPointer, 952 base::AlignedUint64, 953 base::AlignedUint64, 954 JSTaggedValue, 955 base::AlignedBool, 956 base::AlignedBool, 957 base::AlignedUint32, 958 JSTaggedValue, 959 
base::AlignedPointer, 960 BuiltinEntries, 961 base::AlignedBool, 962 base::AlignedPointer, 963 base::AlignedPointer, 964 base::AlignedPointer, 965 base::AlignedUint32, 966 base::AlignedBool> { 967 enum class Index : size_t { 968 BcStubEntriesIndex = 0, 969 ExceptionIndex, 970 GlobalObjIndex, 971 StableArrayElementsGuardiansIndex, 972 CurrentFrameIndex, 973 LeaveFrameIndex, 974 LastFpIndex, 975 NewSpaceAllocationTopAddressIndex, 976 NewSpaceAllocationEndAddressIndex, 977 SOldSpaceAllocationTopAddressIndex, 978 SOldSpaceAllocationEndAddressIndex, 979 SNonMovableSpaceAllocationTopAddressIndex, 980 SNonMovableSpaceAllocationEndAddressIndex, 981 RTStubEntriesIndex, 982 COStubEntriesIndex, 983 BuiltinsStubEntriesIndex, 984 BuiltinHClassEntriesIndex, 985 BcDebuggerStubEntriesIndex, 986 BaselineStubEntriesIndex, 987 GCStateBitFieldIndex, 988 SharedGCStateBitFieldIndex, 989 FrameBaseIndex, 990 StackStartIndex, 991 StackLimitIndex, 992 GlueGlobalEnvIndex, 993 GlobalConstIndex, 994 AllowCrossThreadExecutionIndex, 995 InterruptVectorIndex, 996 IsStartHeapSamplingIndex, 997 IsDebugModeIndex, 998 IsFrameDroppedIndex, 999 PropertiesGrowStepIndex, 1000 EntryFrameDroppedStateIndex, 1001 CurrentContextIndex, 1002 BuiltinEntriesIndex, 1003 IsTracingIndex, 1004 UnsharedConstpoolsIndex, 1005 RandomStatePtrIndex, 1006 StateAndFlagsIndex, 1007 TaskInfoIndex, 1008 IsEnableElementsKindIndex, 1009 NumOfMembers 1010 }; 1011 static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes); 1012 GetExceptionOffsetGlueData1013 static size_t GetExceptionOffset(bool isArch32) 1014 { 1015 return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32); 1016 } 1017 GetGlobalObjOffsetGlueData1018 static size_t GetGlobalObjOffset(bool isArch32) 1019 { 1020 return GetOffset<static_cast<size_t>(Index::GlobalObjIndex)>(isArch32); 1021 } 1022 GetStableArrayElementsGuardiansOffsetGlueData1023 static size_t GetStableArrayElementsGuardiansOffset(bool isArch32) 1024 { 1025 return 
GetOffset<static_cast<size_t>(Index::StableArrayElementsGuardiansIndex)>(isArch32);
        }

        // ---------------------------------------------------------------------
        // Glue-field offset accessors.
        // Each returns the byte offset of one GlueData field for the requested
        // architecture (isArch32 selects the 32-bit layout). NOTE(review): these
        // offsets appear to be consumed by generated stub/assembly code that
        // addresses fields relative to the glue pointer — confirm before any
        // reordering of the fields below.
        // ---------------------------------------------------------------------
        static size_t GetGlobalConstOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
        }

        static size_t GetGCStateBitFieldOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GCStateBitFieldIndex)>(isArch32);
        }

        static size_t GetSharedGCStateBitFieldOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SharedGCStateBitFieldIndex)>(isArch32);
        }

        static size_t GetCurrentFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32);
        }

        static size_t GetLeaveFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32);
        }

        static size_t GetLastFpOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32);
        }

        // Allocation top/end address offsets: new space (local heap), shared old
        // space ("SOld") and shared non-movable space ("SNonMovable").
        static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetSOldSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetSOldSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetSNonMovableSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetSNonMovableSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationEndAddressIndex)>(isArch32);
        }

        // Stub-entry table offsets (bytecode, runtime, common, baseline,
        // builtins and debugger stub entry arrays).
        static size_t GetBCStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BcStubEntriesIndex)>(isArch32);
        }

        static size_t GetRTStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
        }

        static size_t GetCOStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
        }

        static size_t GetBaselineStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BaselineStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinHClassEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinHClassEntriesIndex)>(isArch32);
        }

        // Per-builtin hclass offsets: base offset of the entry table plus the
        // entry-relative offset supplied by BuiltinHClassEntries.
        static size_t GetBuiltinHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetBuiltinHClassOffset(type);
        }

        static size_t GetBuiltinInstanceHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetInstanceHClassOffset(type);
        }

        static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetPrototypeHClassOffset(type);
        }

        static size_t GetBuiltinPrototypeOfPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) +
                BuiltinHClassEntries::GetPrototypeOfPrototypeHClassOffset(type);
        }

        static size_t GetBuiltinExtraHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetExtraHClassOffset(type);
        }

        static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BcDebuggerStubEntriesIndex)>(isArch32);
        }

        static size_t GetFrameBaseOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
        }

        static size_t GetStackLimitOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
        }

        static size_t GetGlueGlobalEnvOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlueGlobalEnvIndex)>(isArch32);
        }

        static size_t GetAllowCrossThreadExecutionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::AllowCrossThreadExecutionIndex)>(isArch32);
        }

        static size_t GetInterruptVectorOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::InterruptVectorIndex)>(isArch32);
        }

        static size_t GetIsStartHeapSamplingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsStartHeapSamplingIndex)>(isArch32);
        }

        static size_t GetIsDebugModeOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsDebugModeIndex)>(isArch32);
        }

        static size_t GetIsFrameDroppedOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsFrameDroppedIndex)>(isArch32);
        }

        static size_t GetPropertiesGrowStepOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::PropertiesGrowStepIndex)>(isArch32);
        }

        static size_t GetEntryFrameDroppedStateOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::EntryFrameDroppedStateIndex)>(isArch32);
        }

        static size_t GetCurrentContextOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentContextIndex)>(isArch32);
        }

        static size_t GetBuiltinEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinEntriesIndex)>(isArch32);
        }

        static size_t GetIsTracingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsTracingIndex)>(isArch32);
        }

        static size_t GetUnSharedConstpoolsOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::UnsharedConstpoolsIndex)>(isArch32);
        }

        static size_t GetStateAndFlagsOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StateAndFlagsIndex)>(isArch32);
        }

        static size_t GetRandomStatePtrOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RandomStatePtrIndex)>(isArch32);
        }

        static size_t GetTaskInfoOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::TaskInfoIndex)>(isArch32);
        }

        static size_t GetIsEnableElementsKindOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsEnableElementsKindIndex)>(isArch32);
        }

        // ---------------------------------------------------------------------
        // GlueData field layout. Declaration order must match the Index entries
        // used by the offset getters above — do NOT reorder. Each field is
        // aligned to EAS so the per-field offsets are architecture-predictable.
        // ---------------------------------------------------------------------
        alignas(EAS) BCStubEntries bcStubEntries_ {};
        alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};      // pending exception; Hole() means none set
        alignas(EAS) JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
        alignas(EAS) bool stableArrayElementsGuardians_ {true};
        // Interpreter frame bookkeeping.
        alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
        alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
        alignas(EAS) JSTaggedType *lastFp_ {nullptr};
        // Cached pointers to allocator bump-pointer top/end words (local new
        // space and shared old / shared non-movable spaces).
        alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sOldSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sOldSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationEndAddress_ {nullptr};
        // Stub entry tables.
        alignas(EAS) RTStubEntries rtStubEntries_ {};
        alignas(EAS) COStubEntries coStubEntries_ {};
        alignas(EAS) BuiltinStubEntries builtinStubEntries_ {};
        alignas(EAS) BuiltinHClassEntries builtinHClassEntries_ {};
        alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_ {};
        alignas(EAS) BaselineStubEntries baselineStubEntries_ {};
        // GC state bit fields (local and shared); volatile — written/read across
        // threads, see the atomic accessors on JSThread.
        alignas(EAS) volatile uint64_t gcStateBitField_ {0ULL};
        alignas(EAS) volatile uint64_t sharedGCStateBitField_ {0ULL};
        alignas(EAS) JSTaggedType *frameBase_ {nullptr};
        alignas(EAS) uint64_t stackStart_ {0};
        alignas(EAS) uint64_t stackLimit_ {0};
        alignas(EAS) GlobalEnv *glueGlobalEnv_ {nullptr};
        alignas(EAS) GlobalEnvConstants *globalConst_ {nullptr};
        alignas(EAS) bool allowCrossThreadExecution_ {false};
        alignas(EAS) volatile uint64_t interruptVector_ {0};
        alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
        alignas(EAS) bool isDebugMode_ {false};
        alignas(EAS) bool isFrameDropped_ {false};
        alignas(EAS) uint32_t propertiesGrowStep_ {JSObjectResizingStrategy::PROPERTIES_GROW_SIZE};
        alignas(EAS) uint64_t entryFrameDroppedState_ {FrameDroppedState::StateFalse};
        alignas(EAS) EcmaContext *currentContext_ {nullptr};
        alignas(EAS) BuiltinEntries builtinEntries_ {};
        alignas(EAS) bool isTracing_ {false};
        alignas(EAS) uintptr_t unsharedConstpools_ {0};
        alignas(EAS) uintptr_t randomStatePtr_ {0};
        // Packed thread state + flags word, manipulated atomically (see
        // JSThread::GetState / ReadFlag / SetFlag).
        alignas(EAS) ThreadStateAndFlags stateAndFlags_ {};
        alignas(EAS) uintptr_t taskInfo_ {0};
        alignas(EAS) bool isEnableElementsKind_ {false};
    };
    // Layout guard: GlueData size must match the precomputed per-arch sizes.
    STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);

    void PushContext(EcmaContext *context);
    void PopContext();

    // Returns the EcmaContext currently installed in the glue area.
    EcmaContext *GetCurrentEcmaContext() const
    {
        return glueData_.currentContext_;
    }

    // Returns the single-character string cache; asserts it has been initialized.
    JSTaggedValue GetSingleCharTable() const
    {
        ASSERT(glueData_.globalConst_->GetSingleCharTable() != JSTaggedValue::Hole());
        return glueData_.globalConst_->GetSingleCharTable();
    }

    void SwitchCurrentContext(EcmaContext *currentContext, bool isInIterate = false);
    // Returns a copy of the list of contexts attached to this thread.
    CVector<EcmaContext *> GetEcmaContexts()
    {
        return contexts_;
    }

    bool IsPropertyCacheCleared() const;

    bool EraseContext(EcmaContext *context);
    void ClearContextCachedConstantPool();

    const GlobalEnvConstants *GetFirstGlobalConst() const;
    bool IsAllContextsInitialized() const;
    bool IsReadyToUpdateDetector() const;
    Area *GetOrCreateRegExpCache();

    void InitializeBuiltinObject(const std::string& key);
    void InitializeBuiltinObject();

    // Full-mark request flag: set/reset by GC to request a full (major) mark.
    bool FullMarkRequest() const
    {
        return fullMarkRequest_;
    }

    void SetFullMarkRequest()
    {
        fullMarkRequest_ = true;
    }

    void ResetFullMarkRequest()
    {
        fullMarkRequest_ = false;
    }

    // Marks whether shared GC is currently collecting this thread's
    // local-to-shared remembered set.
    void SetProcessingLocalToSharedRset(bool processing)
    {
        processingLocalToSharedRset_ = processing;
    }

    bool IsProcessingLocalToSharedRset() const
    {
        return processingLocalToSharedRset_;
    }

    inline bool IsThreadSafe() const
    {
        return IsMainThread() || HasSuspendRequest();
    }

    // Suspended = a suspend was requested AND the thread has left RUNNING.
    bool IsSuspended() const
    {
        bool f = ReadFlag(ThreadFlag::SUSPEND_REQUEST);
        bool s = (GetState() != ThreadState::RUNNING);
        return f && s;
    }

    inline bool HasSuspendRequest() const
    {
        return ReadFlag(ThreadFlag::SUSPEND_REQUEST);
    }

    // Safepoint poll: blocks in WaitSuspension() if a suspend was requested.
    void CheckSafepointIfSuspended()
    {
        if (HasSuspendRequest()) {
            WaitSuspension();
        }
    }

    bool IsInSuspendedState() const
    {
        return GetState() == ThreadState::IS_SUSPENDED;
    }

    bool IsInRunningState() const
    {
        return GetState() == ThreadState::RUNNING;
    }

    bool IsInRunningStateOrProfiling() const;

    // Reads the ThreadState from the upper 16 bits of the packed
    // state-and-flags word (acquire ordering pairs with state writers).
    ThreadState GetState() const
    {
        uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
        return static_cast<enum ThreadState>(stateAndFlags >> THREAD_STATE_OFFSET);
    }
    void PUBLIC_API UpdateState(ThreadState newState);
    void SuspendThread(bool internalSuspend, SuspendBarrier* barrier = nullptr);
    void ResumeThread(bool internalSuspend);
    void WaitSuspension();
    static bool IsMainThread();
    PUBLIC_API void ManagedCodeBegin();
    PUBLIC_API void ManagedCodeEnd();
#ifndef NDEBUG
    bool IsInManagedState() const;
    MutatorLock::MutatorLockState GetMutatorLockState() const;
    void SetMutatorLockState(MutatorLock::MutatorLockState newState);
#endif
    void SetWeakFinalizeTaskCallback(const WeakFinalizeTaskCallback &callback)
    {
        finalizeTaskCallback_ = callback;
    }

    // Returns the next job id, wrapping back to 1 after UINT64_MAX (0 is never
    // returned).
    uint64_t GetJobId()
    {
        if (jobId_ == UINT64_MAX) {
            jobId_ = 0;
        }
        return ++jobId_;
    }

    void SetAsyncCleanTaskCallback(const NativePointerTaskCallback &callback)
    {
        asyncCleanTaskCb_ = callback;
    }

    NativePointerTaskCallback GetAsyncCleanTaskCallback() const
    {
        return asyncCleanTaskCb_;
    }

    static void RegisterThread(JSThread *jsThread);

    static void UnregisterThread(JSThread *jsThread);

    // Thread-kind predicates (JS / JIT / daemon).
    bool IsJSThread() const
    {
        return threadType_ == ThreadType::JS_THREAD;
    }

    bool IsJitThread() const
    {
        return threadType_ == ThreadType::JIT_THREAD;
    }

    bool IsDaemonThread() const
    {
        return threadType_ == ThreadType::DAEMON_THREAD;
    }

    // Daemon_Thread and JS_Thread have some difference in transition, for example, when transition to running,
    // JS_Thread may take some local_gc actions, but Daemon_Thread do not need.
    void TransferDaemonThreadToRunning();

    RecursiveMutex *GetJitLock()
    {
        return &jitMutex_;
    }

    RecursiveMutex &GetProfileTypeAccessorLock()
    {
        return profileTypeAccessorLockMutex_;
    }

    void SetMachineCodeLowMemory(bool isLow)
    {
        machineCodeLowMemory_ = isLow;
    }

    bool IsMachineCodeLowMemory()
    {
        return machineCodeLowMemory_;
    }

    // Opaque host environment pointer (e.g. napi env) attached to this thread.
    void *GetEnv() const
    {
        return env_;
    }

    void SetEnv(void *env)
    {
        env_ = env;
    }

    void SetIsInConcurrentScope(bool flag)
    {
        isInConcurrentScope_ = flag;
    }

    bool IsInConcurrentScope()
    {
        return isInConcurrentScope_;
    }

    // Redirects the regular write-barrier stub/runtime entries to their
    // Eden-aware variants, enabling Eden GC barriers for generated code.
    void EnableEdenGCBarriers()
    {
        auto setValueStub = GetFastStubEntry(kungfu::CommonStubCSigns::SetValueWithEdenBarrier);
        SetFastStubEntry(kungfu::CommonStubCSigns::SetValueWithBarrier, setValueStub);
        auto markStub = GetRTInterface(kungfu::RuntimeStubCSigns::ID_MarkingBarrierWithEden);
        RegisterRTInterface(kungfu::RuntimeStubCSigns::ID_MarkingBarrier, markStub);
        auto setNotShareValueStub = GetFastStubEntry(kungfu::CommonStubCSigns::SetNonSValueWithEdenBarrier);
        SetFastStubEntry(kungfu::CommonStubCSigns::SetNonSValueWithBarrier, setNotShareValueStub);
        auto asmCheckStub = GetRTInterface(kungfu::RuntimeStubCSigns::ID_ASMWriteBarrierWithEden);
        RegisterRTInterface(kungfu::RuntimeStubCSigns::ID_ASMFastWriteBarrier, asmCheckStub);
    }

    DateUtils *GetDateUtils() const
    {
        return dateUtils_;
    }

#ifndef NDEBUG
    // Debug-only bookkeeping for a global suspend-all in progress.
    inline void LaunchSuspendAll()
    {
        launchedSuspendAll_ = true;
    }

    inline bool HasLaunchedSuspendAll() const
    {
        return launchedSuspendAll_;
    }

    inline void CompleteSuspendAll()
    {
        launchedSuspendAll_ = false;
    }
#endif

protected:
    // Records the current OS thread id (release pairs with readers of id_).
    void SetThreadId()
    {
        id_.store(JSThread::GetCurrentThreadId(), std::memory_order_release);
    }

    // When call EcmaVM::PreFork(), the std::thread for Daemon_Thread is finished, but the Daemon_Thread instance
    // is still alive, and need to reset ThreadId to 0.
    void ResetThreadId()
    {
        id_.store(0, std::memory_order_release);
    }
private:
    NO_COPY_SEMANTIC(JSThread);
    NO_MOVE_SEMANTIC(JSThread);
    void SetGlobalConst(GlobalEnvConstants *globalConst)
    {
        glueData_.globalConst_ = globalConst;
    }
    void SetCurrentEcmaContext(EcmaContext *context)
    {
        glueData_.currentContext_ = context;
    }

    void SetArrayHClassIndexMap(const CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> &map)
    {
        arrayHClassIndexMap_ = map;
    }

    void TransferFromRunningToSuspended(ThreadState newState);

    void TransferToRunning();

    inline void StoreState(ThreadState newState);

    void StoreRunningState(ThreadState newState);

    void StoreSuspendedState(ThreadState newState);

    // Tests one flag bit in the lower 16 bits of the packed state-and-flags
    // word (acquire ordering pairs with SetFlag/ClearFlag writers).
    bool ReadFlag(ThreadFlag flag) const
    {
        uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
        uint16_t flags = (stateAndFlags & THREAD_FLAGS_MASK);
        return (flags & static_cast<uint16_t>(flag)) != 0;
    }
    // Atomically sets one flag bit in the packed state-and-flags word.
    void SetFlag(ThreadFlag flag)
    {
        glueData_.stateAndFlags_.asAtomicInt.fetch_or(flag, std::memory_order_seq_cst);
    }

    // Atomically clears one flag bit in the packed state-and-flags word.
    void ClearFlag(ThreadFlag flag)
    {
        glueData_.stateAndFlags_.asAtomicInt.fetch_and(UINT32_MAX ^ flag, std::memory_order_seq_cst);
    }

    void DumpStack() DUMP_API_ATTR;

    static size_t GetAsmStackLimit();

    static constexpr size_t DEFAULT_MAX_SYSTEM_STACK_SIZE = 8_MB;

    // NOTE(review): glueData_ is declared first; generated code appears to
    // address its fields at fixed offsets from the JSThread pointer — keep it
    // as the first member (confirm with the stub compiler before moving).
    GlueData glueData_;
    std::atomic<ThreadId> id_ {0};          // OS thread id; 0 after ResetThreadId()
    EcmaVM *vm_ {nullptr};
    void *env_ {nullptr};                   // opaque host env (see GetEnv/SetEnv)
    Area *regExpCache_ {nullptr};

    // MM: handles, global-handles, and aot-stubs.
    int nestedLevel_ = 0;
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
    bool runningNativeFinalizeCallbacks_ {false};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeFreeGlobalCallbacks_ {};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeNativeFinalizeCallbacks_ {};

    EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
    EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
    int32_t stackTraceFd_ {-1};             // -1 = no stack-trace file open

    // Global-handle operations injected by the owning storage.
    std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
    std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
    std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack_,
        WeakClearCallback nativeFinalizeCallBack)> setWeak_;
    std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
    std::function<bool(uintptr_t addr)> isWeak_;
    NativePointerTaskCallback asyncCleanTaskCb_ {nullptr};
    WeakFinalizeTaskCallback finalizeTaskCallback_ {nullptr};
    uint32_t globalNumberCount_ {0};

    // Run-time state
    bool getStackSignal_ {false};
    bool runtimeState_ {false};
    bool isAsmInterpreter_ {false};
    VmThreadControl *vmThreadControl_ {nullptr};
    bool enableStackSourceFile_ {true};
    bool enableLazyBuiltins_ {false};
    bool readyForGCIterating_ {false};
    // CpuProfiler
    bool isProfiling_ {false};
    bool gcState_ {false};
    std::atomic_bool needProfiling_ {false};
    std::string profileName_ {""};

    bool finalizationCheckState_ {false};
    // Shared heap
    bool isMainThread_ {false};
    bool fullMarkRequest_ {false};
    // Shared heap collect local heap Rset
    bool processingLocalToSharedRset_ {false};

    // { ElementsKind, (hclass, hclassWithProto) }
    CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> arrayHClassIndexMap_;
    CMap<JSHClass *, GlobalIndex> ctorHclassEntries_;

    CVector<EcmaContext *> contexts_;       // all contexts attached to this thread
    EcmaContext *currentContext_ {nullptr};

    // Suspend/resume machinery (see SuspendThread/ResumeThread/WaitSuspension).
    Mutex suspendLock_;
    int32_t suspendCount_ {0};
    ConditionVariable suspendCondVar_;
    SuspendBarrier *suspendBarrier_ {nullptr};

    uint64_t jobId_ {0};                    // monotonically increasing, see GetJobId()

    ThreadType threadType_ {ThreadType::JS_THREAD};
    RecursiveMutex jitMutex_;
    bool machineCodeLowMemory_ {false};
    RecursiveMutex profileTypeAccessorLockMutex_;
    DateUtils *dateUtils_ {nullptr};

#ifndef NDEBUG
    MutatorLock::MutatorLockState mutatorLockState_ = MutatorLock::MutatorLockState::UNLOCKED;
    std::atomic<bool> launchedSuspendAll_ {false};
#endif
    // Collect a map from JsError to MachineCode objects, JsError objects with stack frame generated by jit in the map.
    // It will be used to keep MachineCode objects alive (for dump) before JsError object be free.
    std::map<JSTaggedType, JitCodeVector*> jitCodeMaps_;

    std::atomic<bool> needTermination_ {false};
    std::atomic<bool> hasTerminated_ {false};

    bool isInConcurrentScope_ {false};

    friend class GlobalHandleCollection;
    friend class EcmaVM;
    friend class EcmaContext;
    friend class JitVM;
};
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_JS_THREAD_H