/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_JS_THREAD_H
#define ECMASCRIPT_JS_THREAD_H

#include <atomic>
#include <sstream>

#include "ecmascript/base/aligned_struct.h"
#include "ecmascript/compiler/builtins/builtins_call_signature.h"
#include "ecmascript/compiler/common_stubs.h"
#include "ecmascript/compiler/interpreter_stub.h"
#include "ecmascript/compiler/rt_call_signature.h"
#include "ecmascript/dfx/vm_thread_control.h"
#include "ecmascript/frames.h"
#include "ecmascript/global_env_constants.h"
#include "ecmascript/mem/visitor.h"

#include "libpandabase/os/thread.h"

namespace panda::ecmascript {
class EcmaHandleScope;
class EcmaVM;
class HeapRegionAllocator;
class PropertiesCache;
template<typename T>
class EcmaGlobalStorage;
class Node;
class DebugNode;
using WeakClearCallback = void (*)(void *);

enum class MarkStatus : uint8_t {
    READY_TO_MARK,
    MARKING,
    MARK_FINISHED,
};

enum class PGOProfilerStatus : uint8_t {
    PGO_PROFILER_DISABLE,
    PGO_PROFILER_ENABLE,
};

struct BCStubEntries {
    static constexpr size_t EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT =
        kungfu::BytecodeStubCSigns::NUM_OF_ALL_NORMAL_STUBS;
    // The number of bytecodes.
    static constexpr size_t BC_HANDLER_COUNT = kungfu::BytecodeStubCSigns::LAST_VALID_OPCODE + 1;
    static constexpr size_t COUNT = kungfu::BytecodeStubCSigns::NUM_OF_STUBS;
    static_assert(EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT <= COUNT);
    Address stubEntries_[COUNT] = {0};

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address *GetAddr()
    {
        return reinterpret_cast<Address *>(stubEntries_);
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(BCStubEntries), BCStubEntries::SizeArch32, BCStubEntries::SizeArch64);
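
// Editor's note (illustrative, not part of the original header): the stub
// entry tables in this file are flat Address arrays so that generated code can
// reach a stub with a single indexed load. A bytecode handler dispatch is
// roughly:
//     Address handler = *reinterpret_cast<Address *>(
//         glue + bcStubEntriesOffset + opcode * sizeof(Address));
// where `glue` is the value returned by JSThread::GetGlueAddr() declared later
// in this file, and `bcStubEntriesOffset` stands for
// GlueData::GetBCStubEntriesOffset(...).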

struct RTStubEntries {
    static constexpr size_t COUNT = kungfu::RuntimeStubCSigns::NUM_OF_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(RTStubEntries), RTStubEntries::SizeArch32, RTStubEntries::SizeArch64);

struct COStubEntries {
    static constexpr size_t COUNT = kungfu::CommonStubCSigns::NUM_OF_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(COStubEntries), COStubEntries::SizeArch32, COStubEntries::SizeArch64);

struct BCDebuggerStubEntries {
    static constexpr size_t EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT =
        kungfu::BytecodeStubCSigns::NUM_OF_ALL_NORMAL_STUBS;
    static constexpr size_t COUNT = kungfu::BytecodeStubCSigns::LAST_VALID_OPCODE + 1;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }

    void SetNonexistentBCHandlerStubEntries(Address addr)
    {
        for (size_t i = EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT; i < COUNT; i++) {
            if (stubEntries_[i] == 0) {
                stubEntries_[i] = addr;
            }
        }
    }
};

struct BuiltinStubEntries {
    static constexpr size_t COUNT = kungfu::BuiltinsStubCSigns::NUM_OF_BUILTINS_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
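
// Editor's note (illustrative): the stub tables above share one layout so the
// runtime can address them uniformly. BCDebuggerStubEntries is sized by
// LAST_VALID_OPCODE + 1 rather than NUM_OF_STUBS, so opcodes that have no
// dedicated debugger handler are backfilled with one shared entry, e.g.:
//     bcDebuggerStubEntries.SetNonexistentBCHandlerStubEntries(defaultAddr);
// routes every still-unset opcode slot to `defaultAddr` (a hypothetical
// address chosen by the caller).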

class JSThread {
public:
    static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2;
    static constexpr int PGO_PROFILER_BITFIELD_NUM = 1;
    static constexpr uint32_t RESERVE_STACK_SIZE = 128;
    using MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>;
    using PGOStatusBits = MarkStatusBits::NextField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_NUM>;
    using ThreadId = uint32_t;

    JSThread(EcmaVM *vm);

    PUBLIC_API ~JSThread();

    EcmaVM *GetEcmaVM() const
    {
        return vm_;
    }

    static JSThread *Create(EcmaVM *vm);

    int GetNestedLevel() const
    {
        return nestedLevel_;
    }

    void SetNestedLevel(int level)
    {
        nestedLevel_ = level;
    }

    void SetLastFp(JSTaggedType *fp)
    {
        glueData_.lastFp_ = fp;
    }

    const JSTaggedType *GetCurrentSPFrame() const
    {
        return glueData_.currentFrame_;
    }

    void SetCurrentSPFrame(JSTaggedType *sp)
    {
        glueData_.currentFrame_ = sp;
    }

    const JSTaggedType *GetLastLeaveFrame() const
    {
        return glueData_.leaveFrame_;
    }

    void SetLastLeaveFrame(JSTaggedType *sp)
    {
        glueData_.leaveFrame_ = sp;
    }

    const JSTaggedType *GetCurrentFrame() const;

    void SetCurrentFrame(JSTaggedType *sp);

    const JSTaggedType *GetCurrentInterpretedFrame() const;

    bool DoStackOverflowCheck(const JSTaggedType *sp);

    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t *end)
    {
        glueData_.newSpaceAllocationTopAddress_ = top;
        glueData_.newSpaceAllocationEndAddress_ = end;
    }

    void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
                 const RootBaseAndDerivedVisitor &derivedVisitor);

    void IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor);

    uintptr_t *PUBLIC_API ExpandHandleStorage();
    void PUBLIC_API ShrinkHandleStorage(int prevIndex);
    void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const;
    bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const;

    JSTaggedType *GetHandleScopeStorageNext() const
    {
        return handleScopeStorageNext_;
    }

    void SetHandleScopeStorageNext(JSTaggedType *value)
    {
        handleScopeStorageNext_ = value;
    }

    JSTaggedType *GetHandleScopeStorageEnd() const
    {
        return handleScopeStorageEnd_;
    }

    std::vector<std::pair<WeakClearCallback, void *>> *GetWeakNodeSecondPassCallbacks()
    {
        return &weakNodeSecondPassCallbacks_;
    }

    void SetHandleScopeStorageEnd(JSTaggedType *value)
    {
        handleScopeStorageEnd_ = value;
    }

    int GetCurrentHandleStorageIndex()
    {
        return currentHandleStorageIndex_;
    }

    void HandleScopeCountAdd()
    {
        handleScopeCount_++;
    }

    void HandleScopeCountDec()
    {
        handleScopeCount_--;
    }

    void SetLastHandleScope(EcmaHandleScope *scope)
    {
        lastHandleScope_ = scope;
    }

    EcmaHandleScope *GetLastHandleScope()
    {
        return lastHandleScope_;
    }
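
    // Editor's note (illustrative sketch of how EcmaHandleScope is expected to
    // use the accessors above; simplified, not the actual implementation):
    //     JSTaggedType *prevNext = thread->GetHandleScopeStorageNext();
    //     int prevIndex = thread->GetCurrentHandleStorageIndex();
    //     thread->HandleScopeCountAdd();
    //     ... allocate handles, calling ExpandHandleStorage() whenever the
    //         cursor reaches GetHandleScopeStorageEnd() ...
    //     thread->SetHandleScopeStorageNext(prevNext);
    //     thread->ShrinkHandleStorage(prevIndex);
    //     thread->HandleScopeCountDec();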

    void SetException(JSTaggedValue exception);

    JSTaggedValue GetException() const
    {
        return glueData_.exception_;
    }

    bool HasPendingException() const
    {
        return !glueData_.exception_.IsHole();
    }

    void ClearException();

    void SetGlobalObject(JSTaggedValue globalObject)
    {
        glueData_.globalObject_ = globalObject;
    }

    JSTaggedValue GetStableArrayElementsGuardians() const
    {
        return glueData_.stableArrayElementsGuardians_;
    }

    void SetStableArrayElementsGuardians(JSTaggedValue guardians)
    {
        glueData_.stableArrayElementsGuardians_ = guardians;
    }

    const GlobalEnvConstants *GlobalConstants() const
    {
        return &glueData_.globalConst_;
    }

    void NotifyStableArrayElementsGuardians(JSHandle<JSObject> receiver);

    bool IsStableArrayElementsGuardiansInvalid() const
    {
        return !stableArrayElementsGuardians_;
    }

    void ResetGuardians();

    JSTaggedValue GetCurrentLexenv() const;

    void RegisterRTInterface(size_t id, Address addr)
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        glueData_.rtStubEntries_.Set(id, addr);
    }

    Address GetRTInterface(size_t id) const
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        return glueData_.rtStubEntries_.Get(id);
    }

    Address GetFastStubEntry(uint32_t id) const
    {
        return glueData_.coStubEntries_.Get(id);
    }

    void SetFastStubEntry(size_t id, Address entry)
    {
        glueData_.coStubEntries_.Set(id, entry);
    }

    Address GetBuiltinStubEntry(uint32_t id) const
    {
        return glueData_.builtinStubEntries_.Get(id);
    }

    void SetBuiltinStubEntry(size_t id, Address entry)
    {
        glueData_.builtinStubEntries_.Set(id, entry);
    }

    Address GetBCStubEntry(uint32_t id) const
    {
        return glueData_.bcStubEntries_.Get(id);
    }

    void SetBCStubEntry(size_t id, Address entry)
    {
        glueData_.bcStubEntries_.Set(id, entry);
    }

    void SetBCDebugStubEntry(size_t id, Address entry)
    {
        glueData_.bcDebuggerStubEntries_.Set(id, entry);
    }

    Address *GetBytecodeHandler()
    {
        return glueData_.bcStubEntries_.GetAddr();
    }

    void PUBLIC_API CheckSwitchDebuggerBCStub();

    ThreadId GetThreadId() const
    {
        return id_.load(std::memory_order_relaxed);
    }

    void SetThreadId()
    {
        id_.store(JSThread::GetCurrentThreadId(), std::memory_order_relaxed);
    }

    static ThreadId GetCurrentThreadId()
    {
        return os::thread::GetCurrentThreadId();
    }
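
    // Editor's note (illustrative caller pattern, not from the original
    // header): runtime code that may raise a JS exception typically checks the
    // per-thread exception slot right after the call:
    //     JSTaggedValue result = SomeRuntimeCall(thread, ...);  // hypothetical call
    //     if (thread->HasPendingException()) {
    //         return JSTaggedValue::Exception();  // propagate upward
    //     }
    // The slot uses Hole as the "no exception" sentinel, which is why
    // HasPendingException() tests !IsHole() rather than comparing to Undefined.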

    void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor);

    PropertiesCache *GetPropertiesCache() const
    {
        return propertiesCache_;
    }

    void SetMarkStatus(MarkStatus status)
    {
        MarkStatusBits::Set(status, &glueData_.threadStateBitField_);
    }

    bool IsReadyToMark() const
    {
        auto status = MarkStatusBits::Decode(glueData_.threadStateBitField_);
        return status == MarkStatus::READY_TO_MARK;
    }

    bool IsMarking() const
    {
        auto status = MarkStatusBits::Decode(glueData_.threadStateBitField_);
        return status == MarkStatus::MARKING;
    }

    bool IsMarkFinished() const
    {
        auto status = MarkStatusBits::Decode(glueData_.threadStateBitField_);
        return status == MarkStatus::MARK_FINISHED;
    }

    void SetPGOProfilerEnable(bool enable)
    {
        PGOProfilerStatus status =
            enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE;
        PGOStatusBits::Set(status, &glueData_.threadStateBitField_);
    }

    bool IsPGOProfilerEnable() const
    {
        auto status = PGOStatusBits::Decode(glueData_.threadStateBitField_);
        return status == PGOProfilerStatus::PGO_PROFILER_ENABLE;
    }

    bool CheckSafepoint() const;

    void SetGetStackSignal(bool isParseStack)
    {
        getStackSignal_ = isParseStack;
    }

    bool GetStackSignal() const
    {
        return getStackSignal_;
    }

    void SetCallNapiGetStack(bool needGetStack)
    {
        callNapiGetStack_ = needGetStack;
    }

    bool GetCallNapiGetStack()
    {
        return callNapiGetStack_;
    }

    void SetGcState(bool gcState)
    {
        gcState_ = gcState;
    }

    bool GetGcState() const
    {
        return gcState_;
    }

    void SetRuntimeState(bool runtimeState)
    {
        runtimeState_ = runtimeState;
    }

    bool GetRuntimeState() const
    {
        return runtimeState_;
    }

    void EnableAsmInterpreter()
    {
        isAsmInterpreter_ = true;
    }

    bool IsAsmInterpreter() const
    {
        return isAsmInterpreter_;
    }

    VmThreadControl *GetVmThreadControl() const
    {
        return vmThreadControl_;
    }

    static constexpr size_t GetGlueDataOffset()
    {
        return MEMBER_OFFSET(JSThread, glueData_);
    }

    uintptr_t GetGlueAddr() const
    {
        return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset();
    }

    static JSThread *GlueToJSThread(uintptr_t glue)
    {
        // very careful to modify here
        return reinterpret_cast<JSThread *>(glue - GetGlueDataOffset());
    }

    static uintptr_t GetCurrentStackPosition()
    {
        return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
    }
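
    // Editor's note (illustrative invariant, following directly from the two
    // definitions above): GetGlueAddr() and GlueToJSThread() are inverses, i.e.
    //     uintptr_t glue = thread->GetGlueAddr();
    //     ASSERT(JSThread::GlueToJSThread(glue) == thread);
    // so compiled code, which only carries the glue pointer, can recover the
    // owning JSThread with a single subtraction.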

    bool IsLegalAsmSp(uintptr_t sp) const;

    bool IsLegalThreadSp(uintptr_t sp) const;

    bool IsLegalSp(uintptr_t sp) const;

    bool IsPrintBCOffset() const
    {
        return enablePrintBCOffset_;
    }

    void SetPrintBCOffset(bool flag)
    {
        enablePrintBCOffset_ = flag;
    }

    void CollectBCOffsetInfo();

    void SetCheckAndCallEnterState(bool state)
    {
        finalizationCheckState_ = state;
    }

    bool GetCheckAndCallEnterState() const
    {
        return finalizationCheckState_;
    }

    uint64_t GetStackStart() const
    {
        return glueData_.stackStart_;
    }

    uint64_t GetStackLimit() const
    {
        return glueData_.stackLimit_;
    }

    GlobalEnv *GetGlueGlobalEnv()
    {
        return glueData_.glueGlobalEnv_;
    }

    void SetGlueGlobalEnv(GlobalEnv *global)
    {
        ASSERT(global != nullptr);
        glueData_.glueGlobalEnv_ = global;
    }

    inline uintptr_t NewGlobalHandle(JSTaggedType value)
    {
        return newGlobalHandle_(value);
    }

    inline void DisposeGlobalHandle(uintptr_t nodeAddr)
    {
        disposeGlobalHandle_(nodeAddr);
    }

    inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr,
                             WeakClearCallback nativeFinalizeCallBack = nullptr)
    {
        return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
    }

    inline uintptr_t ClearWeak(uintptr_t nodeAddr)
    {
        return clearWeak_(nodeAddr);
    }

    inline bool IsWeak(uintptr_t addr) const
    {
        return isWeak_(addr);
    }

    bool IsStartGlobalLeakCheck() const;
    bool EnableGlobalObjectLeakCheck() const;
    bool EnableGlobalPrimitiveLeakCheck() const;
    void WriteToStackTraceFd(std::ostringstream &buffer) const;
    void SetStackTraceFd(int32_t fd);
    void CloseStackTraceFd();

    uint32_t IncreaseGlobalNumberCount()
    {
        return ++globalNumberCount_;
    }

    struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
                                                 BCStubEntries,
                                                 JSTaggedValue,
                                                 JSTaggedValue,
                                                 JSTaggedValue,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 RTStubEntries,
                                                 COStubEntries,
                                                 BuiltinStubEntries,
                                                 BCDebuggerStubEntries,
                                                 base::AlignedUint64,
                                                 base::AlignedPointer,
                                                 base::AlignedUint64,
                                                 base::AlignedUint64,
                                                 base::AlignedPointer,
                                                 GlobalEnvConstants> {
        enum class Index : size_t {
            BCStubEntriesIndex = 0,
            ExceptionIndex,
            GlobalObjIndex,
            StableArrayElementsGuardiansIndex,
            CurrentFrameIndex,
            LeaveFrameIndex,
            LastFpIndex,
            NewSpaceAllocationTopAddressIndex,
            NewSpaceAllocationEndAddressIndex,
            RTStubEntriesIndex,
            COStubEntriesIndex,
            BuiltinsStubEntriesIndex,
            BCDebuggerStubEntriesIndex,
            StateBitFieldIndex,
            FrameBaseIndex,
            StackStartIndex,
            StackLimitIndex,
            GlueGlobalEnvIndex,
            GlobalConstIndex,
            NumOfMembers
        };
        static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);
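
        // Editor's note (illustrative): the AlignedStruct template arguments,
        // the Index enum above, and the alignas(EAS) member declarations below
        // must stay in lockstep; the GetXxxOffset(bool isArch32) helpers then
        // resolve each member's byte offset for 32- and 64-bit targets, e.g.:
        //     size_t off = GlueData::GetExceptionOffset(false);  // 64-bit offset of exception_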

        static size_t GetExceptionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32);
        }

        static size_t GetGlobalObjOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalObjIndex)>(isArch32);
        }

        static size_t GetStableArrayElementsGuardiansOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StableArrayElementsGuardiansIndex)>(isArch32);
        }

        static size_t GetGlobalConstOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
        }

        static size_t GetStateBitFieldOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StateBitFieldIndex)>(isArch32);
        }

        static size_t GetCurrentFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32);
        }

        static size_t GetLeaveFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32);
        }

        static size_t GetLastFpOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetBCStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BCStubEntriesIndex)>(isArch32);
        }

        static size_t GetRTStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
        }

        static size_t GetCOStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
        }

        static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BCDebuggerStubEntriesIndex)>(isArch32);
        }

        static size_t GetFrameBaseOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
        }

        static size_t GetStackLimitOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
        }

        static size_t GetGlueGlobalEnvOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlueGlobalEnvIndex)>(isArch32);
        }

        static size_t GetGlueGlobalConstOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
        }

        alignas(EAS) BCStubEntries bcStubEntries_;
        alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};
        alignas(EAS) JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
        alignas(EAS) JSTaggedValue stableArrayElementsGuardians_ {JSTaggedValue::True()};
        alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
        alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
        alignas(EAS) JSTaggedType *lastFp_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) RTStubEntries rtStubEntries_;
        alignas(EAS) COStubEntries coStubEntries_;
        alignas(EAS) BuiltinStubEntries builtinStubEntries_;
        alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_;
        alignas(EAS) volatile uint64_t threadStateBitField_ {0ULL};
        alignas(EAS) JSTaggedType *frameBase_ {nullptr};
        alignas(EAS) uint64_t stackStart_ {0};
        alignas(EAS) uint64_t stackLimit_ {0};
        alignas(EAS) GlobalEnv *glueGlobalEnv_;
        alignas(EAS) GlobalEnvConstants globalConst_;
    };
    STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);
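
    // Editor's note (illustrative): threadStateBitField_ is the shared backing
    // store for MarkStatusBits (bits 0-1) and PGOStatusBits (bit 2) defined at
    // the top of JSThread. It is declared volatile, presumably because the
    // concurrent marker inspects it while the JS thread mutates it through
    // SetMarkStatus()/SetPGOProfilerEnable().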

private:
    NO_COPY_SEMANTIC(JSThread);
    NO_MOVE_SEMANTIC(JSThread);

    void DumpStack() DUMP_API_ATTR;

    static size_t GetAsmStackLimit();

    static const uint32_t NODE_BLOCK_SIZE_LOG2 = 10;
    static const uint32_t NODE_BLOCK_SIZE = 1U << NODE_BLOCK_SIZE_LOG2;
    static constexpr int32_t MIN_HANDLE_STORAGE_SIZE = 2;
    GlueData glueData_;
    std::atomic<ThreadId> id_;
    EcmaVM *vm_ {nullptr};

    // MM: handles, global-handles, and aot-stubs.
    int nestedLevel_ = 0;
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
    JSTaggedType *handleScopeStorageNext_ {nullptr};
    JSTaggedType *handleScopeStorageEnd_ {nullptr};
    std::vector<std::array<JSTaggedType, NODE_BLOCK_SIZE> *> handleStorageNodes_ {};
    int32_t currentHandleStorageIndex_ {-1};
    int32_t handleScopeCount_ {0};
    EcmaHandleScope *lastHandleScope_ {nullptr};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeSecondPassCallbacks_ {};

    PropertiesCache *propertiesCache_ {nullptr};
    EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
    EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
    int32_t stackTraceFd_ {-1};

    std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
    std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
    std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack,
                            WeakClearCallback nativeFinalizeCallBack)> setWeak_;
    std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
    std::function<bool(uintptr_t addr)> isWeak_;
    uint32_t globalNumberCount_ {0};

    // Run-time state
    bool getStackSignal_ {false};
    bool callNapiGetStack_ {false};
    bool gcState_ {false};
    bool runtimeState_ {false};
    bool isAsmInterpreter_ {false};
    VmThreadControl *vmThreadControl_ {nullptr};
    bool enablePrintBCOffset_ {false};
    bool stableArrayElementsGuardians_ {true};

    bool finalizationCheckState_ {false};

    friend class EcmaHandleScope;
    friend class GlobalHandleCollection;
};
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_JS_THREAD_H