/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_JS_THREAD_H
#define ECMASCRIPT_JS_THREAD_H

#include <atomic>
#include <sstream>

#include "ecmascript/base/aligned_struct.h"
#include "ecmascript/compiler/builtins/builtins_call_signature.h"
#include "ecmascript/compiler/common_stubs.h"
#include "ecmascript/compiler/interpreter_stub.h"
#include "ecmascript/compiler/rt_call_signature.h"
#include "ecmascript/dfx/vm_thread_control.h"
#include "ecmascript/elements.h"
#include "ecmascript/frames.h"
#include "ecmascript/global_env_constants.h"
#include "ecmascript/mem/visitor.h"

namespace panda::ecmascript {
class EcmaContext;
class EcmaVM;
class EcmaHandleScope;
class HeapRegionAllocator;
class PropertiesCache;
template<typename T>
class EcmaGlobalStorage;
class Node;
class DebugNode;
class VmThreadControl;
using WeakClearCallback = void (*)(void *);

enum class MarkStatus : uint8_t {
    READY_TO_MARK,
    MARKING,
    MARK_FINISHED,
};

enum class PGOProfilerStatus : uint8_t {
    PGO_PROFILER_DISABLE,
    PGO_PROFILER_ENABLE,
};

enum class BCStubStatus : uint8_t {
    NORMAL_BC_STUB,
    PROFILE_BC_STUB,
};

enum class StableArrayChangeKind { PROTO, NOT_PROTO };

struct BCStubEntries {
    static constexpr size_t EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT =
        kungfu::BytecodeStubCSigns::NUM_OF_ALL_NORMAL_STUBS;
    // The number of bytecodes.
    static constexpr size_t BC_HANDLER_COUNT = kungfu::BytecodeStubCSigns::LAST_VALID_OPCODE + 1;
    static constexpr size_t COUNT = kungfu::BytecodeStubCSigns::NUM_OF_STUBS;
    static_assert(EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT <= COUNT);
    Address stubEntries_[COUNT] = {0};

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address *GetAddr()
    {
        return reinterpret_cast<Address *>(stubEntries_);
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(BCStubEntries), BCStubEntries::SizeArch32, BCStubEntries::SizeArch64);

struct RTStubEntries {
    static constexpr size_t COUNT = kungfu::RuntimeStubCSigns::NUM_OF_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(RTStubEntries), RTStubEntries::SizeArch32, RTStubEntries::SizeArch64);

struct COStubEntries {
    static constexpr size_t COUNT = kungfu::CommonStubCSigns::NUM_OF_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};

struct BCDebuggerStubEntries {
    static constexpr size_t EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT =
        kungfu::BytecodeStubCSigns::NUM_OF_ALL_NORMAL_STUBS;
    static constexpr size_t COUNT = kungfu::BytecodeStubCSigns::LAST_VALID_OPCODE + 1;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }

    void SetNonexistentBCHandlerStubEntries(Address addr)
    {
        for (size_t i = EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT; i < COUNT; i++) {
            if (stubEntries_[i] == 0) {
                stubEntries_[i] = addr;
            }
        }
    }
};

struct BuiltinStubEntries {
    static constexpr size_t COUNT = kungfu::BuiltinsStubCSigns::NUM_OF_BUILTINS_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(COStubEntries), COStubEntries::SizeArch32, COStubEntries::SizeArch64);
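
// Illustrative sketch (not part of the original header): at VM startup the
// compiled stubs are registered into these per-thread tables through the
// JSThread wrappers declared below, and the interpreter then dispatches
// through them. Assuming `thread` is a live JSThread, `opcode` a valid
// bytecode index, and `addr` a compiled handler address:
//
//     thread->SetBCStubEntry(opcode, addr);            // install handler
//     Address entry = thread->GetBCStubEntry(opcode);  // dispatch target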

class JSThread {
public:
    static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2;
    static constexpr int CHECK_SAFEPOINT_BITFIELD_NUM = 8;
    static constexpr int PGO_PROFILER_BITFIELD_START = 16;
    static constexpr int BOOL_BITFIELD_NUM = 1;
    static constexpr uint32_t RESERVE_STACK_SIZE = 128;
    using MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>;
    using CheckSafePointBit = BitField<bool, 0, BOOL_BITFIELD_NUM>;
    using VMNeedSuspensionBit = BitField<bool, CHECK_SAFEPOINT_BITFIELD_NUM, BOOL_BITFIELD_NUM>;
    using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag;
    using PGOStatusBits = BitField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_START, BOOL_BITFIELD_NUM>;
    using BCStubStatusBits = PGOStatusBits::NextField<BCStubStatus, BOOL_BITFIELD_NUM>;
    using ThreadId = uint32_t;

    explicit JSThread(EcmaVM *vm);

    PUBLIC_API ~JSThread();

    EcmaVM *GetEcmaVM() const
    {
        return vm_;
    }

    static JSThread *Create(EcmaVM *vm);

    int GetNestedLevel() const
    {
        return nestedLevel_;
    }

    void SetNestedLevel(int level)
    {
        nestedLevel_ = level;
    }

    void SetLastFp(JSTaggedType *fp)
    {
        glueData_.lastFp_ = fp;
    }

    const JSTaggedType *GetLastFp() const
    {
        return glueData_.lastFp_;
    }

    const JSTaggedType *GetCurrentSPFrame() const
    {
        return glueData_.currentFrame_;
    }

    void SetCurrentSPFrame(JSTaggedType *sp)
    {
        glueData_.currentFrame_ = sp;
    }

    const JSTaggedType *GetLastLeaveFrame() const
    {
        return glueData_.leaveFrame_;
    }

    void SetLastLeaveFrame(JSTaggedType *sp)
    {
        glueData_.leaveFrame_ = sp;
    }

    const JSTaggedType *GetCurrentFrame() const;

    void SetCurrentFrame(JSTaggedType *sp);

    const JSTaggedType *GetCurrentInterpretedFrame() const;

    bool DoStackOverflowCheck(const JSTaggedType *sp);
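
    // Illustrative sketch (not part of the original header): native code that
    // re-enters interpreted code typically saves and restores the current SP
    // frame around the call. Assuming a hypothetical caller holding `thread`:
    //
    //     JSTaggedType *prevSp = const_cast<JSTaggedType *>(thread->GetCurrentSPFrame());
    //     // ... execute interpreted frames ...
    //     thread->SetCurrentSPFrame(prevSp);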

    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t *end)
    {
        glueData_.newSpaceAllocationTopAddress_ = top;
        glueData_.newSpaceAllocationEndAddress_ = end;
    }

    void SetIsStartHeapSampling(bool isStart)
    {
        glueData_.isStartHeapSampling_ = isStart ? JSTaggedValue::True() : JSTaggedValue::False();
    }

    void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
                 const RootBaseAndDerivedVisitor &derivedVisitor);

    void IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor);

    uintptr_t* PUBLIC_API ExpandHandleStorage();
    void PUBLIC_API ShrinkHandleStorage(int prevIndex);
    void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const;
    bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const;

    std::vector<std::pair<WeakClearCallback, void *>> *GetWeakNodeNativeFinalizeCallbacks()
    {
        return &weakNodeNativeFinalizeCallbacks_;
    }

    void SetException(JSTaggedValue exception);

    JSTaggedValue GetException() const
    {
        return glueData_.exception_;
    }

    bool HasPendingException() const
    {
        return !glueData_.exception_.IsHole();
    }

    void ClearException();
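
    // Illustrative sketch (not part of the original header): runtime callers
    // commonly test the pending-exception slot after any operation that may
    // throw, e.g.
    //
    //     if (thread->HasPendingException()) {
    //         return JSTaggedValue::Exception();  // propagate to the caller
    //     }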

    void SetGlobalObject(JSTaggedValue globalObject)
    {
        glueData_.globalObject_ = globalObject;
    }

    const GlobalEnvConstants *GlobalConstants() const
    {
        return glueData_.globalConst_;
    }

    const CMap<ElementsKind, ConstantIndex> &GetArrayHClassIndexMap() const
    {
        return arrayHClassIndexMap_;
    }

    void NotifyStableArrayElementsGuardians(JSHandle<JSObject> receiver, StableArrayChangeKind changeKind);

    bool IsStableArrayElementsGuardiansInvalid() const
    {
        return !glueData_.stableArrayElementsGuardians_;
    }

    void ResetGuardians();

    JSTaggedValue GetCurrentLexenv() const;

    void RegisterRTInterface(size_t id, Address addr)
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        glueData_.rtStubEntries_.Set(id, addr);
    }

    Address GetRTInterface(size_t id) const
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        return glueData_.rtStubEntries_.Get(id);
    }

    Address GetFastStubEntry(uint32_t id) const
    {
        return glueData_.coStubEntries_.Get(id);
    }

    void SetFastStubEntry(size_t id, Address entry)
    {
        glueData_.coStubEntries_.Set(id, entry);
    }

    Address GetBuiltinStubEntry(uint32_t id) const
    {
        return glueData_.builtinStubEntries_.Get(id);
    }

    void SetBuiltinStubEntry(size_t id, Address entry)
    {
        glueData_.builtinStubEntries_.Set(id, entry);
    }

    Address GetBCStubEntry(uint32_t id) const
    {
        return glueData_.bcStubEntries_.Get(id);
    }

    void SetBCStubEntry(size_t id, Address entry)
    {
        glueData_.bcStubEntries_.Set(id, entry);
    }

    void SetBCDebugStubEntry(size_t id, Address entry)
    {
        glueData_.bcDebuggerStubEntries_.Set(id, entry);
    }

    Address *GetBytecodeHandler()
    {
        return glueData_.bcStubEntries_.GetAddr();
    }

    void PUBLIC_API CheckSwitchDebuggerBCStub();
    void CheckOrSwitchPGOStubs();

    ThreadId GetThreadId() const
    {
        return id_.load(std::memory_order_relaxed);
    }

    void SetThreadId()
    {
        id_.store(JSThread::GetCurrentThreadId(), std::memory_order_relaxed);
    }

    static ThreadId GetCurrentThreadId()
    {
        return os::thread::GetCurrentThreadId();
    }

    void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor);

    PropertiesCache *GetPropertiesCache() const;

    void SetMarkStatus(MarkStatus status)
    {
        MarkStatusBits::Set(status, &glueData_.gcStateBitField_);
    }

    bool IsReadyToMark() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::READY_TO_MARK;
    }

    bool IsMarking() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::MARKING;
    }

    bool IsMarkFinished() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::MARK_FINISHED;
    }
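
    // Illustrative sketch (not part of the original header): GC-related code
    // can branch on the concurrent-marking phase recorded in gcStateBitField_,
    // e.g.
    //
    //     if (thread->IsMarking()) {
    //         // take the concurrent-marking slow path
    //     }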

    void SetPGOProfilerEnable(bool enable)
    {
        PGOProfilerStatus status =
            enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE;
        PGOStatusBits::Set(status, &glueData_.interruptVector_);
    }

    bool IsPGOProfilerEnable() const
    {
        auto status = PGOStatusBits::Decode(glueData_.interruptVector_);
        return status == PGOProfilerStatus::PGO_PROFILER_ENABLE;
    }

    void SetBCStubStatus(BCStubStatus status)
    {
        BCStubStatusBits::Set(status, &glueData_.interruptVector_);
    }

    BCStubStatus GetBCStubStatus() const
    {
        return BCStubStatusBits::Decode(glueData_.interruptVector_);
    }

    bool CheckSafepoint();

    void SetGetStackSignal(bool isParseStack)
    {
        getStackSignal_ = isParseStack;
    }

    bool GetStackSignal() const
    {
        return getStackSignal_;
    }

    void SetNeedProfiling(bool needProfiling)
    {
        needProfiling_.store(needProfiling);
    }

    void SetIsProfiling(bool isProfiling)
    {
        isProfiling_ = isProfiling;
    }

    bool GetIsProfiling()
    {
        return isProfiling_;
    }

    void SetGcState(bool gcState)
    {
        gcState_ = gcState;
    }

    bool GetGcState() const
    {
        return gcState_;
    }

    void SetRuntimeState(bool runtimeState)
    {
        runtimeState_ = runtimeState;
    }

    bool GetRuntimeState() const
    {
        return runtimeState_;
    }

    void SetCpuProfileName(std::string &profileName)
    {
        profileName_ = profileName;
    }

    void EnableAsmInterpreter()
    {
        isAsmInterpreter_ = true;
    }

    bool IsAsmInterpreter() const
    {
        return isAsmInterpreter_;
    }

    VmThreadControl *GetVmThreadControl() const
    {
        return vmThreadControl_;
    }

    static constexpr size_t GetGlueDataOffset()
    {
        return MEMBER_OFFSET(JSThread, glueData_);
    }

    uintptr_t GetGlueAddr() const
    {
        return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset();
    }

    static JSThread *GlueToJSThread(uintptr_t glue)
    {
        // Be very careful when modifying this: it must remain the exact inverse of GetGlueAddr().
        return reinterpret_cast<JSThread *>(glue - GetGlueDataOffset());
    }
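
    // Illustrative round trip (not part of the original header) between a
    // JSThread and its "glue" address, using only the two functions above:
    //
    //     uintptr_t glue = thread->GetGlueAddr();
    //     JSThread *same = JSThread::GlueToJSThread(glue);  // same == thread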

    void SetCheckSafePointStatus()
    {
        ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
        CheckSafePointBit::Set(true, &glueData_.interruptVector_);
    }

    void ResetCheckSafePointStatus()
    {
        ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
        CheckSafePointBit::Set(false, &glueData_.interruptVector_);
    }

    void SetVMNeedSuspension(bool flag)
    {
        VMNeedSuspensionBit::Set(flag, &glueData_.interruptVector_);
    }

    bool VMNeedSuspension()
    {
        return VMNeedSuspensionBit::Decode(glueData_.interruptVector_);
    }

    void SetVMSuspended(bool flag)
    {
        VMHasSuspendedBit::Set(flag, &glueData_.interruptVector_);
    }

    bool IsVMSuspended()
    {
        return VMHasSuspendedBit::Decode(glueData_.interruptVector_);
    }

    static uintptr_t GetCurrentStackPosition()
    {
        return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
    }

    bool IsLegalAsmSp(uintptr_t sp) const;

    bool IsLegalThreadSp(uintptr_t sp) const;

    bool IsLegalSp(uintptr_t sp) const;

    void SetCheckAndCallEnterState(bool state)
    {
        finalizationCheckState_ = state;
    }

    bool GetCheckAndCallEnterState() const
    {
        return finalizationCheckState_;
    }

    uint64_t GetStackStart() const
    {
        return glueData_.stackStart_;
    }

    uint64_t GetStackLimit() const
    {
        return glueData_.stackLimit_;
    }

    GlobalEnv *GetGlueGlobalEnv()
    {
        return glueData_.glueGlobalEnv_;
    }

    void SetGlueGlobalEnv(GlobalEnv *global)
    {
        ASSERT(global != nullptr);
        glueData_.glueGlobalEnv_ = global;
    }

    inline uintptr_t NewGlobalHandle(JSTaggedType value)
    {
        return newGlobalHandle_(value);
    }

    inline void DisposeGlobalHandle(uintptr_t nodeAddr)
    {
        disposeGlobalHandle_(nodeAddr);
    }

    inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr,
                             WeakClearCallback nativeFinalizeCallBack = nullptr)
    {
        return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
    }

    inline uintptr_t ClearWeak(uintptr_t nodeAddr)
    {
        return clearWeak_(nodeAddr);
    }

    inline bool IsWeak(uintptr_t addr) const
    {
        return isWeak_(addr);
    }
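
    // Illustrative sketch (not part of the original header): the callbacks
    // behind these wrappers are installed elsewhere in the VM; a typical
    // round trip, assuming `thread` and a JSTaggedValue `value`, looks like:
    //
    //     uintptr_t node = thread->NewGlobalHandle(value.GetRawData());
    //     // ... value is now reachable as a GC root ...
    //     thread->DisposeGlobalHandle(node);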

    void EnableCrossThreadExecution()
    {
        glueData_.allowCrossThreadExecution_ = true;
    }

    bool IsCrossThreadExecutionEnable() const
    {
        return glueData_.allowCrossThreadExecution_;
    }

    bool GetFrameDroppedBit()
    {
        return isFrameDropped_;
    }

    void SetFrameDroppedBit()
    {
        isFrameDropped_ = true;
    }

    void ResetFrameDroppedBit()
    {
        isFrameDropped_ = false;
    }

    bool IsStartGlobalLeakCheck() const;
    bool EnableGlobalObjectLeakCheck() const;
    bool EnableGlobalPrimitiveLeakCheck() const;
    void WriteToStackTraceFd(std::ostringstream &buffer) const;
    void SetStackTraceFd(int32_t fd);
    void CloseStackTraceFd();

    uint32_t IncreaseGlobalNumberCount()
    {
        return ++globalNumberCount_;
    }

    struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
                                                 BCStubEntries,
                                                 JSTaggedValue,
                                                 JSTaggedValue,
                                                 base::AlignedBool,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 RTStubEntries,
                                                 COStubEntries,
                                                 BuiltinStubEntries,
                                                 BCDebuggerStubEntries,
                                                 base::AlignedUint64,
                                                 base::AlignedPointer,
                                                 base::AlignedUint64,
                                                 base::AlignedUint64,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedUint64,
                                                 base::AlignedUint64,
                                                 JSTaggedValue> {
        enum class Index : size_t {
            BCStubEntriesIndex = 0,
            ExceptionIndex,
            GlobalObjIndex,
            StableArrayElementsGuardiansIndex,
            CurrentFrameIndex,
            LeaveFrameIndex,
            LastFpIndex,
            NewSpaceAllocationTopAddressIndex,
            NewSpaceAllocationEndAddressIndex,
            RTStubEntriesIndex,
            COStubEntriesIndex,
            BuiltinsStubEntriesIndex,
            BCDebuggerStubEntriesIndex,
            StateBitFieldIndex,
            FrameBaseIndex,
            StackStartIndex,
            StackLimitIndex,
            GlueGlobalEnvIndex,
            GlobalConstIndex,
            AllowCrossThreadExecutionIndex,
            InterruptVectorIndex,
            IsStartHeapSamplingIndex,
            NumOfMembers
        };
        static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);

        static size_t GetExceptionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32);
        }

        static size_t GetGlobalObjOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalObjIndex)>(isArch32);
        }

        static size_t GetStableArrayElementsGuardiansOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StableArrayElementsGuardiansIndex)>(isArch32);
        }

        static size_t GetGlobalConstOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
        }

        static size_t GetStateBitFieldOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StateBitFieldIndex)>(isArch32);
        }

        static size_t GetCurrentFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32);
        }

        static size_t GetLeaveFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32);
        }

        static size_t GetLastFpOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetBCStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BCStubEntriesIndex)>(isArch32);
        }

        static size_t GetRTStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
        }

        static size_t GetCOStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
        }

        static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BCDebuggerStubEntriesIndex)>(isArch32);
        }

        static size_t GetFrameBaseOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
        }

        static size_t GetStackLimitOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
        }

        static size_t GetGlueGlobalEnvOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlueGlobalEnvIndex)>(isArch32);
        }

        static size_t GetAllowCrossThreadExecutionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::AllowCrossThreadExecutionIndex)>(isArch32);
        }

        static size_t GetInterruptVectorOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::InterruptVectorIndex)>(isArch32);
        }

        static size_t GetIsStartHeapSamplingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsStartHeapSamplingIndex)>(isArch32);
        }

        alignas(EAS) BCStubEntries bcStubEntries_;
        alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};
        alignas(EAS) JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
        alignas(EAS) bool stableArrayElementsGuardians_ {true};
        alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
        alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
        alignas(EAS) JSTaggedType *lastFp_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) RTStubEntries rtStubEntries_;
        alignas(EAS) COStubEntries coStubEntries_;
        alignas(EAS) BuiltinStubEntries builtinStubEntries_;
        alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_;
        alignas(EAS) volatile uint64_t gcStateBitField_ {0ULL};
        alignas(EAS) JSTaggedType *frameBase_ {nullptr};
        alignas(EAS) uint64_t stackStart_ {0};
        alignas(EAS) uint64_t stackLimit_ {0};
        alignas(EAS) GlobalEnv *glueGlobalEnv_;
        alignas(EAS) GlobalEnvConstants *globalConst_;
        alignas(EAS) bool allowCrossThreadExecution_ {false};
        alignas(EAS) volatile uint64_t interruptVector_ {0};
        alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
    };
    STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);
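
    // Illustrative sketch (not part of the original header): compiled stubs
    // receive the glue pointer as their first argument and address GlueData
    // members through these offset helpers, e.g. on a 64-bit target
    // (isArch32 == false):
    //
    //     size_t off = GlueData::GetExceptionOffset(false);
    //     // the pending-exception slot lives at address glue + off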

    void PushContext(EcmaContext *context);
    void PopContext();

    EcmaContext *GetCurrentEcmaContext() const
    {
        return currentContext_;
    }

    void SwitchCurrentContext(EcmaContext *currentContext, bool isInIterate = false);

    CVector<EcmaContext *> GetEcmaContexts()
    {
        return contexts_;
    }

    bool EraseContext(EcmaContext *context);

    const GlobalEnvConstants *GetFirstGlobalConst() const;
    bool IsAllContextsInitialized() const;

private:
    NO_COPY_SEMANTIC(JSThread);
    NO_MOVE_SEMANTIC(JSThread);

    void SetGlobalConst(GlobalEnvConstants *globalConst)
    {
        glueData_.globalConst_ = globalConst;
    }

    void SetCurrentEcmaContext(EcmaContext *context)
    {
        currentContext_ = context;
    }

    void SetArrayHClassIndexMap(const CMap<ElementsKind, ConstantIndex> &map)
    {
        arrayHClassIndexMap_ = map;
    }

    void DumpStack() DUMP_API_ATTR;

    static size_t GetAsmStackLimit();

    static bool IsMainThread();

    static constexpr size_t DEFAULT_MAX_SYSTEM_STACK_SIZE = 8_MB;

    GlueData glueData_;
    std::atomic<ThreadId> id_;
    EcmaVM *vm_ {nullptr};

    // MM: handles, global-handles, and aot-stubs.
    int nestedLevel_ = 0;
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeNativeFinalizeCallbacks_ {};

    EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
    EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
    int32_t stackTraceFd_ {-1};

    std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
    std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
    std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack,
                            WeakClearCallback nativeFinalizeCallBack)> setWeak_;
    std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
    std::function<bool(uintptr_t addr)> isWeak_;
    uint32_t globalNumberCount_ {0};

    // Run-time state
    bool getStackSignal_ {false};
    bool runtimeState_ {false};
    bool isAsmInterpreter_ {false};
    VmThreadControl *vmThreadControl_ {nullptr};

    // CpuProfiler
    bool isProfiling_ {false};
    bool gcState_ {false};
    std::atomic_bool needProfiling_ {false};
    std::string profileName_ {""};

    bool finalizationCheckState_ {false};

    bool isFrameDropped_ {false};

    CMap<ElementsKind, ConstantIndex> arrayHClassIndexMap_;

    CVector<EcmaContext *> contexts_;
    EcmaContext *currentContext_ {nullptr};

    friend class GlobalHandleCollection;
    friend class EcmaVM;
    friend class EcmaContext;
};
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_JS_THREAD_H