/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_JS_THREAD_H
#define ECMASCRIPT_JS_THREAD_H

#include <atomic>
#include <sstream>

#include "ecmascript/base/aligned_struct.h"
#include "ecmascript/compiler/builtins/builtins_call_signature.h"
#include "ecmascript/compiler/common_stubs.h"
#include "ecmascript/compiler/interpreter_stub.h"
#include "ecmascript/compiler/rt_call_signature.h"
#include "ecmascript/dfx/vm_thread_control.h"
#include "ecmascript/elements.h"
#include "ecmascript/frames.h"
#include "ecmascript/global_env_constants.h"
#include "ecmascript/mem/visitor.h"

namespace panda::ecmascript {
class EcmaContext;
class EcmaVM;
class EcmaHandleScope;
class HeapRegionAllocator;
class PropertiesCache;
template<typename T>
class EcmaGlobalStorage;
class Node;
class DebugNode;
class VmThreadControl;
using WeakClearCallback = void (*)(void *);

enum class MarkStatus : uint8_t {
    READY_TO_MARK,
    MARKING,
    MARK_FINISHED,
};

enum class PGOProfilerStatus : uint8_t {
    PGO_PROFILER_DISABLE,
    PGO_PROFILER_ENABLE,
};

enum class BCStubStatus : uint8_t {
    NORMAL_BC_STUB,
    PROFILE_BC_STUB,
};

enum class StableArrayChangeKind { PROTO, NOT_PROTO };

// Table of bytecode handler stub entry addresses, indexed by opcode.
struct BCStubEntries {
    static constexpr size_t EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT =
        kungfu::BytecodeStubCSigns::NUM_OF_ALL_NORMAL_STUBS;
    // The number of bytecodes.
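    // (LAST_VALID_OPCODE is the highest valid opcode, hence the + 1 below.)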
    static constexpr size_t BC_HANDLER_COUNT = kungfu::BytecodeStubCSigns::LAST_VALID_OPCODE + 1;
    static constexpr size_t COUNT = kungfu::BytecodeStubCSigns::NUM_OF_STUBS;
    static_assert(EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT <= COUNT);
    Address stubEntries_[COUNT] = {0};

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address* GetAddr()
    {
        return reinterpret_cast<Address*>(stubEntries_);
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(BCStubEntries), BCStubEntries::SizeArch32, BCStubEntries::SizeArch64);

// Table of runtime stub entry addresses.
struct RTStubEntries {
    static constexpr size_t COUNT = kungfu::RuntimeStubCSigns::NUM_OF_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(RTStubEntries), RTStubEntries::SizeArch32, RTStubEntries::SizeArch64);

// Table of common stub entry addresses.
struct COStubEntries {
    static constexpr size_t COUNT = kungfu::CommonStubCSigns::NUM_OF_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};

// Table of bytecode handler stubs used while the debugger is attached.
struct BCDebuggerStubEntries {
    static constexpr size_t EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT =
        kungfu::BytecodeStubCSigns::NUM_OF_ALL_NORMAL_STUBS;
    static constexpr size_t COUNT = kungfu::BytecodeStubCSigns::LAST_VALID_OPCODE + 1;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }

    // Point every handler slot that has no dedicated stub at the given fallback address.
    void SetNonexistentBCHandlerStubEntries(Address addr)
    {
        for (size_t i = EXISTING_BC_HANDLER_STUB_ENTRIES_COUNT; i < COUNT; i++) {
            if (stubEntries_[i] == 0) {
                stubEntries_[i] = addr;
            }
        }
    }
};

// Table of builtins stub entry addresses.
struct BuiltinStubEntries {
    static constexpr size_t COUNT = kungfu::BuiltinsStubCSigns::NUM_OF_BUILTINS_STUBS;
    Address stubEntries_[COUNT];

    static constexpr size_t SizeArch32 = sizeof(uint32_t) * COUNT;
    static constexpr size_t SizeArch64 = sizeof(uint64_t) * COUNT;

    void Set(size_t index, Address addr)
    {
        ASSERT(index < COUNT);
        stubEntries_[index] = addr;
    }

    Address Get(size_t index) const
    {
        ASSERT(index < COUNT);
        return stubEntries_[index];
    }
};
STATIC_ASSERT_EQ_ARCH(sizeof(COStubEntries), COStubEntries::SizeArch32, COStubEntries::SizeArch64);
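// Each *StubEntries table above is a flat array of raw code addresses that
// generated code indexes by call-signature id. A minimal usage sketch
// (illustrative only; `opcode` and `codeAddr` stand in for real values):
//
//     BCStubEntries entries;
//     entries.Set(opcode, codeAddr);        // opcode < BCStubEntries::COUNT
//     Address entry = entries.Get(opcode);  // dispatch then jumps to entry
//
// Address is assumed to be pointer-sized here, which is why SizeArch32 and
// SizeArch64 describe the table footprints checked by STATIC_ASSERT_EQ_ARCH.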
class JSThread {
public:
    static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2;
    static constexpr int CHECK_SAFEPOINT_BITFIELD_NUM = 8;
    static constexpr int PGO_PROFILER_BITFIELD_START = 16;
    static constexpr int BOOL_BITFIELD_NUM = 1;
    static constexpr uint32_t RESERVE_STACK_SIZE = 128;
    // Bit layout: MarkStatusBits live in glueData_.gcStateBitField_ (bits 0-1);
    // the remaining flags live in glueData_.interruptVector_ (safepoint check
    // at bit 0, VM suspension request/acknowledge at bits 8-9, PGO profiler
    // status at bit 16, bytecode stub status at bit 17).
    using MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>;
    using CheckSafePointBit = BitField<bool, 0, BOOL_BITFIELD_NUM>;
    using VMNeedSuspensionBit = BitField<bool, CHECK_SAFEPOINT_BITFIELD_NUM, BOOL_BITFIELD_NUM>;
    using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag;
    using PGOStatusBits = BitField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_START, BOOL_BITFIELD_NUM>;
    using BCStubStatusBits = PGOStatusBits::NextField<BCStubStatus, BOOL_BITFIELD_NUM>;
    using ThreadId = uint32_t;

    explicit JSThread(EcmaVM *vm);

    PUBLIC_API ~JSThread();

    EcmaVM *GetEcmaVM() const
    {
        return vm_;
    }

    static JSThread *Create(EcmaVM *vm);

    int GetNestedLevel() const
    {
        return nestedLevel_;
    }

    void SetNestedLevel(int level)
    {
        nestedLevel_ = level;
    }

    void SetLastFp(JSTaggedType *fp)
    {
        glueData_.lastFp_ = fp;
    }

    const JSTaggedType *GetLastFp() const
    {
        return glueData_.lastFp_;
    }

    const JSTaggedType *GetCurrentSPFrame() const
    {
        return glueData_.currentFrame_;
    }

    void SetCurrentSPFrame(JSTaggedType *sp)
    {
        glueData_.currentFrame_ = sp;
    }

    const JSTaggedType *GetLastLeaveFrame() const
    {
        return glueData_.leaveFrame_;
    }

    void SetLastLeaveFrame(JSTaggedType *sp)
    {
        glueData_.leaveFrame_ = sp;
    }

    const JSTaggedType *GetCurrentFrame() const;

    void SetCurrentFrame(JSTaggedType *sp);

    const JSTaggedType *GetCurrentInterpretedFrame() const;

    bool DoStackOverflowCheck(const JSTaggedType *sp);

    bool DoAsmStackOverflowCheck();

    bool DoStackLimitCheck();

    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t *end)
    {
        glueData_.newSpaceAllocationTopAddress_ = top;
        glueData_.newSpaceAllocationEndAddress_ = end;
    }

    void SetIsStartHeapSampling(bool isStart)
    {
        glueData_.isStartHeapSampling_ = isStart ? JSTaggedValue::True() : JSTaggedValue::False();
    }
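    // Iterate/IterateHandleWithCheck below walk this thread's GC roots with
    // the supplied visitors (IterateHandleWithCheck additionally validates
    // handles); the precise root set is defined in the implementation file.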
    void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
        const RootBaseAndDerivedVisitor &derivedVisitor);

    void IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor);

    uintptr_t* PUBLIC_API ExpandHandleStorage();
    void PUBLIC_API ShrinkHandleStorage(int prevIndex);
    void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const;
    bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const;

    std::vector<std::pair<WeakClearCallback, void *>> *GetWeakNodeNativeFinalizeCallbacks()
    {
        return &weakNodeNativeFinalizeCallbacks_;
    }

    void SetException(JSTaggedValue exception);

    JSTaggedValue GetException() const
    {
        return glueData_.exception_;
    }

    bool HasPendingException() const
    {
        return !glueData_.exception_.IsHole();
    }

    void ClearException();

    void SetGlobalObject(JSTaggedValue globalObject)
    {
        glueData_.globalObject_ = globalObject;
    }

    const GlobalEnvConstants *GlobalConstants() const
    {
        return glueData_.globalConst_;
    }

    const CMap<ElementsKind, ConstantIndex> &GetArrayHClassIndexMap() const
    {
        return arrayHClassIndexMap_;
    }

    void NotifyStableArrayElementsGuardians(JSHandle<JSObject> receiver, StableArrayChangeKind changeKind);

    bool IsStableArrayElementsGuardiansInvalid() const
    {
        return !glueData_.stableArrayElementsGuardians_;
    }

    void ResetGuardians();

    JSTaggedValue GetCurrentLexenv() const;

    void RegisterRTInterface(size_t id, Address addr)
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        glueData_.rtStubEntries_.Set(id, addr);
    }

    Address GetRTInterface(size_t id) const
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        return glueData_.rtStubEntries_.Get(id);
    }

    Address GetFastStubEntry(uint32_t id) const
    {
        return glueData_.coStubEntries_.Get(id);
    }

    void SetFastStubEntry(size_t id, Address entry)
    {
        glueData_.coStubEntries_.Set(id, entry);
    }

    Address GetBuiltinStubEntry(uint32_t id) const
    {
        return glueData_.builtinStubEntries_.Get(id);
    }

    void SetBuiltinStubEntry(size_t id, Address entry)
    {
        glueData_.builtinStubEntries_.Set(id, entry);
    }

    Address GetBCStubEntry(uint32_t id) const
    {
        return glueData_.bcStubEntries_.Get(id);
    }

    void SetBCStubEntry(size_t id, Address entry)
    {
        glueData_.bcStubEntries_.Set(id, entry);
    }

    void SetBCDebugStubEntry(size_t id, Address entry)
    {
        glueData_.bcDebuggerStubEntries_.Set(id, entry);
    }

    Address *GetBytecodeHandler()
    {
        return glueData_.bcStubEntries_.GetAddr();
    }

    void PUBLIC_API CheckSwitchDebuggerBCStub();
    void CheckOrSwitchPGOStubs();

    ThreadId GetThreadId() const
    {
        return id_.load(std::memory_order_relaxed);
    }
    void SetThreadId()
    {
        id_.store(JSThread::GetCurrentThreadId(), std::memory_order_relaxed);
    }

    static ThreadId GetCurrentThreadId()
    {
        return os::thread::GetCurrentThreadId();
    }

    void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor);

    PropertiesCache *GetPropertiesCache() const;

    void SetMarkStatus(MarkStatus status)
    {
        MarkStatusBits::Set(status, &glueData_.gcStateBitField_);
    }

    bool IsReadyToMark() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::READY_TO_MARK;
    }

    bool IsMarking() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::MARKING;
    }

    bool IsMarkFinished() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::MARK_FINISHED;
    }

    void SetPGOProfilerEnable(bool enable)
    {
        PGOProfilerStatus status =
            enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE;
        PGOStatusBits::Set(status, &glueData_.interruptVector_);
    }

    bool IsPGOProfilerEnable() const
    {
        auto status = PGOStatusBits::Decode(glueData_.interruptVector_);
        return status == PGOProfilerStatus::PGO_PROFILER_ENABLE;
    }

    void SetBCStubStatus(BCStubStatus status)
    {
        BCStubStatusBits::Set(status, &glueData_.interruptVector_);
    }

    BCStubStatus GetBCStubStatus() const
    {
        return BCStubStatusBits::Decode(glueData_.interruptVector_);
    }

    bool CheckSafepoint();

    void SetGetStackSignal(bool isParseStack)
    {
        getStackSignal_ = isParseStack;
    }

    bool GetStackSignal() const
    {
        return getStackSignal_;
    }

    void SetNeedProfiling(bool needProfiling)
    {
        needProfiling_.store(needProfiling);
    }

    void SetIsProfiling(bool isProfiling)
    {
        isProfiling_ = isProfiling;
    }

    bool GetIsProfiling() const
    {
        return isProfiling_;
    }

    void SetGcState(bool gcState)
    {
        gcState_ = gcState;
    }

    bool GetGcState() const
    {
        return gcState_;
    }

    void SetRuntimeState(bool runtimeState)
    {
        runtimeState_ = runtimeState;
    }

    bool GetRuntimeState() const
    {
        return runtimeState_;
    }

    void SetCpuProfileName(const std::string &profileName)
    {
        profileName_ = profileName;
    }

    void EnableAsmInterpreter()
    {
        isAsmInterpreter_ = true;
    }

    bool IsAsmInterpreter() const
    {
        return isAsmInterpreter_;
    }

    VmThreadControl *GetVmThreadControl() const
    {
        return vmThreadControl_;
    }

    static constexpr size_t GetGlueDataOffset()
    {
        return MEMBER_OFFSET(JSThread, glueData_);
    }

    uintptr_t GetGlueAddr() const
    {
        return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset();
    }
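    // The "glue" value handed to compiled stubs is just the address of
    // glueData_ inside this JSThread, so the mapping can be inverted.
    // Illustrative round trip:
    //
    //     uintptr_t glue = thread->GetGlueAddr();
    //     JSThread *same = JSThread::GlueToJSThread(glue);  // same == thread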
    static JSThread *GlueToJSThread(uintptr_t glue)
    {
        // Be very careful when modifying this: it must mirror GetGlueAddr().
        return reinterpret_cast<JSThread *>(glue - GetGlueDataOffset());
    }

    void SetCheckSafePointStatus()
    {
        ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
        CheckSafePointBit::Set(true, &glueData_.interruptVector_);
    }

    void ResetCheckSafePointStatus()
    {
        ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
        CheckSafePointBit::Set(false, &glueData_.interruptVector_);
    }

    void SetVMNeedSuspension(bool flag)
    {
        VMNeedSuspensionBit::Set(flag, &glueData_.interruptVector_);
    }

    bool VMNeedSuspension() const
    {
        return VMNeedSuspensionBit::Decode(glueData_.interruptVector_);
    }

    void SetVMSuspended(bool flag)
    {
        VMHasSuspendedBit::Set(flag, &glueData_.interruptVector_);
    }

    bool IsVMSuspended() const
    {
        return VMHasSuspendedBit::Decode(glueData_.interruptVector_);
    }

    static uintptr_t GetCurrentStackPosition()
    {
        return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
    }

    bool IsLegalAsmSp(uintptr_t sp) const;

    bool IsLegalThreadSp(uintptr_t sp) const;

    bool IsLegalSp(uintptr_t sp) const;

    void SetCheckAndCallEnterState(bool state)
    {
        finalizationCheckState_ = state;
    }

    bool GetCheckAndCallEnterState() const
    {
        return finalizationCheckState_;
    }

    uint64_t GetStackStart() const
    {
        return glueData_.stackStart_;
    }

    uint64_t GetStackLimit() const
    {
        return glueData_.stackLimit_;
    }

    GlobalEnv *GetGlueGlobalEnv()
    {
        return glueData_.glueGlobalEnv_;
    }

    void SetGlueGlobalEnv(GlobalEnv *global)
    {
        ASSERT(global != nullptr);
        glueData_.glueGlobalEnv_ = global;
    }

    inline uintptr_t NewGlobalHandle(JSTaggedType value)
    {
        return newGlobalHandle_(value);
    }

    inline void DisposeGlobalHandle(uintptr_t nodeAddr)
    {
        disposeGlobalHandle_(nodeAddr);
    }

    inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr,
                             WeakClearCallback nativeFinalizeCallBack = nullptr)
    {
        return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
    }

    inline uintptr_t ClearWeak(uintptr_t nodeAddr)
    {
        return clearWeak_(nodeAddr);
    }

    inline bool IsWeak(uintptr_t addr) const
    {
        return isWeak_(addr);
    }

    void EnableCrossThreadExecution()
    {
        glueData_.allowCrossThreadExecution_ = true;
    }

    bool IsCrossThreadExecutionEnable() const
    {
        return glueData_.allowCrossThreadExecution_;
    }

    bool GetFrameDroppedBit() const
    {
        return isFrameDropped_;
    }

    void SetFrameDroppedBit()
    {
        isFrameDropped_ = true;
    }
    void ResetFrameDroppedBit()
    {
        isFrameDropped_ = false;
    }

    bool IsStartGlobalLeakCheck() const;
    bool EnableGlobalObjectLeakCheck() const;
    bool EnableGlobalPrimitiveLeakCheck() const;
    void WriteToStackTraceFd(std::ostringstream &buffer) const;
    void SetStackTraceFd(int32_t fd);
    void CloseStackTraceFd();

    uint32_t IncreaseGlobalNumberCount()
    {
        return ++globalNumberCount_;
    }

    // Data shared with compiled code ("glue"). base::AlignedStruct lays the
    // members out so their offsets can be computed for both 32-bit and
    // 64-bit targets; see the Get*Offset helpers below.
    struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
                                                 BCStubEntries,
                                                 JSTaggedValue,
                                                 JSTaggedValue,
                                                 base::AlignedBool,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 RTStubEntries,
                                                 COStubEntries,
                                                 BuiltinStubEntries,
                                                 BCDebuggerStubEntries,
                                                 base::AlignedUint64,
                                                 base::AlignedPointer,
                                                 base::AlignedUint64,
                                                 base::AlignedUint64,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedUint64,
                                                 base::AlignedUint64,
                                                 JSTaggedValue> {
        enum class Index : size_t {
            BCStubEntriesIndex = 0,
            ExceptionIndex,
            GlobalObjIndex,
            StableArrayElementsGuardiansIndex,
            CurrentFrameIndex,
            LeaveFrameIndex,
            LastFpIndex,
            NewSpaceAllocationTopAddressIndex,
            NewSpaceAllocationEndAddressIndex,
            RTStubEntriesIndex,
            COStubEntriesIndex,
            BuiltinsStubEntriesIndex,
            BCDebuggerStubEntriesIndex,
            StateBitFieldIndex,
            FrameBaseIndex,
            StackStartIndex,
            StackLimitIndex,
            GlueGlobalEnvIndex,
            GlobalConstIndex,
            AllowCrossThreadExecutionIndex,
            InterruptVectorIndex,
            IsStartHeapSamplingIndex,
            NumOfMembers
        };
        static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);

        static size_t GetExceptionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32);
        }

        static size_t GetGlobalObjOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalObjIndex)>(isArch32);
        }

        static size_t GetStableArrayElementsGuardiansOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StableArrayElementsGuardiansIndex)>(isArch32);
        }

        static size_t GetGlobalConstOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
        }

        static size_t GetStateBitFieldOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StateBitFieldIndex)>(isArch32);
        }

        static size_t GetCurrentFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32);
        }

        static size_t GetLeaveFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32);
        }

        static size_t GetLastFpOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32);
        }
        static size_t GetBCStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BCStubEntriesIndex)>(isArch32);
        }

        static size_t GetRTStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
        }

        static size_t GetCOStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
        }

        static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BCDebuggerStubEntriesIndex)>(isArch32);
        }

        static size_t GetFrameBaseOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
        }

        static size_t GetStackLimitOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
        }

        static size_t GetGlueGlobalEnvOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlueGlobalEnvIndex)>(isArch32);
        }

        static size_t GetAllowCrossThreadExecutionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::AllowCrossThreadExecutionIndex)>(isArch32);
        }

        static size_t GetInterruptVectorOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::InterruptVectorIndex)>(isArch32);
        }

        static size_t GetIsStartHeapSamplingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsStartHeapSamplingIndex)>(isArch32);
        }

        alignas(EAS) BCStubEntries bcStubEntries_;
        alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};
        alignas(EAS) JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
        alignas(EAS) bool stableArrayElementsGuardians_ {true};
        alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
        alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
        alignas(EAS) JSTaggedType *lastFp_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) RTStubEntries rtStubEntries_;
        alignas(EAS) COStubEntries coStubEntries_;
        alignas(EAS) BuiltinStubEntries builtinStubEntries_;
        alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_;
        alignas(EAS) volatile uint64_t gcStateBitField_ {0ULL};
        alignas(EAS) JSTaggedType *frameBase_ {nullptr};
        alignas(EAS) uint64_t stackStart_ {0};
        alignas(EAS) uint64_t stackLimit_ {0};
        alignas(EAS) GlobalEnv *glueGlobalEnv_ {nullptr};
        alignas(EAS) GlobalEnvConstants *globalConst_ {nullptr};
        alignas(EAS) bool allowCrossThreadExecution_ {false};
        alignas(EAS) volatile uint64_t interruptVector_ {0};
        alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
    };
    STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);
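    // GlueData members are accessed from compiled code at fixed byte offsets,
    // so the Index enum, the AlignedStruct type list, and the member
    // declaration order must stay in sync; the static_assert on NumOfMembers
    // and STATIC_ASSERT_EQ_ARCH above guard that invariant. Illustrative
    // offset lookup:
    //
    //     size_t off = GlueData::GetExceptionOffset(false);  // 64-bit layout
    //     // a stub then loads the pending exception from glue + off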
    void PushContext(EcmaContext *context);
    void PopContext();

    EcmaContext *GetCurrentEcmaContext() const
    {
        return currentContext_;
    }

    void SwitchCurrentContext(EcmaContext *currentContext, bool isInIterate = false);

    CVector<EcmaContext *> GetEcmaContexts()
    {
        return contexts_;
    }

    bool EraseContext(EcmaContext *context);

    const GlobalEnvConstants *GetFirstGlobalConst() const;
    bool IsAllContextsInitialized() const;

private:
    NO_COPY_SEMANTIC(JSThread);
    NO_MOVE_SEMANTIC(JSThread);

    void SetGlobalConst(GlobalEnvConstants *globalConst)
    {
        glueData_.globalConst_ = globalConst;
    }

    void SetCurrentEcmaContext(EcmaContext *context)
    {
        currentContext_ = context;
    }

    void SetArrayHClassIndexMap(const CMap<ElementsKind, ConstantIndex> &map)
    {
        arrayHClassIndexMap_ = map;
    }

    void DumpStack() DUMP_API_ATTR;

    static size_t GetAsmStackLimit();

    static bool IsMainThread();

    static constexpr size_t DEFAULT_MAX_SYSTEM_STACK_SIZE = 8_MB;

    GlueData glueData_;
    std::atomic<ThreadId> id_ {0};
    EcmaVM *vm_ {nullptr};

    // MM: handles, global-handles, and aot-stubs.
    int nestedLevel_ = 0;
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeNativeFinalizeCallbacks_ {};

    EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
    EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
    int32_t stackTraceFd_ {-1};

    std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
    std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
    std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack,
                            WeakClearCallback nativeFinalizeCallBack)> setWeak_;
    std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
    std::function<bool(uintptr_t addr)> isWeak_;
    uint32_t globalNumberCount_ {0};

    // Run-time state
    bool getStackSignal_ {false};
    bool runtimeState_ {false};
    bool isAsmInterpreter_ {false};
    VmThreadControl *vmThreadControl_ {nullptr};

    // CpuProfiler
    bool isProfiling_ {false};
    bool gcState_ {false};
    std::atomic_bool needProfiling_ {false};
    std::string profileName_ {""};

    bool finalizationCheckState_ {false};

    bool isFrameDropped_ {false};

    CMap<ElementsKind, ConstantIndex> arrayHClassIndexMap_;

    CVector<EcmaContext *> contexts_;
    EcmaContext *currentContext_ {nullptr};

    friend class GlobalHandleCollection;
    friend class EcmaVM;
    friend class EcmaContext;
};
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_JS_THREAD_H