/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_JS_THREAD_H
#define ECMASCRIPT_JS_THREAD_H

#include <atomic>
#include <sstream>
#include <string>

#include "ecmascript/base/aligned_struct.h"
#include "ecmascript/builtin_entries.h"
#include "ecmascript/elements.h"
#include "ecmascript/frames.h"
#include "ecmascript/global_env_constants.h"
#include "ecmascript/js_object_resizing_strategy.h"
#include "ecmascript/js_tagged_value.h"
#include "ecmascript/js_thread_hclass_entries.h"
#include "ecmascript/js_thread_stub_entries.h"
#include "ecmascript/mem/visitor.h"

namespace panda::ecmascript {
class EcmaContext;
class EcmaVM;
class EcmaHandleScope;
class HeapRegionAllocator;
class PropertiesCache;
template<typename T>
class EcmaGlobalStorage;
class Node;
class SingleCharTable;
class DebugNode;
class VmThreadControl;
using WeakClearCallback = void (*)(void *);

enum class MarkStatus : uint8_t {
    READY_TO_MARK,
    MARKING,
    MARK_FINISHED,
};

enum class PGOProfilerStatus : uint8_t {
    PGO_PROFILER_DISABLE,
    PGO_PROFILER_ENABLE,
};

enum class BCStubStatus : uint8_t {
    NORMAL_BC_STUB,
    PROFILE_BC_STUB,
    JIT_PROFILE_BC_STUB,
};

enum class StableArrayChangeKind { PROTO, NOT_PROTO };

static constexpr uint32_t MAIN_THREAD_INDEX = 0;

class JSThread {
public:
    static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2;
    static constexpr int CHECK_SAFEPOINT_BITFIELD_NUM = 8;
    static constexpr int PGO_PROFILER_BITFIELD_START = 16;
    static constexpr int BOOL_BITFIELD_NUM = 1;
    static constexpr int BCSTUBSTATUS_BITFIELD_NUM = 2;
    static constexpr uint32_t RESERVE_STACK_SIZE = 128;
    using MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>;
    using CheckSafePointBit = BitField<bool, 0, BOOL_BITFIELD_NUM>;
    using VMNeedSuspensionBit = BitField<bool, CHECK_SAFEPOINT_BITFIELD_NUM, BOOL_BITFIELD_NUM>;
    using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag;
    using VMNeedTerminationBit = VMHasSuspendedBit::NextFlag;
    using VMHasTerminatedBit = VMNeedTerminationBit::NextFlag;
    using InstallMachineCodeBit = VMHasTerminatedBit::NextFlag;
    using PGOStatusBits = BitField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_START, BOOL_BITFIELD_NUM>;
    using BCStubStatusBits = PGOStatusBits::NextField<BCStubStatus, BCSTUBSTATUS_BITFIELD_NUM>;
    using ThreadId = uint32_t;
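
    // Illustrative layout summary (derived from the BitField typedefs above, not an authoritative
    // statement): MarkStatusBits lives in the low bits of glueData_.gcStateBitField_, while the
    // remaining flags pack into glueData_.interruptVector_ roughly as
    //   bit 0        CheckSafePointBit
    //   bit 8        VMNeedSuspensionBit
    //   bits 9..12   VMHasSuspendedBit / VMNeedTerminationBit / VMHasTerminatedBit /
    //                InstallMachineCodeBit (NextFlag chain)
    //   bit 16       PGOStatusBits
    //   bits 17..18  BCStubStatusBits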

    enum FrameDroppedState {
        StateFalse = 0,
        StateTrue,
        StatePending
    };

    explicit JSThread(EcmaVM *vm);

    PUBLIC_API ~JSThread();

    EcmaVM *GetEcmaVM() const
    {
        return vm_;
    }

    static JSThread *Create(EcmaVM *vm);

    int GetNestedLevel() const
    {
        return nestedLevel_;
    }

    void SetNestedLevel(int level)
    {
        nestedLevel_ = level;
    }

    void SetLastFp(JSTaggedType *fp)
    {
        glueData_.lastFp_ = fp;
    }

    const JSTaggedType *GetLastFp() const
    {
        return glueData_.lastFp_;
    }

    const JSTaggedType *GetCurrentSPFrame() const
    {
        return glueData_.currentFrame_;
    }

    void SetCurrentSPFrame(JSTaggedType *sp)
    {
        glueData_.currentFrame_ = sp;
    }

    const JSTaggedType *GetLastLeaveFrame() const
    {
        return glueData_.leaveFrame_;
    }

    void SetLastLeaveFrame(JSTaggedType *sp)
    {
        glueData_.leaveFrame_ = sp;
    }

    const JSTaggedType *GetCurrentFrame() const;

    void SetCurrentFrame(JSTaggedType *sp);

    const JSTaggedType *GetCurrentInterpretedFrame() const;

    bool DoStackOverflowCheck(const JSTaggedType *sp);

    bool DoAsmStackOverflowCheck();

    bool DoStackLimitCheck();

    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t *end)
    {
        glueData_.newSpaceAllocationTopAddress_ = top;
        glueData_.newSpaceAllocationEndAddress_ = end;
    }

    void SetIsStartHeapSampling(bool isStart)
    {
        glueData_.isStartHeapSampling_ = isStart ? JSTaggedValue::True() : JSTaggedValue::False();
    }

    void SetIsTracing(bool isTracing)
    {
        glueData_.isTracing_ = isTracing;
    }

    void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor,
                 const RootBaseAndDerivedVisitor &derivedVisitor);

    void IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor);

    uintptr_t* PUBLIC_API ExpandHandleStorage();
    void PUBLIC_API ShrinkHandleStorage(int prevIndex);
    void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const;
    bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const;

    std::vector<std::pair<WeakClearCallback, void *>> *GetWeakNodeNativeFinalizeCallbacks()
    {
        return &weakNodeNativeFinalizeCallbacks_;
    }

    void SetException(JSTaggedValue exception);

    JSTaggedValue GetException() const
    {
        return glueData_.exception_;
    }

    bool HasPendingException() const
    {
        return !glueData_.exception_.IsHole();
    }

    void ClearException();

    void SetGlobalObject(JSTaggedValue globalObject)
    {
        glueData_.globalObject_ = globalObject;
    }

    const GlobalEnvConstants *GlobalConstants() const
    {
        return glueData_.globalConst_;
    }

    const BuiltinEntries GetBuiltinEntries() const
    {
        return glueData_.builtinEntries_;
    }

    BuiltinEntries* GetBuiltinEntriesPointer()
    {
        return &glueData_.builtinEntries_;
    }

    const CMap<ElementsKind, ConstantIndex> &GetArrayHClassIndexMap() const
    {
        return arrayHClassIndexMap_;
    }

    void NotifyStableArrayElementsGuardians(JSHandle<JSObject> receiver, StableArrayChangeKind changeKind);
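
    // Orientation comment (an inference from the surrounding declarations, not asserted by the
    // original sources): stableArrayElementsGuardians_ appears to act as a per-thread guard for
    // stable-array fast paths. NotifyStableArrayElementsGuardians() is expected to clear it when a
    // relevant receiver (or, per StableArrayChangeKind, its prototype) is mutated, and
    // ResetGuardians() restores it.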

    bool IsStableArrayElementsGuardiansInvalid() const
    {
        return !glueData_.stableArrayElementsGuardians_;
    }

    void ResetGuardians();

    void SetInitialBuiltinHClass(BuiltinTypeId type, JSHClass *builtinHClass, JSHClass *prototypeHClass);

    JSHClass *GetBuiltinHClass(BuiltinTypeId type) const;

    JSHClass *GetBuiltinPrototypeHClass(BuiltinTypeId type) const;

    static size_t GetBuiltinHClassOffset(BuiltinTypeId, bool isArch32);

    static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId, bool isArch32);

    const BuiltinHClassEntries &GetBuiltinHClassEntries() const
    {
        return glueData_.builtinHClassEntries_;
    }

    JSTaggedValue GetCurrentLexenv() const;

    void RegisterRTInterface(size_t id, Address addr)
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        glueData_.rtStubEntries_.Set(id, addr);
    }

    Address GetRTInterface(size_t id) const
    {
        ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS);
        return glueData_.rtStubEntries_.Get(id);
    }

    Address GetFastStubEntry(uint32_t id) const
    {
        return glueData_.coStubEntries_.Get(id);
    }

    void SetFastStubEntry(size_t id, Address entry)
    {
        glueData_.coStubEntries_.Set(id, entry);
    }

    Address GetBuiltinStubEntry(uint32_t id) const
    {
        return glueData_.builtinStubEntries_.Get(id);
    }

    void SetBuiltinStubEntry(size_t id, Address entry)
    {
        glueData_.builtinStubEntries_.Set(id, entry);
    }

    Address GetBCStubEntry(uint32_t id) const
    {
        return glueData_.bcStubEntries_.Get(id);
    }

    void SetBCStubEntry(size_t id, Address entry)
    {
        glueData_.bcStubEntries_.Set(id, entry);
    }

    void SetBCDebugStubEntry(size_t id, Address entry)
    {
        glueData_.bcDebuggerStubEntries_.Set(id, entry);
    }

    Address *GetBytecodeHandler()
    {
        return glueData_.bcStubEntries_.GetAddr();
    }

    void PUBLIC_API CheckSwitchDebuggerBCStub();
    void CheckOrSwitchPGOStubs();
    void SwitchJitProfileStubsIfNeeded();
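
    // Reading aid (a summary of the accessors above, assuming the usual stub-table layout): the
    // Get*/Set*StubEntry helpers read and write per-stub entry addresses in the glue tables
    // (bcStubEntries_, rtStubEntries_, coStubEntries_, builtinStubEntries_,
    // bcDebuggerStubEntries_), and the Check*/Switch* helpers swap bytecode handler entries
    // between the normal, debugger and profiling variants tracked by BCStubStatus.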

    ThreadId GetThreadId() const
    {
        return id_.load(std::memory_order_relaxed);
    }

    void SetThreadId()
    {
        id_.store(JSThread::GetCurrentThreadId(), std::memory_order_relaxed);
    }

    static ThreadId GetCurrentThreadId()
    {
        return os::thread::GetCurrentThreadId();
    }

    void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor);

    PropertiesCache *GetPropertiesCache() const;

    MarkStatus GetMarkStatus() const
    {
        return MarkStatusBits::Decode(glueData_.gcStateBitField_);
    }

    void SetMarkStatus(MarkStatus status)
    {
        MarkStatusBits::Set(status, &glueData_.gcStateBitField_);
    }

    bool IsConcurrentMarkingOrFinished() const
    {
        return !IsReadyToMark();
    }

    bool IsReadyToMark() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::READY_TO_MARK;
    }

    bool IsMarking() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::MARKING;
    }

    bool IsMarkFinished() const
    {
        auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_);
        return status == MarkStatus::MARK_FINISHED;
    }

    void SetPGOProfilerEnable(bool enable)
    {
        PGOProfilerStatus status =
            enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE;
        PGOStatusBits::Set(status, &glueData_.interruptVector_);
    }

    bool IsPGOProfilerEnable() const
    {
        auto status = PGOStatusBits::Decode(glueData_.interruptVector_);
        return status == PGOProfilerStatus::PGO_PROFILER_ENABLE;
    }

    void SetBCStubStatus(BCStubStatus status)
    {
        BCStubStatusBits::Set(status, &glueData_.interruptVector_);
    }

    BCStubStatus GetBCStubStatus() const
    {
        return BCStubStatusBits::Decode(glueData_.interruptVector_);
    }

    bool CheckSafepoint();

    void SetGetStackSignal(bool isParseStack)
    {
        getStackSignal_ = isParseStack;
    }

    bool GetStackSignal() const
    {
        return getStackSignal_;
    }

    void SetNeedProfiling(bool needProfiling)
    {
        needProfiling_.store(needProfiling);
    }

    void SetIsProfiling(bool isProfiling)
    {
        isProfiling_ = isProfiling;
    }

    bool GetIsProfiling()
    {
        return isProfiling_;
    }

    void SetGcState(bool gcState)
    {
        gcState_ = gcState;
    }

    bool GetGcState() const
    {
        return gcState_;
    }

    void SetRuntimeState(bool runtimeState)
    {
        runtimeState_ = runtimeState;
    }

    bool GetRuntimeState() const
    {
        return runtimeState_;
    }

    void SetCpuProfileName(std::string &profileName)
    {
        profileName_ = profileName;
    }

    void EnableAsmInterpreter()
    {
        isAsmInterpreter_ = true;
    }

    bool IsAsmInterpreter() const
    {
        return isAsmInterpreter_;
    }

    VmThreadControl *GetVmThreadControl() const
    {
        return vmThreadControl_;
    }

    void SetEnableStackSourceFile(bool value)
    {
        enableStackSourceFile_ = value;
    }

    bool GetEnableStackSourceFile() const
    {
        return enableStackSourceFile_;
    }

    void SetEnableLazyBuiltins(bool value)
    {
        enableLazyBuiltins_ = value;
    }

    bool GetEnableLazyBuiltins() const
    {
        return enableLazyBuiltins_;
    }

    static constexpr size_t GetGlueDataOffset()
    {
        return MEMBER_OFFSET(JSThread, glueData_);
    }

    uintptr_t GetGlueAddr() const
    {
        return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset();
    }
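
    // Layout note with a small illustrative sketch: "glue" is the address of glueData_ inside this
    // JSThread, so the two views convert with a constant offset:
    //   uintptr_t glue = thread->GetGlueAddr();           // == (uintptr_t)thread + GetGlueDataOffset()
    //   JSThread *same = JSThread::GlueToJSThread(glue);  // == glue - GetGlueDataOffset()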

    static JSThread *GlueToJSThread(uintptr_t glue)
    {
        // Be very careful when modifying this.
        return reinterpret_cast<JSThread *>(glue - GetGlueDataOffset());
    }
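
    // Convention used by the interrupt-flag accessors below (a reading aid, not new behaviour):
    // each flag has a public accessor that takes interruptMutex_ itself and an inline *WithoutLock
    // variant that assumes the caller already holds the mutex, e.g. (hypothetical caller inside
    // JSThread or a friend class):
    //   LockHolder lock(interruptMutex_);
    //   if (VMNeedSuspensionWithoutLock() && !HasTerminationRequestWithoutLock()) { ... }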

    void SetCheckSafePointStatus()
    {
        LockHolder lock(interruptMutex_);
        ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
        CheckSafePointBit::Set(true, &glueData_.interruptVector_);
    }

    void ResetCheckSafePointStatus()
    {
        LockHolder lock(interruptMutex_);
        ResetCheckSafePointStatusWithoutLock();
    }

    inline void ResetCheckSafePointStatusWithoutLock()
    {
        // The interruptMutex_ should be locked before calling this function.
        ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1);
        CheckSafePointBit::Set(false, &glueData_.interruptVector_);
    }

    void SetVMNeedSuspension(bool flag)
    {
        LockHolder lock(interruptMutex_);
        VMNeedSuspensionBit::Set(flag, &glueData_.interruptVector_);
    }

    bool VMNeedSuspension()
    {
        LockHolder lock(interruptMutex_);
        return VMNeedSuspensionWithoutLock();
    }

    inline bool VMNeedSuspensionWithoutLock()
    {
        // The interruptMutex_ should be locked before calling this function.
        return VMNeedSuspensionBit::Decode(glueData_.interruptVector_);
    }

    void SetVMSuspended(bool flag)
    {
        LockHolder lock(interruptMutex_);
        VMHasSuspendedBit::Set(flag, &glueData_.interruptVector_);
    }

    bool IsVMSuspended()
    {
        LockHolder lock(interruptMutex_);
        return VMHasSuspendedBit::Decode(glueData_.interruptVector_);
    }

    bool HasTerminationRequest() const
    {
        LockHolder lock(interruptMutex_);
        return HasTerminationRequestWithoutLock();
    }

    inline bool HasTerminationRequestWithoutLock() const
    {
        // The interruptMutex_ should be locked before calling this function.
        return VMNeedTerminationBit::Decode(glueData_.interruptVector_);
    }

    void SetTerminationRequest(bool flag)
    {
        LockHolder lock(interruptMutex_);
        SetTerminationRequestWithoutLock(flag);
    }

    inline void SetTerminationRequestWithoutLock(bool flag)
    {
        // The interruptMutex_ should be locked before calling this function.
        VMNeedTerminationBit::Set(flag, &glueData_.interruptVector_);
    }

    void SetVMTerminated(bool flag)
    {
        LockHolder lock(interruptMutex_);
        SetVMTerminatedWithoutLock(flag);
    }

    inline void SetVMTerminatedWithoutLock(bool flag)
    {
        // The interruptMutex_ should be locked before calling this function.
        VMHasTerminatedBit::Set(flag, &glueData_.interruptVector_);
    }

    bool HasTerminated() const
    {
        LockHolder lock(interruptMutex_);
        return VMHasTerminatedBit::Decode(glueData_.interruptVector_);
    }

    void TerminateExecution();

    void SetInstallMachineCode(bool flag)
    {
        LockHolder lock(interruptMutex_);
        SetInstallMachineCodeWithoutLock(flag);
    }

    inline void SetInstallMachineCodeWithoutLock(bool flag)
    {
        // The interruptMutex_ should be locked before calling this function.
        InstallMachineCodeBit::Set(flag, &glueData_.interruptVector_);
    }

    bool HasInstallMachineCode() const
    {
        LockHolder lock(interruptMutex_);
        return HasInstallMachineCodeWithoutLock();
    }

    inline bool HasInstallMachineCodeWithoutLock() const
    {
        // The interruptMutex_ should be locked before calling this function.
        return InstallMachineCodeBit::Decode(glueData_.interruptVector_);
    }

    static uintptr_t GetCurrentStackPosition()
    {
        return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
    }

    bool IsLegalAsmSp(uintptr_t sp) const;

    bool IsLegalThreadSp(uintptr_t sp) const;

    bool IsLegalSp(uintptr_t sp) const;

    void SetCheckAndCallEnterState(bool state)
    {
        finalizationCheckState_ = state;
    }

    bool GetCheckAndCallEnterState() const
    {
        return finalizationCheckState_;
    }

    uint64_t GetStackStart() const
    {
        return glueData_.stackStart_;
    }

    uint64_t GetStackLimit() const
    {
        return glueData_.stackLimit_;
    }

    GlobalEnv *GetGlueGlobalEnv()
    {
        return glueData_.glueGlobalEnv_;
    }

    void SetGlueGlobalEnv(GlobalEnv *global)
    {
        ASSERT(global != nullptr);
        glueData_.glueGlobalEnv_ = global;
    }
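
    // The inline wrappers below forward to the std::function hooks declared near the end of this
    // class (newGlobalHandle_, disposeGlobalHandle_, setWeak_, clearWeak_, isWeak_); the hooks are
    // presumably installed by the owning VM/context so that global-handle bookkeeping stays out of
    // this header.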

    inline uintptr_t NewGlobalHandle(JSTaggedType value)
    {
        return newGlobalHandle_(value);
    }

    inline void DisposeGlobalHandle(uintptr_t nodeAddr)
    {
        disposeGlobalHandle_(nodeAddr);
    }

    inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr,
                             WeakClearCallback nativeFinalizeCallBack = nullptr)
    {
        return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack);
    }

    inline uintptr_t ClearWeak(uintptr_t nodeAddr)
    {
        return clearWeak_(nodeAddr);
    }

    inline bool IsWeak(uintptr_t addr) const
    {
        return isWeak_(addr);
    }

    void EnableCrossThreadExecution()
    {
        glueData_.allowCrossThreadExecution_ = true;
    }

    bool IsCrossThreadExecutionEnable() const
    {
        return glueData_.allowCrossThreadExecution_;
    }

    bool IsFrameDropped()
    {
        return glueData_.isFrameDropped_;
    }

    void SetFrameDroppedState()
    {
        glueData_.isFrameDropped_ = true;
    }

    void ResetFrameDroppedState()
    {
        glueData_.isFrameDropped_ = false;
    }

    bool IsEntryFrameDroppedTrue()
    {
        return glueData_.entryFrameDroppedState_ == FrameDroppedState::StateTrue;
    }

    bool IsEntryFrameDroppedPending()
    {
        return glueData_.entryFrameDroppedState_ == FrameDroppedState::StatePending;
    }

    void SetEntryFrameDroppedState()
    {
        glueData_.entryFrameDroppedState_ = FrameDroppedState::StateTrue;
    }

    void ResetEntryFrameDroppedState()
    {
        glueData_.entryFrameDroppedState_ = FrameDroppedState::StateFalse;
    }

    void PendingEntryFrameDroppedState()
    {
        glueData_.entryFrameDroppedState_ = FrameDroppedState::StatePending;
    }

    bool IsDebugMode()
    {
        return glueData_.isDebugMode_;
    }

    void SetDebugModeState()
    {
        glueData_.isDebugMode_ = true;
    }

    void ResetDebugModeState()
    {
        glueData_.isDebugMode_ = false;
    }

    bool IsStartGlobalLeakCheck() const;
    bool EnableGlobalObjectLeakCheck() const;
    bool EnableGlobalPrimitiveLeakCheck() const;
    void WriteToStackTraceFd(std::ostringstream &buffer) const;
    void SetStackTraceFd(int32_t fd);
    void CloseStackTraceFd();

    uint32_t IncreaseGlobalNumberCount()
    {
        return ++globalNumberCount_;
    }

    void SetPropertiesGrowStep(uint32_t step)
    {
        glueData_.propertiesGrowStep_ = step;
    }

    uint32_t GetPropertiesGrowStep() const
    {
        return glueData_.propertiesGrowStep_;
    }

    struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
                                                 BCStubEntries,
                                                 JSTaggedValue,
                                                 JSTaggedValue,
                                                 base::AlignedBool,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 RTStubEntries,
                                                 COStubEntries,
                                                 BuiltinStubEntries,
                                                 BuiltinHClassEntries,
                                                 BCDebuggerStubEntries,
                                                 base::AlignedUint64,
                                                 base::AlignedPointer,
                                                 base::AlignedUint64,
                                                 base::AlignedUint64,
                                                 base::AlignedPointer,
                                                 base::AlignedPointer,
                                                 base::AlignedUint64,
                                                 base::AlignedUint64,
                                                 JSTaggedValue,
                                                 base::AlignedBool,
                                                 base::AlignedBool,
                                                 base::AlignedUint32,
                                                 JSTaggedValue,
                                                 base::AlignedPointer,
                                                 BuiltinEntries,
                                                 JSTaggedValue,
                                                 base::AlignedBool> {
        enum class Index : size_t {
            BCStubEntriesIndex = 0,
            ExceptionIndex,
            GlobalObjIndex,
            StableArrayElementsGuardiansIndex,
            CurrentFrameIndex,
            LeaveFrameIndex,
            LastFpIndex,
            NewSpaceAllocationTopAddressIndex,
            NewSpaceAllocationEndAddressIndex,
            RTStubEntriesIndex,
            COStubEntriesIndex,
            BuiltinsStubEntriesIndex,
            BuiltinHClassEntriesIndex,
            BCDebuggerStubEntriesIndex,
            StateBitFieldIndex,
            FrameBaseIndex,
            StackStartIndex,
            StackLimitIndex,
            GlueGlobalEnvIndex,
            GlobalConstIndex,
            AllowCrossThreadExecutionIndex,
            InterruptVectorIndex,
            IsStartHeapSamplingIndex,
            IsDebugModeIndex,
            IsFrameDroppedIndex,
            PropertiesGrowStepIndex,
            EntryFrameDroppedStateIndex,
            CurrentContextIndex,
            BuiltinEntriesIndex,
            SingleCharTableIndex,
            IsTracingIndex,
            NumOfMembers
        };
        static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);
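
        // Maintenance note (an inference from the static_assert above): Index, the AlignedStruct
        // template argument list, and the alignas(EAS) member declarations below must stay in the
        // same order and have the same length; the GetOffset<Index>() helpers used by the
        // Get*Offset() functions rely on that ordering.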

        static size_t GetExceptionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32);
        }

        static size_t GetGlobalObjOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalObjIndex)>(isArch32);
        }

        static size_t GetStableArrayElementsGuardiansOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StableArrayElementsGuardiansIndex)>(isArch32);
        }

        static size_t GetGlobalConstOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32);
        }

        static size_t GetStateBitFieldOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StateBitFieldIndex)>(isArch32);
        }

        static size_t GetCurrentFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32);
        }

        static size_t GetLeaveFrameOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32);
        }

        static size_t GetLastFpOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetBCStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BCStubEntriesIndex)>(isArch32);
        }

        static size_t GetRTStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
        }

        static size_t GetCOStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinHClassEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinHClassEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetBuiltinHClassOffset(type);
        }

        static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetPrototypeHClassOffset(type);
        }

        static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BCDebuggerStubEntriesIndex)>(isArch32);
        }

        static size_t GetFrameBaseOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
        }

        static size_t GetStackLimitOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
        }

        static size_t GetGlueGlobalEnvOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlueGlobalEnvIndex)>(isArch32);
        }

        static size_t GetAllowCrossThreadExecutionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::AllowCrossThreadExecutionIndex)>(isArch32);
        }

        static size_t GetInterruptVectorOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::InterruptVectorIndex)>(isArch32);
        }

        static size_t GetIsStartHeapSamplingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsStartHeapSamplingIndex)>(isArch32);
        }

        static size_t GetIsDebugModeOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsDebugModeIndex)>(isArch32);
        }

        static size_t GetIsFrameDroppedOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsFrameDroppedIndex)>(isArch32);
        }

        static size_t GetPropertiesGrowStepOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::PropertiesGrowStepIndex)>(isArch32);
        }

        static size_t GetEntryFrameDroppedStateOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::EntryFrameDroppedStateIndex)>(isArch32);
        }

        static size_t GetCurrentContextOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentContextIndex)>(isArch32);
        }

        static size_t GetBuiltinEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinEntriesIndex)>(isArch32);
        }

        static size_t GetSingleCharTableOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SingleCharTableIndex)>(isArch32);
        }

        static size_t GetIsTracingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsTracingIndex)>(isArch32);
        }

        alignas(EAS) BCStubEntries bcStubEntries_;
        alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};
        alignas(EAS) JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
        alignas(EAS) bool stableArrayElementsGuardians_ {true};
        alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
        alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
        alignas(EAS) JSTaggedType *lastFp_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) RTStubEntries rtStubEntries_;
        alignas(EAS) COStubEntries coStubEntries_;
        alignas(EAS) BuiltinStubEntries builtinStubEntries_;
        alignas(EAS) BuiltinHClassEntries builtinHClassEntries_;
        alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_;
        alignas(EAS) volatile uint64_t gcStateBitField_ {0ULL};
        alignas(EAS) JSTaggedType *frameBase_ {nullptr};
        alignas(EAS) uint64_t stackStart_ {0};
        alignas(EAS) uint64_t stackLimit_ {0};
        alignas(EAS) GlobalEnv *glueGlobalEnv_;
        alignas(EAS) GlobalEnvConstants *globalConst_;
        alignas(EAS) bool allowCrossThreadExecution_ {false};
        alignas(EAS) volatile uint64_t interruptVector_ {0};
        alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
        alignas(EAS) bool isDebugMode_ {false};
        alignas(EAS) bool isFrameDropped_ {false};
        alignas(EAS) uint32_t propertiesGrowStep_ {JSObjectResizingStrategy::PROPERTIES_GROW_SIZE};
        alignas(EAS) uint64_t entryFrameDroppedState_ {FrameDroppedState::StateFalse};
        alignas(EAS) EcmaContext *currentContext_ {nullptr};
        alignas(EAS) BuiltinEntries builtinEntries_;
        alignas(EAS) JSTaggedValue singleCharTable_ {JSTaggedValue::Hole()};
        alignas(EAS) bool isTracing_ {false};
    };
    STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);
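
    // Context bookkeeping (a short orientation comment inferred from the declarations below): a
    // JSThread can own several EcmaContexts; contexts_ tracks them, and the active one is mirrored
    // into glueData_.currentContext_ via SetCurrentEcmaContext()/SwitchCurrentContext() so that it
    // is reachable through the glue like the other GlueData fields.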

    void PushContext(EcmaContext *context);
    void PopContext();

    EcmaContext *GetCurrentEcmaContext() const
    {
        return glueData_.currentContext_;
    }

    JSTaggedValue GetSingleCharTable() const
    {
        ASSERT(glueData_.singleCharTable_ != JSTaggedValue::Hole());
        return glueData_.singleCharTable_;
    }

    void SetSingleCharTable(JSTaggedValue singleCharTable)
    {
        glueData_.singleCharTable_ = singleCharTable;
    }

    void SwitchCurrentContext(EcmaContext *currentContext, bool isInIterate = false);

    CVector<EcmaContext *> GetEcmaContexts()
    {
        return contexts_;
    }

    bool IsPropertyCacheCleared() const;

    bool EraseContext(EcmaContext *context);

    const GlobalEnvConstants *GetFirstGlobalConst() const;
    bool IsAllContextsInitialized() const;
    bool IsReadyToUpdateDetector() const;
    Area *GetOrCreateRegExpCache();

    void InitializeBuiltinObject(const std::string& key);
    void InitializeBuiltinObject();

private:
    NO_COPY_SEMANTIC(JSThread);
    NO_MOVE_SEMANTIC(JSThread);

    void SetGlobalConst(GlobalEnvConstants *globalConst)
    {
        glueData_.globalConst_ = globalConst;
    }

    void SetCurrentEcmaContext(EcmaContext *context)
    {
        glueData_.currentContext_ = context;
    }

    void SetArrayHClassIndexMap(const CMap<ElementsKind, ConstantIndex> &map)
    {
        arrayHClassIndexMap_ = map;
    }

    void DumpStack() DUMP_API_ATTR;

    static size_t GetAsmStackLimit();

    static bool IsMainThread();

    static constexpr size_t DEFAULT_MAX_SYSTEM_STACK_SIZE = 8_MB;

    GlueData glueData_;
    std::atomic<ThreadId> id_;
    EcmaVM *vm_ {nullptr};
    Area *regExpCache_ {nullptr};

    // MM: handles, global-handles, and aot-stubs.
    int nestedLevel_ = 0;
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeNativeFinalizeCallbacks_ {};

    EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
    EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
    int32_t stackTraceFd_ {-1};

    std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
    std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
    std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack_,
        WeakClearCallback nativeFinalizeCallBack)> setWeak_;
    std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
    std::function<bool(uintptr_t addr)> isWeak_;
    uint32_t globalNumberCount_ {0};

    // Run-time state
    bool getStackSignal_ {false};
    bool runtimeState_ {false};
    bool isAsmInterpreter_ {false};
    VmThreadControl *vmThreadControl_ {nullptr};
    bool enableStackSourceFile_ {true};
    bool enableLazyBuiltins_ {false};

    // CpuProfiler
    bool isProfiling_ {false};
    bool gcState_ {false};
    std::atomic_bool needProfiling_ {false};
    std::string profileName_ {""};

    bool finalizationCheckState_ {false};

    CMap<ElementsKind, ConstantIndex> arrayHClassIndexMap_;

    CVector<EcmaContext *> contexts_;
    EcmaContext *currentContext_ {nullptr};
    mutable Mutex interruptMutex_;

    friend class GlobalHandleCollection;
    friend class EcmaVM;
    friend class EcmaContext;
};
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_JS_THREAD_H