1 /* 2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd. 3 * Licensed under the Apache License, Version 2.0 (the "License"); 4 * you may not use this file except in compliance with the License. 5 * You may obtain a copy of the License at 6 * 7 * http://www.apache.org/licenses/LICENSE-2.0 8 * 9 * Unless required by applicable law or agreed to in writing, software 10 * distributed under the License is distributed on an "AS IS" BASIS, 11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 * See the License for the specific language governing permissions and 13 * limitations under the License. 14 */ 15 16 #ifndef ECMASCRIPT_JS_THREAD_H 17 #define ECMASCRIPT_JS_THREAD_H 18 19 #include <atomic> 20 #include <sstream> 21 #include <string> 22 #include <cstdint> 23 24 #include "ecmascript/base/aligned_struct.h" 25 #include "ecmascript/builtin_entries.h" 26 #include "ecmascript/daemon/daemon_task.h" 27 #include "ecmascript/global_index.h" 28 #include "ecmascript/js_object_resizing_strategy.h" 29 #include "ecmascript/js_tagged_value.h" 30 #include "ecmascript/js_thread_hclass_entries.h" 31 #include "ecmascript/js_thread_stub_entries.h" 32 #include "ecmascript/log_wrapper.h" 33 #include "ecmascript/mem/visitor.h" 34 #include "ecmascript/mutator_lock.h" 35 36 #if defined(ENABLE_FFRT_INTERFACES) 37 #include "ffrt.h" 38 #include "c/executor_task.h" 39 #endif 40 41 namespace panda::ecmascript { 42 class DateUtils; 43 class EcmaContext; 44 class EcmaVM; 45 class EcmaHandleScope; 46 class GlobalIndex; 47 class HeapRegionAllocator; 48 class PropertiesCache; 49 template<typename T> 50 class EcmaGlobalStorage; 51 class Node; 52 class DebugNode; 53 class VmThreadControl; 54 class GlobalEnvConstants; 55 enum class ElementsKind : uint8_t; 56 57 class MachineCode; 58 using JitCodeVector = std::vector<std::tuple<MachineCode*, std::string, uintptr_t>>; 59 using JitCodeMapVisitor = std::function<void(std::map<JSTaggedType, JitCodeVector*>&)>; 60 61 using 
WeakClearCallback = void (*)(void *); 62 63 enum class MarkStatus : uint8_t { 64 READY_TO_MARK, 65 MARKING, 66 MARK_FINISHED, 67 }; 68 69 enum class GCKind : uint8_t { 70 LOCAL_GC, 71 SHARED_GC 72 }; 73 74 enum class PGOProfilerStatus : uint8_t { 75 PGO_PROFILER_DISABLE, 76 PGO_PROFILER_ENABLE, 77 }; 78 79 enum class BCStubStatus: uint8_t { 80 NORMAL_BC_STUB, 81 PROFILE_BC_STUB, 82 JIT_PROFILE_BC_STUB, 83 }; 84 85 enum class StableArrayChangeKind { PROTO, NOT_PROTO }; 86 87 enum ThreadType : uint8_t { 88 JS_THREAD, 89 JIT_THREAD, 90 DAEMON_THREAD, 91 }; 92 93 enum ThreadFlag : uint16_t { 94 NO_FLAGS = 0 << 0, 95 SUSPEND_REQUEST = 1 << 0, 96 ACTIVE_BARRIER = 1 << 1, 97 }; 98 99 static constexpr uint32_t THREAD_STATE_OFFSET = 16; 100 static constexpr uint32_t THREAD_FLAGS_MASK = (0x1 << THREAD_STATE_OFFSET) - 1; 101 enum class ThreadState : uint16_t { 102 CREATED = 0, 103 RUNNING = 1, 104 NATIVE = 2, 105 WAIT = 3, 106 IS_SUSPENDED = 4, 107 TERMINATED = 5, 108 }; 109 110 union ThreadStateAndFlags { asInt(val)111 explicit ThreadStateAndFlags(uint32_t val = 0): asInt(val) {} 112 struct { 113 volatile uint16_t flags; 114 volatile ThreadState state; 115 } asStruct; 116 volatile uint32_t asInt; 117 uint32_t asNonvolatileInt; 118 std::atomic<uint32_t> asAtomicInt; 119 private: 120 NO_COPY_SEMANTIC(ThreadStateAndFlags); 121 }; 122 123 static constexpr uint32_t MAIN_THREAD_INDEX = 0; 124 125 class JSThread { 126 public: 127 static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2; 128 static constexpr int CONCURRENT_MARKING_BITFIELD_MASK = 0x3; 129 static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_NUM = 1; 130 static constexpr int SHARED_CONCURRENT_MARKING_BITFIELD_MASK = 0x1; 131 static constexpr int CHECK_SAFEPOINT_BITFIELD_NUM = 8; 132 static constexpr int PGO_PROFILER_BITFIELD_START = 16; 133 static constexpr int BOOL_BITFIELD_NUM = 1; 134 static constexpr int BCSTUBSTATUS_BITFIELD_NUM = 2; 135 static constexpr uint32_t RESERVE_STACK_SIZE = 128; 136 using 
MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>; 137 using SharedMarkStatusBits = BitField<SharedMarkStatus, 0, SHARED_CONCURRENT_MARKING_BITFIELD_NUM>; 138 using CheckSafePointBit = BitField<bool, 0, BOOL_BITFIELD_NUM>; 139 using VMNeedSuspensionBit = BitField<bool, CHECK_SAFEPOINT_BITFIELD_NUM, BOOL_BITFIELD_NUM>; 140 using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag; 141 using InstallMachineCodeBit = VMHasSuspendedBit::NextFlag; 142 using PGOStatusBits = BitField<PGOProfilerStatus, PGO_PROFILER_BITFIELD_START, BOOL_BITFIELD_NUM>; 143 using BCStubStatusBits = PGOStatusBits::NextField<BCStubStatus, BCSTUBSTATUS_BITFIELD_NUM>; 144 using ThreadId = uint32_t; 145 146 enum FrameDroppedState { 147 StateFalse = 0, 148 StateTrue, 149 StatePending 150 }; 151 152 explicit JSThread(EcmaVM *vm); 153 // only used in jit thread 154 explicit JSThread(EcmaVM *vm, ThreadType threadType); 155 // only used in daemon thread 156 explicit JSThread(ThreadType threadType); 157 158 PUBLIC_API ~JSThread(); 159 GetEcmaVM()160 EcmaVM *GetEcmaVM() const 161 { 162 return vm_; 163 } 164 165 static JSThread *Create(EcmaVM *vm); 166 static JSThread *GetCurrent(); 167 GetNestedLevel()168 int GetNestedLevel() const 169 { 170 return nestedLevel_; 171 } 172 SetNestedLevel(int level)173 void SetNestedLevel(int level) 174 { 175 nestedLevel_ = level; 176 } 177 SetLastFp(JSTaggedType * fp)178 void SetLastFp(JSTaggedType *fp) 179 { 180 glueData_.lastFp_ = fp; 181 } 182 GetLastFp()183 const JSTaggedType *GetLastFp() const 184 { 185 return glueData_.lastFp_; 186 } 187 GetCurrentSPFrame()188 const JSTaggedType *GetCurrentSPFrame() const 189 { 190 return glueData_.currentFrame_; 191 } 192 SetCurrentSPFrame(JSTaggedType * sp)193 void SetCurrentSPFrame(JSTaggedType *sp) 194 { 195 glueData_.currentFrame_ = sp; 196 } 197 GetLastLeaveFrame()198 const JSTaggedType *GetLastLeaveFrame() const 199 { 200 return glueData_.leaveFrame_; 201 } 202 SetLastLeaveFrame(JSTaggedType * sp)203 
void SetLastLeaveFrame(JSTaggedType *sp) 204 { 205 glueData_.leaveFrame_ = sp; 206 } 207 208 const JSTaggedType *GetCurrentFrame() const; 209 210 void SetCurrentFrame(JSTaggedType *sp); 211 212 const JSTaggedType *GetCurrentInterpretedFrame() const; 213 214 bool DoStackOverflowCheck(const JSTaggedType *sp); 215 216 bool DoStackLimitCheck(); 217 GetNativeAreaAllocator()218 NativeAreaAllocator *GetNativeAreaAllocator() const 219 { 220 return nativeAreaAllocator_; 221 } 222 GetHeapRegionAllocator()223 HeapRegionAllocator *GetHeapRegionAllocator() const 224 { 225 return heapRegionAllocator_; 226 } 227 ReSetNewSpaceAllocationAddress(const uintptr_t * top,const uintptr_t * end)228 void ReSetNewSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end) 229 { 230 glueData_.newSpaceAllocationTopAddress_ = top; 231 glueData_.newSpaceAllocationEndAddress_ = end; 232 } 233 ReSetSOldSpaceAllocationAddress(const uintptr_t * top,const uintptr_t * end)234 void ReSetSOldSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end) 235 { 236 glueData_.sOldSpaceAllocationTopAddress_ = top; 237 glueData_.sOldSpaceAllocationEndAddress_ = end; 238 } 239 ReSetSNonMovableSpaceAllocationAddress(const uintptr_t * top,const uintptr_t * end)240 void ReSetSNonMovableSpaceAllocationAddress(const uintptr_t *top, const uintptr_t* end) 241 { 242 glueData_.sNonMovableSpaceAllocationTopAddress_ = top; 243 glueData_.sNonMovableSpaceAllocationEndAddress_ = end; 244 } 245 GetUnsharedConstpools()246 uintptr_t GetUnsharedConstpools() const 247 { 248 return glueData_.unsharedConstpools_; 249 } 250 SetUnsharedConstpools(uintptr_t unsharedConstpools)251 void SetUnsharedConstpools(uintptr_t unsharedConstpools) 252 { 253 glueData_.unsharedConstpools_ = unsharedConstpools; 254 } 255 SetIsStartHeapSampling(bool isStart)256 void SetIsStartHeapSampling(bool isStart) 257 { 258 glueData_.isStartHeapSampling_ = isStart ? 
JSTaggedValue::True() : JSTaggedValue::False(); 259 } 260 SetIsTracing(bool isTracing)261 void SetIsTracing(bool isTracing) 262 { 263 glueData_.isTracing_ = isTracing; 264 } 265 266 void Iterate(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor, 267 const RootBaseAndDerivedVisitor &derivedVisitor); 268 269 void IterateJitCodeMap(const JitCodeMapVisitor &updater); 270 271 void IterateHandleWithCheck(const RootVisitor &visitor, const RootRangeVisitor &rangeVisitor); 272 273 uintptr_t* PUBLIC_API ExpandHandleStorage(); 274 void PUBLIC_API ShrinkHandleStorage(int prevIndex); 275 void PUBLIC_API CheckJSTaggedType(JSTaggedType value) const; 276 bool PUBLIC_API CpuProfilerCheckJSTaggedType(JSTaggedType value) const; 277 278 void PUBLIC_API SetException(JSTaggedValue exception); 279 GetException()280 JSTaggedValue GetException() const 281 { 282 return glueData_.exception_; 283 } 284 HasPendingException()285 bool HasPendingException() const 286 { 287 return !glueData_.exception_.IsHole(); 288 } 289 290 void ClearException(); 291 SetGlobalObject(JSTaggedValue globalObject)292 void SetGlobalObject(JSTaggedValue globalObject) 293 { 294 glueData_.globalObject_ = globalObject; 295 } 296 GetGlobalEnv()297 const GlobalEnv *GetGlobalEnv() const 298 { 299 return glueData_.glueGlobalEnv_; 300 } 301 GlobalConstants()302 const GlobalEnvConstants *GlobalConstants() const 303 { 304 return glueData_.globalConst_; 305 } 306 SetGlobalConstants(const GlobalEnvConstants * constants)307 void SetGlobalConstants(const GlobalEnvConstants *constants) 308 { 309 glueData_.globalConst_ = const_cast<GlobalEnvConstants*>(constants); 310 } 311 GetBuiltinEntries()312 const BuiltinEntries GetBuiltinEntries() const 313 { 314 return glueData_.builtinEntries_; 315 } 316 GetBuiltinEntriesPointer()317 BuiltinEntries* GetBuiltinEntriesPointer() 318 { 319 return &glueData_.builtinEntries_; 320 } 321 GetArrayHClassIndexMap()322 const CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> 
&GetArrayHClassIndexMap() const 323 { 324 return arrayHClassIndexMap_; 325 } 326 GetCtorHclassEntries()327 const CMap<JSHClass *, GlobalIndex> &GetCtorHclassEntries() const 328 { 329 return ctorHclassEntries_; 330 } 331 332 void NotifyStableArrayElementsGuardians(JSHandle<JSObject> receiver, StableArrayChangeKind changeKind); 333 IsStableArrayElementsGuardiansInvalid()334 bool IsStableArrayElementsGuardiansInvalid() const 335 { 336 return !glueData_.stableArrayElementsGuardians_; 337 } 338 339 void ResetGuardians(); 340 341 void SetInitialBuiltinHClass( 342 BuiltinTypeId type, JSHClass *builtinHClass, JSHClass *instanceHClass, 343 JSHClass *prototypeHClass, JSHClass *prototypeOfPrototypeHClass = nullptr, 344 JSHClass *extraHClass = nullptr); 345 346 void SetInitialBuiltinGlobalHClass(JSHClass *builtinHClass, GlobalIndex globalIndex); 347 348 JSHClass *GetBuiltinHClass(BuiltinTypeId type) const; 349 350 JSHClass *GetBuiltinInstanceHClass(BuiltinTypeId type) const; 351 JSHClass *GetBuiltinExtraHClass(BuiltinTypeId type) const; 352 JSHClass *GetArrayInstanceHClass(ElementsKind kind, bool isPrototype) const; 353 354 PUBLIC_API JSHClass *GetBuiltinPrototypeHClass(BuiltinTypeId type) const; 355 PUBLIC_API JSHClass *GetBuiltinPrototypeOfPrototypeHClass(BuiltinTypeId type) const; 356 357 static size_t GetBuiltinHClassOffset(BuiltinTypeId, bool isArch32); 358 359 static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId, bool isArch32); 360 GetBuiltinHClassEntries()361 const BuiltinHClassEntries &GetBuiltinHClassEntries() const 362 { 363 return glueData_.builtinHClassEntries_; 364 } 365 366 JSTaggedValue GetCurrentLexenv() const; 367 JSTaggedValue GetCurrentFunction() const; 368 RegisterRTInterface(size_t id,Address addr)369 void RegisterRTInterface(size_t id, Address addr) 370 { 371 ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS); 372 glueData_.rtStubEntries_.Set(id, addr); 373 } 374 GetRTInterface(size_t id)375 Address GetRTInterface(size_t id) const 376 { 377 
ASSERT(id < kungfu::RuntimeStubCSigns::NUM_OF_STUBS); 378 return glueData_.rtStubEntries_.Get(id); 379 } 380 GetFastStubEntry(uint32_t id)381 Address GetFastStubEntry(uint32_t id) const 382 { 383 return glueData_.coStubEntries_.Get(id); 384 } 385 SetFastStubEntry(size_t id,Address entry)386 void SetFastStubEntry(size_t id, Address entry) 387 { 388 glueData_.coStubEntries_.Set(id, entry); 389 } 390 GetBuiltinStubEntry(uint32_t id)391 Address GetBuiltinStubEntry(uint32_t id) const 392 { 393 return glueData_.builtinStubEntries_.Get(id); 394 } 395 SetBuiltinStubEntry(size_t id,Address entry)396 void SetBuiltinStubEntry(size_t id, Address entry) 397 { 398 glueData_.builtinStubEntries_.Set(id, entry); 399 } 400 GetBCStubEntry(uint32_t id)401 Address GetBCStubEntry(uint32_t id) const 402 { 403 return glueData_.bcStubEntries_.Get(id); 404 } 405 SetBCStubEntry(size_t id,Address entry)406 void SetBCStubEntry(size_t id, Address entry) 407 { 408 glueData_.bcStubEntries_.Set(id, entry); 409 } 410 GetBaselineStubEntry(uint32_t id)411 Address GetBaselineStubEntry(uint32_t id) const 412 { 413 return glueData_.baselineStubEntries_.Get(id); 414 } 415 SetBaselineStubEntry(size_t id,Address entry)416 void SetBaselineStubEntry(size_t id, Address entry) 417 { 418 glueData_.baselineStubEntries_.Set(id, entry); 419 } 420 SetBCDebugStubEntry(size_t id,Address entry)421 void SetBCDebugStubEntry(size_t id, Address entry) 422 { 423 glueData_.bcDebuggerStubEntries_.Set(id, entry); 424 } 425 GetBytecodeHandler()426 Address *GetBytecodeHandler() 427 { 428 return glueData_.bcStubEntries_.GetAddr(); 429 } 430 431 void PUBLIC_API CheckSwitchDebuggerBCStub(); 432 void CheckOrSwitchPGOStubs(); 433 void SwitchJitProfileStubs(bool isEnablePgo); 434 GetThreadId()435 ThreadId GetThreadId() const 436 { 437 return id_.load(std::memory_order_acquire); 438 } 439 440 void PostFork(); 441 GetCurrentThreadId()442 static ThreadId GetCurrentThreadId() 443 { 444 #if defined(ENABLE_FFRT_INTERFACES) 445 
JSThread::ThreadId id = ffrt_this_task_get_id(); 446 if (id != 0) { 447 return id; 448 } else { 449 return os::thread::GetCurrentThreadId(); 450 } 451 #else 452 return os::thread::GetCurrentThreadId(); 453 #endif 454 } 455 456 void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor, GCKind gcKind = GCKind::LOCAL_GC); 457 458 void UpdateJitCodeMapReference(const WeakRootVisitor &visitor); 459 460 PUBLIC_API PropertiesCache *GetPropertiesCache() const; 461 GetMarkStatus()462 MarkStatus GetMarkStatus() const 463 { 464 return MarkStatusBits::Decode(glueData_.gcStateBitField_); 465 } 466 SetMarkStatus(MarkStatus status)467 void SetMarkStatus(MarkStatus status) 468 { 469 MarkStatusBits::Set(status, &glueData_.gcStateBitField_); 470 } 471 IsConcurrentMarkingOrFinished()472 bool IsConcurrentMarkingOrFinished() const 473 { 474 return !IsReadyToConcurrentMark(); 475 } 476 IsReadyToConcurrentMark()477 bool IsReadyToConcurrentMark() const 478 { 479 auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_); 480 return status == MarkStatus::READY_TO_MARK; 481 } 482 IsMarking()483 bool IsMarking() const 484 { 485 auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_); 486 return status == MarkStatus::MARKING; 487 } 488 IsMarkFinished()489 bool IsMarkFinished() const 490 { 491 auto status = MarkStatusBits::Decode(glueData_.gcStateBitField_); 492 return status == MarkStatus::MARK_FINISHED; 493 } 494 GetSharedMarkStatus()495 SharedMarkStatus GetSharedMarkStatus() const 496 { 497 return SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_); 498 } 499 SetSharedMarkStatus(SharedMarkStatus status)500 void SetSharedMarkStatus(SharedMarkStatus status) 501 { 502 SharedMarkStatusBits::Set(status, &glueData_.sharedGCStateBitField_); 503 } 504 IsSharedConcurrentMarkingOrFinished()505 bool IsSharedConcurrentMarkingOrFinished() const 506 { 507 auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_); 508 return status == 
SharedMarkStatus::CONCURRENT_MARKING_OR_FINISHED; 509 } 510 IsReadyToSharedConcurrentMark()511 bool IsReadyToSharedConcurrentMark() const 512 { 513 auto status = SharedMarkStatusBits::Decode(glueData_.sharedGCStateBitField_); 514 return status == SharedMarkStatus::READY_TO_CONCURRENT_MARK; 515 } 516 SetPGOProfilerEnable(bool enable)517 void SetPGOProfilerEnable(bool enable) 518 { 519 PGOProfilerStatus status = 520 enable ? PGOProfilerStatus::PGO_PROFILER_ENABLE : PGOProfilerStatus::PGO_PROFILER_DISABLE; 521 SetInterruptValue<PGOStatusBits>(status); 522 } 523 IsPGOProfilerEnable()524 bool IsPGOProfilerEnable() const 525 { 526 auto status = PGOStatusBits::Decode(glueData_.interruptVector_); 527 return status == PGOProfilerStatus::PGO_PROFILER_ENABLE; 528 } 529 SetBCStubStatus(BCStubStatus status)530 void SetBCStubStatus(BCStubStatus status) 531 { 532 SetInterruptValue<BCStubStatusBits>(status); 533 } 534 GetBCStubStatus()535 BCStubStatus GetBCStubStatus() const 536 { 537 return BCStubStatusBits::Decode(glueData_.interruptVector_); 538 } 539 540 bool ShouldHandleMarkingFinishedInSafepoint(); 541 542 bool CheckSafepoint(); 543 544 void CheckAndPassActiveBarrier(); 545 546 bool PassSuspendBarrier(); 547 SetGetStackSignal(bool isParseStack)548 void SetGetStackSignal(bool isParseStack) 549 { 550 getStackSignal_ = isParseStack; 551 } 552 GetStackSignal()553 bool GetStackSignal() const 554 { 555 return getStackSignal_; 556 } 557 SetNeedProfiling(bool needProfiling)558 void SetNeedProfiling(bool needProfiling) 559 { 560 needProfiling_.store(needProfiling); 561 } 562 SetIsProfiling(bool isProfiling)563 void SetIsProfiling(bool isProfiling) 564 { 565 isProfiling_ = isProfiling; 566 } 567 GetIsProfiling()568 bool GetIsProfiling() const 569 { 570 return isProfiling_; 571 } 572 SetGcState(bool gcState)573 void SetGcState(bool gcState) 574 { 575 gcState_ = gcState; 576 } 577 GetGcState()578 bool GetGcState() const 579 { 580 return gcState_; 581 } 582 SetRuntimeState(bool 
runtimeState)583 void SetRuntimeState(bool runtimeState) 584 { 585 runtimeState_ = runtimeState; 586 } 587 GetRuntimeState()588 bool GetRuntimeState() const 589 { 590 return runtimeState_; 591 } 592 SetMainThread()593 bool SetMainThread() 594 { 595 return isMainThread_ = true; 596 } 597 IsMainThreadFast()598 bool IsMainThreadFast() const 599 { 600 return isMainThread_; 601 } 602 SetCpuProfileName(std::string & profileName)603 void SetCpuProfileName(std::string &profileName) 604 { 605 profileName_ = profileName; 606 } 607 EnableAsmInterpreter()608 void EnableAsmInterpreter() 609 { 610 isAsmInterpreter_ = true; 611 } 612 IsAsmInterpreter()613 bool IsAsmInterpreter() const 614 { 615 return isAsmInterpreter_; 616 } 617 GetVmThreadControl()618 VmThreadControl *GetVmThreadControl() const 619 { 620 return vmThreadControl_; 621 } 622 SetEnableStackSourceFile(bool value)623 void SetEnableStackSourceFile(bool value) 624 { 625 enableStackSourceFile_ = value; 626 } 627 GetEnableStackSourceFile()628 bool GetEnableStackSourceFile() const 629 { 630 return enableStackSourceFile_; 631 } 632 SetEnableLazyBuiltins(bool value)633 void SetEnableLazyBuiltins(bool value) 634 { 635 enableLazyBuiltins_ = value; 636 } 637 GetEnableLazyBuiltins()638 bool GetEnableLazyBuiltins() const 639 { 640 return enableLazyBuiltins_; 641 } 642 SetReadyForGCIterating(bool flag)643 void SetReadyForGCIterating(bool flag) 644 { 645 readyForGCIterating_ = flag; 646 } 647 ReadyForGCIterating()648 bool ReadyForGCIterating() const 649 { 650 return readyForGCIterating_; 651 } 652 GetGlueDataOffset()653 static constexpr size_t GetGlueDataOffset() 654 { 655 return MEMBER_OFFSET(JSThread, glueData_); 656 } 657 GetGlueAddr()658 uintptr_t GetGlueAddr() const 659 { 660 return reinterpret_cast<uintptr_t>(this) + GetGlueDataOffset(); 661 } 662 GlueToJSThread(uintptr_t glue)663 static JSThread *GlueToJSThread(uintptr_t glue) 664 { 665 // very careful to modify here 666 return reinterpret_cast<JSThread *>(glue - 
GetGlueDataOffset()); 667 } 668 SetCheckSafePointStatus()669 void SetCheckSafePointStatus() 670 { 671 ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1); 672 SetInterruptValue<CheckSafePointBit>(true); 673 } 674 ResetCheckSafePointStatus()675 void ResetCheckSafePointStatus() 676 { 677 ASSERT(static_cast<uint8_t>(glueData_.interruptVector_ & 0xFF) <= 1); 678 SetInterruptValue<CheckSafePointBit>(false); 679 } 680 SetVMNeedSuspension(bool flag)681 void SetVMNeedSuspension(bool flag) 682 { 683 SetInterruptValue<VMNeedSuspensionBit>(flag); 684 } 685 VMNeedSuspension()686 bool VMNeedSuspension() 687 { 688 return VMNeedSuspensionBit::Decode(glueData_.interruptVector_); 689 } 690 SetVMSuspended(bool flag)691 void SetVMSuspended(bool flag) 692 { 693 SetInterruptValue<VMHasSuspendedBit>(flag); 694 } 695 IsVMSuspended()696 bool IsVMSuspended() 697 { 698 return VMHasSuspendedBit::Decode(glueData_.interruptVector_); 699 } 700 HasTerminationRequest()701 bool HasTerminationRequest() const 702 { 703 return needTermination_; 704 } 705 SetTerminationRequest(bool flag)706 void SetTerminationRequest(bool flag) 707 { 708 needTermination_ = flag; 709 } 710 SetVMTerminated(bool flag)711 void SetVMTerminated(bool flag) 712 { 713 hasTerminated_ = flag; 714 } 715 HasTerminated()716 bool HasTerminated() const 717 { 718 return hasTerminated_; 719 } 720 721 void TerminateExecution(); 722 SetInstallMachineCode(bool flag)723 void SetInstallMachineCode(bool flag) 724 { 725 SetInterruptValue<InstallMachineCodeBit>(flag); 726 } 727 HasInstallMachineCode()728 bool HasInstallMachineCode() const 729 { 730 return InstallMachineCodeBit::Decode(glueData_.interruptVector_); 731 } 732 GetCurrentStackPosition()733 static uintptr_t GetCurrentStackPosition() 734 { 735 return reinterpret_cast<uintptr_t>(__builtin_frame_address(0)); 736 } 737 738 bool IsLegalAsmSp(uintptr_t sp) const; 739 740 bool IsLegalThreadSp(uintptr_t sp) const; 741 742 bool IsLegalSp(uintptr_t sp) const; 743 
SetCheckAndCallEnterState(bool state)744 void SetCheckAndCallEnterState(bool state) 745 { 746 finalizationCheckState_ = state; 747 } 748 GetCheckAndCallEnterState()749 bool GetCheckAndCallEnterState() const 750 { 751 return finalizationCheckState_; 752 } 753 GetStackStart()754 uint64_t GetStackStart() const 755 { 756 return glueData_.stackStart_; 757 } 758 GetStackLimit()759 uint64_t GetStackLimit() const 760 { 761 return glueData_.stackLimit_; 762 } 763 GetGlueGlobalEnv()764 GlobalEnv *GetGlueGlobalEnv() 765 { 766 return glueData_.glueGlobalEnv_; 767 } 768 SetGlueGlobalEnv(GlobalEnv * global)769 void SetGlueGlobalEnv(GlobalEnv *global) 770 { 771 ASSERT(global != nullptr); 772 glueData_.glueGlobalEnv_ = global; 773 } 774 NewGlobalHandle(JSTaggedType value)775 inline uintptr_t NewGlobalHandle(JSTaggedType value) 776 { 777 return newGlobalHandle_(value); 778 } 779 DisposeGlobalHandle(uintptr_t nodeAddr)780 inline void DisposeGlobalHandle(uintptr_t nodeAddr) 781 { 782 disposeGlobalHandle_(nodeAddr); 783 } 784 785 inline uintptr_t SetWeak(uintptr_t nodeAddr, void *ref = nullptr, WeakClearCallback freeGlobalCallBack = nullptr, 786 WeakClearCallback nativeFinalizeCallBack = nullptr) 787 { 788 return setWeak_(nodeAddr, ref, freeGlobalCallBack, nativeFinalizeCallBack); 789 } 790 ClearWeak(uintptr_t nodeAddr)791 inline uintptr_t ClearWeak(uintptr_t nodeAddr) 792 { 793 return clearWeak_(nodeAddr); 794 } 795 IsWeak(uintptr_t addr)796 inline bool IsWeak(uintptr_t addr) const 797 { 798 return isWeak_(addr); 799 } 800 EnableCrossThreadExecution()801 void EnableCrossThreadExecution() 802 { 803 glueData_.allowCrossThreadExecution_ = true; 804 } 805 IsCrossThreadExecutionEnable()806 bool IsCrossThreadExecutionEnable() const 807 { 808 return glueData_.allowCrossThreadExecution_; 809 } 810 IsFrameDropped()811 bool IsFrameDropped() 812 { 813 return glueData_.isFrameDropped_; 814 } 815 SetFrameDroppedState()816 void SetFrameDroppedState() 817 { 818 glueData_.isFrameDropped_ = true; 819 
} 820 ResetFrameDroppedState()821 void ResetFrameDroppedState() 822 { 823 glueData_.isFrameDropped_ = false; 824 } 825 IsEntryFrameDroppedTrue()826 bool IsEntryFrameDroppedTrue() 827 { 828 return glueData_.entryFrameDroppedState_ == FrameDroppedState::StateTrue; 829 } 830 IsEntryFrameDroppedPending()831 bool IsEntryFrameDroppedPending() 832 { 833 return glueData_.entryFrameDroppedState_ == FrameDroppedState::StatePending; 834 } 835 SetEntryFrameDroppedState()836 void SetEntryFrameDroppedState() 837 { 838 glueData_.entryFrameDroppedState_ = FrameDroppedState::StateTrue; 839 } 840 ResetEntryFrameDroppedState()841 void ResetEntryFrameDroppedState() 842 { 843 glueData_.entryFrameDroppedState_ = FrameDroppedState::StateFalse; 844 } 845 PendingEntryFrameDroppedState()846 void PendingEntryFrameDroppedState() 847 { 848 glueData_.entryFrameDroppedState_ = FrameDroppedState::StatePending; 849 } 850 IsDebugMode()851 bool IsDebugMode() 852 { 853 return glueData_.isDebugMode_; 854 } 855 SetDebugModeState()856 void SetDebugModeState() 857 { 858 glueData_.isDebugMode_ = true; 859 } 860 ResetDebugModeState()861 void ResetDebugModeState() 862 { 863 glueData_.isDebugMode_ = false; 864 } 865 866 template<typename T, typename V> SetInterruptValue(V value)867 void SetInterruptValue(V value) 868 { 869 volatile auto interruptValue = 870 reinterpret_cast<volatile std::atomic<uint64_t> *>(&glueData_.interruptVector_); 871 uint64_t oldValue = interruptValue->load(std::memory_order_relaxed); 872 auto newValue = oldValue; 873 do { 874 newValue = oldValue; 875 T::Set(value, &newValue); 876 } while (!std::atomic_compare_exchange_strong_explicit(interruptValue, &oldValue, newValue, 877 std::memory_order_release, 878 std::memory_order_relaxed)); 879 } 880 881 void InvokeWeakNodeFreeGlobalCallBack(); 882 void InvokeSharedNativePointerCallbacks(); 883 void InvokeWeakNodeNativeFinalizeCallback(); 884 bool IsStartGlobalLeakCheck() const; 885 bool EnableGlobalObjectLeakCheck() const; 886 bool 
EnableGlobalPrimitiveLeakCheck() const; 887 void WriteToStackTraceFd(std::ostringstream &buffer) const; 888 void SetStackTraceFd(int32_t fd); 889 void CloseStackTraceFd(); IncreaseGlobalNumberCount()890 uint32_t IncreaseGlobalNumberCount() 891 { 892 return ++globalNumberCount_; 893 } 894 SetPropertiesGrowStep(uint32_t step)895 void SetPropertiesGrowStep(uint32_t step) 896 { 897 glueData_.propertiesGrowStep_ = step; 898 } 899 GetPropertiesGrowStep()900 uint32_t GetPropertiesGrowStep() const 901 { 902 return glueData_.propertiesGrowStep_; 903 } 904 SetRandomStatePtr(uint64_t * ptr)905 void SetRandomStatePtr(uint64_t *ptr) 906 { 907 glueData_.randomStatePtr_ = reinterpret_cast<uintptr_t>(ptr); 908 } 909 SetTaskInfo(uintptr_t taskInfo)910 void SetTaskInfo(uintptr_t taskInfo) 911 { 912 glueData_.taskInfo_ = taskInfo; 913 } 914 GetTaskInfo()915 uintptr_t GetTaskInfo() const 916 { 917 return glueData_.taskInfo_; 918 } 919 920 void SetJitCodeMap(JSTaggedType exception, MachineCode* machineCode, std::string &methodName, uintptr_t offset); 921 GetJitCodeMaps()922 std::map<JSTaggedType, JitCodeVector*> &GetJitCodeMaps() 923 { 924 return jitCodeMaps_; 925 } 926 927 struct GlueData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(), 928 BCStubEntries, 929 JSTaggedValue, 930 JSTaggedValue, 931 base::AlignedBool, 932 base::AlignedPointer, 933 base::AlignedPointer, 934 base::AlignedPointer, 935 base::AlignedPointer, 936 base::AlignedPointer, 937 base::AlignedPointer, 938 base::AlignedPointer, 939 base::AlignedPointer, 940 base::AlignedPointer, 941 RTStubEntries, 942 COStubEntries, 943 BuiltinStubEntries, 944 BuiltinHClassEntries, 945 BCDebuggerStubEntries, 946 BaselineStubEntries, 947 base::AlignedUint64, 948 base::AlignedUint64, 949 base::AlignedPointer, 950 base::AlignedUint64, 951 base::AlignedUint64, 952 base::AlignedPointer, 953 base::AlignedPointer, 954 base::AlignedUint64, 955 base::AlignedUint64, 956 JSTaggedValue, 957 base::AlignedBool, 958 base::AlignedBool, 
959 base::AlignedUint32, 960 JSTaggedValue, 961 base::AlignedPointer, 962 BuiltinEntries, 963 base::AlignedBool, 964 base::AlignedPointer, 965 base::AlignedPointer, 966 base::AlignedPointer, 967 base::AlignedUint32, 968 base::AlignedBool> { 969 enum class Index : size_t { 970 BcStubEntriesIndex = 0, 971 ExceptionIndex, 972 GlobalObjIndex, 973 StableArrayElementsGuardiansIndex, 974 CurrentFrameIndex, 975 LeaveFrameIndex, 976 LastFpIndex, 977 NewSpaceAllocationTopAddressIndex, 978 NewSpaceAllocationEndAddressIndex, 979 SOldSpaceAllocationTopAddressIndex, 980 SOldSpaceAllocationEndAddressIndex, 981 SNonMovableSpaceAllocationTopAddressIndex, 982 SNonMovableSpaceAllocationEndAddressIndex, 983 RTStubEntriesIndex, 984 COStubEntriesIndex, 985 BuiltinsStubEntriesIndex, 986 BuiltinHClassEntriesIndex, 987 BcDebuggerStubEntriesIndex, 988 BaselineStubEntriesIndex, 989 GCStateBitFieldIndex, 990 SharedGCStateBitFieldIndex, 991 FrameBaseIndex, 992 StackStartIndex, 993 StackLimitIndex, 994 GlueGlobalEnvIndex, 995 GlobalConstIndex, 996 AllowCrossThreadExecutionIndex, 997 InterruptVectorIndex, 998 IsStartHeapSamplingIndex, 999 IsDebugModeIndex, 1000 IsFrameDroppedIndex, 1001 PropertiesGrowStepIndex, 1002 EntryFrameDroppedStateIndex, 1003 CurrentContextIndex, 1004 BuiltinEntriesIndex, 1005 IsTracingIndex, 1006 UnsharedConstpoolsIndex, 1007 RandomStatePtrIndex, 1008 StateAndFlagsIndex, 1009 TaskInfoIndex, 1010 IsEnableElementsKindIndex, 1011 NumOfMembers 1012 }; 1013 static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes); 1014 GetExceptionOffsetGlueData1015 static size_t GetExceptionOffset(bool isArch32) 1016 { 1017 return GetOffset<static_cast<size_t>(Index::ExceptionIndex)>(isArch32); 1018 } 1019 GetGlobalObjOffsetGlueData1020 static size_t GetGlobalObjOffset(bool isArch32) 1021 { 1022 return GetOffset<static_cast<size_t>(Index::GlobalObjIndex)>(isArch32); 1023 } 1024 GetStableArrayElementsGuardiansOffsetGlueData1025 static size_t 
GetStableArrayElementsGuardiansOffset(bool isArch32) 1026 { 1027 return GetOffset<static_cast<size_t>(Index::StableArrayElementsGuardiansIndex)>(isArch32); 1028 } 1029 GetGlobalConstOffsetGlueData1030 static size_t GetGlobalConstOffset(bool isArch32) 1031 { 1032 return GetOffset<static_cast<size_t>(Index::GlobalConstIndex)>(isArch32); 1033 } 1034 GetGCStateBitFieldOffsetGlueData1035 static size_t GetGCStateBitFieldOffset(bool isArch32) 1036 { 1037 return GetOffset<static_cast<size_t>(Index::GCStateBitFieldIndex)>(isArch32); 1038 } 1039 GetSharedGCStateBitFieldOffsetGlueData1040 static size_t GetSharedGCStateBitFieldOffset(bool isArch32) 1041 { 1042 return GetOffset<static_cast<size_t>(Index::SharedGCStateBitFieldIndex)>(isArch32); 1043 } 1044 GetCurrentFrameOffsetGlueData1045 static size_t GetCurrentFrameOffset(bool isArch32) 1046 { 1047 return GetOffset<static_cast<size_t>(Index::CurrentFrameIndex)>(isArch32); 1048 } 1049 GetLeaveFrameOffsetGlueData1050 static size_t GetLeaveFrameOffset(bool isArch32) 1051 { 1052 return GetOffset<static_cast<size_t>(Index::LeaveFrameIndex)>(isArch32); 1053 } 1054 GetLastFpOffsetGlueData1055 static size_t GetLastFpOffset(bool isArch32) 1056 { 1057 return GetOffset<static_cast<size_t>(Index::LastFpIndex)>(isArch32); 1058 } 1059 GetNewSpaceAllocationTopAddressOffsetGlueData1060 static size_t GetNewSpaceAllocationTopAddressOffset(bool isArch32) 1061 { 1062 return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationTopAddressIndex)>(isArch32); 1063 } 1064 GetNewSpaceAllocationEndAddressOffsetGlueData1065 static size_t GetNewSpaceAllocationEndAddressOffset(bool isArch32) 1066 { 1067 return GetOffset<static_cast<size_t>(Index::NewSpaceAllocationEndAddressIndex)>(isArch32); 1068 } 1069 GetSOldSpaceAllocationTopAddressOffsetGlueData1070 static size_t GetSOldSpaceAllocationTopAddressOffset(bool isArch32) 1071 { 1072 return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationTopAddressIndex)>(isArch32); 1073 } 1074 
        // ---- GlueData static offset helpers (group 2): shared-space allocation
        // addresses and the stub-entry tables. Same contract as the group above.
        static size_t GetSOldSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SOldSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetSNonMovableSpaceAllocationTopAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationTopAddressIndex)>(isArch32);
        }

        static size_t GetSNonMovableSpaceAllocationEndAddressOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::SNonMovableSpaceAllocationEndAddressIndex)>(isArch32);
        }

        static size_t GetBCStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BcStubEntriesIndex)>(isArch32);
        }

        static size_t GetRTStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RTStubEntriesIndex)>(isArch32);
        }

        static size_t GetCOStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::COStubEntriesIndex)>(isArch32);
        }

        static size_t GetBaselineStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BaselineStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinsStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinsStubEntriesIndex)>(isArch32);
        }

        static size_t GetBuiltinHClassEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinHClassEntriesIndex)>(isArch32);
        }

        // Composite offsets: position of one builtin type's hclass slot = offset of
        // the builtinHClassEntries_ field + the slot's offset inside BuiltinHClassEntries.
        static size_t GetBuiltinHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetBuiltinHClassOffset(type);
        }

        static size_t GetBuiltinInstanceHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetInstanceHClassOffset(type);
        }

        static size_t GetBuiltinPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetPrototypeHClassOffset(type);
        }

        static size_t GetBuiltinPrototypeOfPrototypeHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) +
                   BuiltinHClassEntries::GetPrototypeOfPrototypeHClassOffset(type);
        }

        static size_t GetBuiltinExtraHClassOffset(BuiltinTypeId type, bool isArch32)
        {
            return GetBuiltinHClassEntriesOffset(isArch32) + BuiltinHClassEntries::GetExtraHClassOffset(type);
        }

        static size_t GetBCDebuggerStubEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BcDebuggerStubEntriesIndex)>(isArch32);
        }

        static size_t GetFrameBaseOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FrameBaseIndex)>(isArch32);
        }

        static size_t GetStackLimitOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StackLimitIndex)>(isArch32);
        }

        static size_t GetGlueGlobalEnvOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::GlueGlobalEnvIndex)>(isArch32);
        }
        // ---- GlueData static offset helpers (group 3): debugger / profiling /
        // context-related fields. Same contract as the groups above.
        static size_t GetAllowCrossThreadExecutionOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::AllowCrossThreadExecutionIndex)>(isArch32);
        }

        static size_t GetInterruptVectorOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::InterruptVectorIndex)>(isArch32);
        }

        static size_t GetIsStartHeapSamplingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsStartHeapSamplingIndex)>(isArch32);
        }

        static size_t GetIsDebugModeOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsDebugModeIndex)>(isArch32);
        }

        static size_t GetIsFrameDroppedOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsFrameDroppedIndex)>(isArch32);
        }

        static size_t GetPropertiesGrowStepOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::PropertiesGrowStepIndex)>(isArch32);
        }

        static size_t GetEntryFrameDroppedStateOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::EntryFrameDroppedStateIndex)>(isArch32);
        }

        static size_t GetCurrentContextOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::CurrentContextIndex)>(isArch32);
        }

        static size_t GetBuiltinEntriesOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BuiltinEntriesIndex)>(isArch32);
        }

        static size_t GetIsTracingOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsTracingIndex)>(isArch32);
        }
        // ---- GlueData static offset helpers (group 4). Same contract as above.
        static size_t GetUnSharedConstpoolsOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::UnsharedConstpoolsIndex)>(isArch32);
        }

        static size_t GetStateAndFlagsOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::StateAndFlagsIndex)>(isArch32);
        }

        static size_t GetRandomStatePtrOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RandomStatePtrIndex)>(isArch32);
        }

        static size_t GetTaskInfoOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::TaskInfoIndex)>(isArch32);
        }

        static size_t GetIsEnableElementsKindOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::IsEnableElementsKindIndex)>(isArch32);
        }

        // GlueData field layout. NOTE: declaration order must match the Index
        // enumerators used by the offset helpers above — do not reorder fields.
        // Every field is alignas(EAS) so the arch32/arch64 offsets stay regular.
        alignas(EAS) BCStubEntries bcStubEntries_ {};
        alignas(EAS) JSTaggedValue exception_ {JSTaggedValue::Hole()};
        alignas(EAS) JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
        alignas(EAS) bool stableArrayElementsGuardians_ {true};
        alignas(EAS) JSTaggedType *currentFrame_ {nullptr};
        alignas(EAS) JSTaggedType *leaveFrame_ {nullptr};
        alignas(EAS) JSTaggedType *lastFp_ {nullptr};
        // Allocation top/end address pairs for fast inline allocation paths.
        alignas(EAS) const uintptr_t *newSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *newSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sOldSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sOldSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationTopAddress_ {nullptr};
        alignas(EAS) const uintptr_t *sNonMovableSpaceAllocationEndAddress_ {nullptr};
        alignas(EAS) RTStubEntries rtStubEntries_ {};
        alignas(EAS) COStubEntries coStubEntries_ {};
        alignas(EAS) BuiltinStubEntries builtinStubEntries_ {};
        alignas(EAS) BuiltinHClassEntries builtinHClassEntries_ {};
        alignas(EAS) BCDebuggerStubEntries bcDebuggerStubEntries_ {};
        alignas(EAS) BaselineStubEntries baselineStubEntries_ {};
        alignas(EAS) volatile uint64_t gcStateBitField_ {0ULL};
        alignas(EAS) volatile uint64_t sharedGCStateBitField_ {0ULL};
        alignas(EAS) JSTaggedType *frameBase_ {nullptr};
        alignas(EAS) uint64_t stackStart_ {0};
        alignas(EAS) uint64_t stackLimit_ {0};
        alignas(EAS) GlobalEnv *glueGlobalEnv_ {nullptr};
        alignas(EAS) GlobalEnvConstants *globalConst_ {nullptr};
        alignas(EAS) bool allowCrossThreadExecution_ {false};
        alignas(EAS) volatile uint64_t interruptVector_ {0};
        alignas(EAS) JSTaggedValue isStartHeapSampling_ {JSTaggedValue::False()};
        alignas(EAS) bool isDebugMode_ {false};
        alignas(EAS) bool isFrameDropped_ {false};
        alignas(EAS) uint32_t propertiesGrowStep_ {JSObjectResizingStrategy::PROPERTIES_GROW_SIZE};
        alignas(EAS) uint64_t entryFrameDroppedState_ {FrameDroppedState::StateFalse};
        alignas(EAS) EcmaContext *currentContext_ {nullptr};
        alignas(EAS) BuiltinEntries builtinEntries_ {};
        alignas(EAS) bool isTracing_ {false};
        alignas(EAS) uintptr_t unsharedConstpools_ {0};
        alignas(EAS) uintptr_t randomStatePtr_ {0};
        // Packed thread state (high 16 bits) and flags (low 16 bits); see
        // ThreadStateAndFlags / THREAD_STATE_OFFSET.
        alignas(EAS) ThreadStateAndFlags stateAndFlags_ {};
        alignas(EAS) uintptr_t taskInfo_ {0};
        alignas(EAS) bool isEnableElementsKind_ {false};
    };
    // Guard the glue layout: both per-arch sizes must match the computed layout.
    STATIC_ASSERT_EQ_ARCH(sizeof(GlueData), GlueData::SizeArch32, GlueData::SizeArch64);

    void PushContext(EcmaContext *context);
    void PopContext();

    // Returns the EcmaContext currently installed in the glue data.
    EcmaContext *GetCurrentEcmaContext() const
    {
        return glueData_.currentContext_;
    }

    // Returns the single-character string table from the global constants.
    // Asserts that the table has been initialized (is not Hole).
    JSTaggedValue GetSingleCharTable() const
    {
        ASSERT(glueData_.globalConst_->GetSingleCharTable() != JSTaggedValue::Hole());
        return glueData_.globalConst_->GetSingleCharTable();
    }

    void SwitchCurrentContext(EcmaContext *currentContext, bool isInIterate = false);

    // Note: returns the context list by value (a copy).
    CVector<EcmaContext *> GetEcmaContexts()
    {
        return contexts_;
    }

    bool IsPropertyCacheCleared() const;

    bool EraseContext(EcmaContext *context);
    void ClearContextCachedConstantPool();

    const GlobalEnvConstants *GetFirstGlobalConst() const;
    bool IsAllContextsInitialized() const;
    bool IsReadyToUpdateDetector() const;
    Area *GetOrCreateRegExpCache();

    void InitializeBuiltinObject(const std::string& key);
    void InitializeBuiltinObject();

    // ---- Full (local) GC mark request flag ----
    bool FullMarkRequest() const
    {
        return fullMarkRequest_;
    }

    void SetFullMarkRequest()
    {
        fullMarkRequest_ = true;
    }

    void ResetFullMarkRequest()
    {
        fullMarkRequest_ = false;
    }

    // ---- Shared-heap collection of the local-to-shared remembered set ----
    void SetProcessingLocalToSharedRset(bool processing)
    {
        processingLocalToSharedRset_ = processing;
    }

    bool IsProcessingLocalToSharedRset() const
    {
        return processingLocalToSharedRset_;
    }

    inline bool IsThreadSafe() const
    {
        return IsMainThread() || HasSuspendRequest();
    }

    // Suspended means: a suspend request is pending AND the thread has actually
    // left the RUNNING state.
    bool IsSuspended() const
    {
        bool f = ReadFlag(ThreadFlag::SUSPEND_REQUEST);
        bool s = (GetState() != ThreadState::RUNNING);
        return f && s;
    }

    inline bool HasSuspendRequest() const
    {
        return ReadFlag(ThreadFlag::SUSPEND_REQUEST);
    }

    // Blocks in WaitSuspension() if another thread has requested a suspend.
    void CheckSafepointIfSuspended()
    {
        if (HasSuspendRequest()) {
            WaitSuspension();
        }
    }

    bool IsInSuspendedState() const
    {
        return GetState() == ThreadState::IS_SUSPENDED;
    }

    bool IsInRunningState() const
    {
        return GetState() == ThreadState::RUNNING;
    }

    bool IsInRunningStateOrProfiling() const;

    // Reads the packed state/flags word with acquire ordering and extracts the
    // state from the high 16 bits (see THREAD_STATE_OFFSET).
    ThreadState GetState() const
    {
        uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
        return static_cast<enum ThreadState>(stateAndFlags >> THREAD_STATE_OFFSET);
    }

    void PUBLIC_API UpdateState(ThreadState newState);
    void SuspendThread(bool internalSuspend, SuspendBarrier* barrier = nullptr);
    void ResumeThread(bool internalSuspend);
    void WaitSuspension();
    static bool IsMainThread();
    PUBLIC_API void ManagedCodeBegin();
    PUBLIC_API void ManagedCodeEnd();
#ifndef NDEBUG
    bool IsInManagedState() const;
    MutatorLock::MutatorLockState GetMutatorLockState() const;
    void SetMutatorLockState(MutatorLock::MutatorLockState newState);
#endif
    void SetWeakFinalizeTaskCallback(const WeakFinalizeTaskCallback &callback)
    {
        finalizeTaskCallback_ = callback;
    }

    // Returns the next job id. Ids run 1..UINT64_MAX and wrap back to 1; 0 is
    // never returned (the counter is reset to 0 before the pre-increment).
    uint64_t GetJobId()
    {
        if (jobId_ == UINT64_MAX) {
            jobId_ = 0;
        }
        return ++jobId_;
    }

    void SetAsyncCleanTaskCallback(const NativePointerTaskCallback &callback)
    {
        asyncCleanTaskCb_ = callback;
    }

    NativePointerTaskCallback GetAsyncCleanTaskCallback() const
    {
        return asyncCleanTaskCb_;
    }

    static void RegisterThread(JSThread *jsThread);

    static void UnregisterThread(JSThread *jsThread);

    // ---- Thread-type queries (see ThreadType) ----
    bool IsJSThread() const
    {
        return threadType_ == ThreadType::JS_THREAD;
    }

    bool IsJitThread() const
    {
        return threadType_ == ThreadType::JIT_THREAD;
    }
    bool IsDaemonThread() const
    {
        return threadType_ == ThreadType::DAEMON_THREAD;
    }

    // Daemon threads and JS threads transition differently: when transitioning to
    // RUNNING, a JS thread may take some local-GC actions that a daemon thread
    // does not need.
    void TransferDaemonThreadToRunning();

    RecursiveMutex *GetJitLock()
    {
        return &jitMutex_;
    }

    RecursiveMutex &GetProfileTypeAccessorLock()
    {
        return profileTypeAccessorLockMutex_;
    }

    void SetMachineCodeLowMemory(bool isLow)
    {
        machineCodeLowMemory_ = isLow;
    }

    bool IsMachineCodeLowMemory()
    {
        return machineCodeLowMemory_;
    }

    void *GetEnv() const
    {
        return env_;
    }

    void SetEnv(void *env)
    {
        env_ = env;
    }

    void SetIsInConcurrentScope(bool flag)
    {
        isInConcurrentScope_ = flag;
    }

    bool IsInConcurrentScope()
    {
        return isInConcurrentScope_;
    }

    // Redirects the regular write-barrier stub/runtime entries to their
    // "...WithEden" variants, so subsequent barrier calls take the eden path.
    void EnableEdenGCBarriers()
    {
        auto setValueStub = GetFastStubEntry(kungfu::CommonStubCSigns::SetValueWithEdenBarrier);
        SetFastStubEntry(kungfu::CommonStubCSigns::SetValueWithBarrier, setValueStub);
        auto markStub = GetRTInterface(kungfu::RuntimeStubCSigns::ID_MarkingBarrierWithEden);
        RegisterRTInterface(kungfu::RuntimeStubCSigns::ID_MarkingBarrier, markStub);
        auto setNotShareValueStub = GetFastStubEntry(kungfu::CommonStubCSigns::SetNonSValueWithEdenBarrier);
        SetFastStubEntry(kungfu::CommonStubCSigns::SetNonSValueWithBarrier, setNotShareValueStub);
        auto asmCheckStub = GetRTInterface(kungfu::RuntimeStubCSigns::ID_ASMWriteBarrierWithEden);
        RegisterRTInterface(kungfu::RuntimeStubCSigns::ID_ASMFastWriteBarrier, asmCheckStub);
    }

    DateUtils *GetDateUtils() const
    {
        return dateUtils_;
    }

#ifndef NDEBUG
    // ---- Debug-only bookkeeping for suspend-all operations ----
    inline void LaunchSuspendAll()
    {
        launchedSuspendAll_ = true;
    }

    inline bool HasLaunchedSuspendAll() const
    {
        return launchedSuspendAll_;
    }

    inline void CompleteSuspendAll()
    {
        launchedSuspendAll_ = false;
    }
#endif

protected:
    // Records the calling OS thread's id with release ordering.
    void SetThreadId()
    {
        id_.store(JSThread::GetCurrentThreadId(), std::memory_order_release);
    }

    // When EcmaVM::PreFork() is called, the std::thread backing the daemon thread
    // is finished, but the daemon JSThread instance is still alive and its thread
    // id needs to be reset to 0.
    void ResetThreadId()
    {
        id_.store(0, std::memory_order_release);
    }

private:
    NO_COPY_SEMANTIC(JSThread);
    NO_MOVE_SEMANTIC(JSThread);

    void SetGlobalConst(GlobalEnvConstants *globalConst)
    {
        glueData_.globalConst_ = globalConst;
    }

    void SetCurrentEcmaContext(EcmaContext *context)
    {
        glueData_.currentContext_ = context;
    }

    void SetArrayHClassIndexMap(const CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> &map)
    {
        arrayHClassIndexMap_ = map;
    }

    void TransferFromRunningToSuspended(ThreadState newState);

    void TransferToRunning();

    inline void StoreState(ThreadState newState);

    void StoreRunningState(ThreadState newState);

    void StoreSuspendedState(ThreadState newState);

    // Tests one flag bit in the low 16 bits of the packed state/flags word
    // (acquire load; see THREAD_FLAGS_MASK).
    bool ReadFlag(ThreadFlag flag) const
    {
        uint32_t stateAndFlags = glueData_.stateAndFlags_.asAtomicInt.load(std::memory_order_acquire);
        uint16_t flags = (stateAndFlags & THREAD_FLAGS_MASK);
        return (flags & static_cast<uint16_t>(flag)) != 0;
    }

    // Atomically sets a flag bit (seq_cst RMW).
    void SetFlag(ThreadFlag flag)
    {
        glueData_.stateAndFlags_.asAtomicInt.fetch_or(flag, std::memory_order_seq_cst);
    }

    // Atomically clears a flag bit (seq_cst RMW).
    void ClearFlag(ThreadFlag flag)
    {
        glueData_.stateAndFlags_.asAtomicInt.fetch_and(UINT32_MAX ^ flag, std::memory_order_seq_cst);
    }

    void DumpStack() DUMP_API_ATTR;

    static size_t GetAsmStackLimit();

    static constexpr size_t DEFAULT_MAX_SYSTEM_STACK_SIZE = 8_MB;

    // glueData_ must be directly accessible to generated code; see GlueData above.
    GlueData glueData_;
    std::atomic<ThreadId> id_ {0};
    EcmaVM *vm_ {nullptr};
    void *env_ {nullptr};
    Area *regExpCache_ {nullptr};

    // MM: handles, global-handles, and aot-stubs.
    int nestedLevel_ = 0;
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
    bool runningNativeFinalizeCallbacks_ {false};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeFreeGlobalCallbacks_ {};
    std::vector<std::pair<WeakClearCallback, void *>> weakNodeNativeFinalizeCallbacks_ {};

    EcmaGlobalStorage<Node> *globalStorage_ {nullptr};
    EcmaGlobalStorage<DebugNode> *globalDebugStorage_ {nullptr};
    int32_t stackTraceFd_ {-1};

    // Callbacks for manipulating global handles/weak references.
    std::function<uintptr_t(JSTaggedType value)> newGlobalHandle_;
    std::function<void(uintptr_t nodeAddr)> disposeGlobalHandle_;
    std::function<uintptr_t(uintptr_t nodeAddr, void *ref, WeakClearCallback freeGlobalCallBack_,
                            WeakClearCallback nativeFinalizeCallBack)> setWeak_;
    std::function<uintptr_t(uintptr_t nodeAddr)> clearWeak_;
    std::function<bool(uintptr_t addr)> isWeak_;
    NativePointerTaskCallback asyncCleanTaskCb_ {nullptr};
    WeakFinalizeTaskCallback finalizeTaskCallback_ {nullptr};
    uint32_t globalNumberCount_ {0};

    // Run-time state
    bool getStackSignal_ {false};
    bool runtimeState_ {false};
    bool isAsmInterpreter_ {false};
    VmThreadControl *vmThreadControl_ {nullptr};
    bool enableStackSourceFile_ {true};
    bool enableLazyBuiltins_ {false};
    bool readyForGCIterating_ {false};
    // CpuProfiler
    bool isProfiling_ {false};
    bool gcState_ {false};
    std::atomic_bool needProfiling_ {false};
    std::string profileName_ {""};

    bool finalizationCheckState_ {false};
    // Shared heap
    bool isMainThread_ {false};
    bool fullMarkRequest_ {false};
    // Shared heap collect local heap Rset
    bool processingLocalToSharedRset_ {false};

    // { ElementsKind, (hclass, hclassWithProto) }
    CMap<ElementsKind, std::pair<ConstantIndex, ConstantIndex>> arrayHClassIndexMap_;
    CMap<JSHClass *, GlobalIndex> ctorHclassEntries_;

    // All contexts attached to this thread; currentContext_ is the active one.
    CVector<EcmaContext *> contexts_;
    EcmaContext *currentContext_ {nullptr};

    // Suspension machinery: count + condvar guarded by suspendLock_.
    Mutex suspendLock_;
    int32_t suspendCount_ {0};
    ConditionVariable suspendCondVar_;
    SuspendBarrier *suspendBarrier_ {nullptr};

    uint64_t jobId_ {0};

    ThreadType threadType_ {ThreadType::JS_THREAD};
    RecursiveMutex jitMutex_;
    bool machineCodeLowMemory_ {false};
    RecursiveMutex profileTypeAccessorLockMutex_;
    DateUtils *dateUtils_ {nullptr};

#ifndef NDEBUG
    MutatorLock::MutatorLockState mutatorLockState_ = MutatorLock::MutatorLockState::UNLOCKED;
    std::atomic<bool> launchedSuspendAll_ {false};
#endif
    // Collect a map from JsError to MachineCode objects; JsError objects whose stack
    // frames were generated by the JIT are in the map. It is used to keep MachineCode
    // objects alive (for dump) until the JsError object is freed.
    std::map<JSTaggedType, JitCodeVector*> jitCodeMaps_;

    std::atomic<bool> needTermination_ {false};
    std::atomic<bool> hasTerminated_ {false};

    bool isInConcurrentScope_ {false};

    friend class GlobalHandleCollection;
    friend class EcmaVM;
    friend class EcmaContext;
    friend class JitVM;
};
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_JS_THREAD_H