/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_JS_THREAD_H
#define ECMASCRIPT_JS_THREAD_H

#include "include/managed_thread.h"

#include "ecmascript/compiler/fast_stub_define.h"
#include "ecmascript/ecma_global_storage.h"
#include "ecmascript/frames.h"
#include "ecmascript/global_env_constants.h"
#include "ecmascript/mem/object_xray.h"

namespace panda::ecmascript {
class EcmaVM;
class HeapRegionAllocator;
class InternalCallParams;
class PropertiesCache;
enum class MarkStatus : uint8_t {
    READY_TO_MARK,
    MARKING,
    MARK_FINISHED,
};

class JSThread : public ManagedThread {
public:
    static constexpr int CONCURRENT_MARKING_BITFIELD_NUM = 2;
    using MarkStatusBits = BitField<MarkStatus, 0, CONCURRENT_MARKING_BITFIELD_NUM>;
    using Address = uintptr_t;
    using VMNeedSuspensionBit = MarkStatusBits::NextFlag;
    using VMHasSuspendedBit = VMNeedSuspensionBit::NextFlag;
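    // threadStateBitField_ layout: the low CONCURRENT_MARKING_BITFIELD_NUM bits hold the
    // MarkStatus, VMNeedSuspensionBit is the next bit up, and VMHasSuspendedBit follows it.
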
    static JSThread *Cast(ManagedThread *thread)
    {
        ASSERT(thread != nullptr);
        return reinterpret_cast<JSThread *>(thread);
    }

    JSThread(Runtime *runtime, PandaVM *vm);

    ~JSThread() override;

    EcmaVM *GetEcmaVM() const;

    static JSThread *Create(Runtime *runtime, PandaVM *vm);

    int GetNestedLevel() const
    {
        return nestedLevel_;
    }

    void SetNestedLevel(int level)
    {
        nestedLevel_ = level;
    }

    const JSTaggedType *GetCurrentSPFrame() const
    {
        return currentFrame_;
    }

    void SetCurrentSPFrame(JSTaggedType *sp)
    {
        currentFrame_ = sp;
    }

    const JSTaggedType *GetLastLeaveFrame() const
    {
        return leaveFrame_;
    }

    void SetLastLeaveFrame(JSTaggedType *sp)
    {
        leaveFrame_ = sp;
    }

    bool DoStackOverflowCheck(const JSTaggedType *sp);

    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    void Iterate(const RootVisitor &v0, const RootRangeVisitor &v1);

    PUBLIC_API uintptr_t *ExpandHandleStorage();
    void ShrinkHandleStorage(int prevIndex);

    JSTaggedType *GetHandleScopeStorageNext() const
    {
        return handleScopeStorageNext_;
    }

    void SetHandleScopeStorageNext(JSTaggedType *value)
    {
        handleScopeStorageNext_ = value;
    }

    JSTaggedType *GetHandleScopeStorageEnd() const
    {
        return handleScopeStorageEnd_;
    }

    void SetHandleScopeStorageEnd(JSTaggedType *value)
    {
        handleScopeStorageEnd_ = value;
    }

    int GetCurrentHandleStorageIndex()
    {
        return currentHandleStorageIndex_;
    }

    void HandleScopeCountAdd()
    {
        handleScopeCount_++;
    }

    void HandleScopeCountDec()
    {
        handleScopeCount_--;
    }

    void SetException(JSTaggedValue exception);

    JSTaggedValue GetException() const
    {
        return exception_;
    }

    bool HasPendingException() const
    {
        return !exception_.IsHole();
    }

    void ClearException();

    EcmaGlobalStorage *GetEcmaGlobalStorage() const
    {
        return globalStorage_;
    }

    void SetGlobalObject(JSTaggedValue globalObject)
    {
        globalObject_ = globalObject;
    }

    const GlobalEnvConstants *GlobalConstants() const
    {
        return &globalConst_;
    }

    void NotifyStableArrayElementsGuardians(JSHandle<JSObject> receiver);

    bool IsStableArrayElementsGuardiansInvalid() const
    {
        return !stableArrayElementsGuardians_;
    }

    void ResetGuardians();

    JSTaggedValue GetCurrentLexenv() const;

    void SetRuntimeFunction(uint32_t id, Address functionAddress)
    {
        ASSERT(id < kungfu::EXTERNAL_RUNTIME_STUB_MAXCOUNT);
        runtimeFunctions_[id] = functionAddress;
    }

    Address GetFastStubEntry(uint32_t id)
    {
        ASSERT(id < kungfu::FAST_STUB_MAXCOUNT);
        return fastStubEntries_[id];
    }

    void SetFastStubEntry(uint32_t id, Address entry)
    {
        ASSERT(id < kungfu::FAST_STUB_MAXCOUNT);
        fastStubEntries_[id] = entry;
    }

    Address *GetBytecodeHandler()
    {
        return &bytecodeHandlers_[0];
    }

    void InitializeFastRuntimeStubs();

    void LoadStubModule(const char *moduleFile);

    InternalCallParams *GetInternalCallParams() const
    {
        return internalCallParams_;
    }

    ThreadId GetThreadId() const
    {
        return GetId();
    }

    static ThreadId GetCurrentThreadId()
    {
        return os::thread::GetCurrentThreadId();
    }

    void IterateWeakEcmaGlobalStorage(const WeakRootVisitor &visitor);

    PropertiesCache *GetPropertiesCache() const
    {
        return propertiesCache_;
    }

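    // The constexpr getters below expose the byte offsets of the GLUE members within JSThread.
    // They are presumably consumed by generated stub code that addresses these fields relative
    // to the glue pointer (see GetGlueAddr()), and they are cross-checked against
    // GLUE_OFFSET_LIST by the static_asserts at the end of this file.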
    static constexpr uint32_t GetPropertiesCacheOffset()
    {
        return MEMBER_OFFSET(JSThread, propertiesCache_);
    }

    static constexpr uint32_t GetGlobalObjectOffset()
    {
        return MEMBER_OFFSET(JSThread, globalObject_);
    }

    static constexpr uint32_t GetGlobalConstantsOffset()
    {
        return MEMBER_OFFSET(JSThread, globalConst_);
    }

    static constexpr uint32_t GetGlobalStorageOffset()
    {
        return MEMBER_OFFSET(JSThread, globalStorage_);
    }

    static constexpr uint32_t GetCurrentFrameOffset()
    {
        return MEMBER_OFFSET(JSThread, currentFrame_);
    }

    static constexpr uint32_t GetLeaveFrameOffset()
    {
        return MEMBER_OFFSET(JSThread, leaveFrame_);
    }

    static constexpr uint32_t GetRuntimeFunctionsOffset()
    {
        return MEMBER_OFFSET(JSThread, runtimeFunctions_);
    }

    static constexpr uint32_t GetFastStubEntriesOffset()
    {
        return MEMBER_OFFSET(JSThread, fastStubEntries_);
    }

    static constexpr uint32_t GetBytecodeHandlersOffset()
    {
        return MEMBER_OFFSET(JSThread, bytecodeHandlers_);
    }

    void SetMarkStatus(MarkStatus status)
    {
        MarkStatusBits::Set(status, &threadStateBitField_);
    }

    bool IsReadyToMark() const
    {
        auto status = MarkStatusBits::Decode(threadStateBitField_);
        return status == MarkStatus::READY_TO_MARK;
    }

    bool IsMarking() const
    {
        auto status = MarkStatusBits::Decode(threadStateBitField_);
        return status == MarkStatus::MARKING;
    }

    bool IsMarkFinished() const
    {
        auto status = MarkStatusBits::Decode(threadStateBitField_);
        return status == MarkStatus::MARK_FINISHED;
    }

    void SetVMNeedSuspension(bool flag)
    {
        uint64_t newVal = VMNeedSuspensionBit::Update(threadStateBitField_, flag);
        threadStateBitField_ = newVal;
    }

    bool VMNeedSuspension()
    {
        return VMNeedSuspensionBit::Decode(threadStateBitField_);
    }

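    // Cooperative VM suspension: NotifyVMThreadSuspension() and SuspendVM() presumably raise
    // VMNeedSuspensionBit, CheckSafepoint() is where the JS thread observes the request, and
    // ResumeVM() lifts it again; the suspension mutex and condition variables declared below
    // coordinate the handshake.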
    bool CheckSafepoint();

    void SuspendVM();

    void ResumeVM();

    bool NotifyVMThreadSuspension();

    void SetVMSuspened(bool flag)
    {
        uint64_t newVal = VMHasSuspendedBit::Update(threadStateBitField_, flag);
        threadStateBitField_ = newVal;
    }

    bool IsSuspended()
    {
        return VMHasSuspendedBit::Decode(threadStateBitField_);
    }

    void SetGetStackSignal(bool isParseStack)
    {
        getStackSignal_ = isParseStack;
    }

    bool GetStackSignal() const
    {
        return getStackSignal_;
    }

    void SetGcState(bool gcState)
    {
        gcState_ = gcState;
    }

    bool GetGcState() const
    {
        return gcState_;
    }

    static constexpr uint32_t GetExceptionOffset()
    {
        return MEMBER_OFFSET(JSThread, exception_);
    }

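    // The glue address is the address of exception_, the first GLUE member: GetGlueAddr() adds
    // GetExceptionOffset() to this, and GlueToJSThread() subtracts it to recover the JSThread.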
    uintptr_t GetGlueAddr() const
    {
        return reinterpret_cast<uintptr_t>(this) + GetExceptionOffset();
    }

    static JSThread *GlueToJSThread(uintptr_t glue)
    {
        // Be very careful when modifying this conversion.
        return reinterpret_cast<JSThread *>(glue - GetExceptionOffset());
    }

    static constexpr uint32_t MAX_RUNTIME_FUNCTIONS = kungfu::EXTERNAL_RUNTIME_STUB_MAXCOUNT;
    static constexpr uint32_t MAX_BYTECODE_HANDLERS = 0x100;
    // The sequence must be the same as that of the GLUE members.
    enum class GlueID : uint8_t {
        EXCEPTION = 0U,
        GLOBAL_OBJECT,
        GLOBAL_CONST,
        PROPERTIES_CACHE,
        GLOBAL_STORAGE,
        CURRENT_FRAME,
        LEAVE_FRAME,
        BYTECODE_HANDLERS,
        RUNTIME_FUNCTIONS,
        FAST_STUB_ENTRIES,
        FRAME_STATE_SIZE,
        GLUE_FRAME_CONSTPOOL,
        GLUE_FRAME_PROFILE,
        GLUE_FRAME_ACC,
        NUMBER_OF_GLUE,
    };

private:
    NO_COPY_SEMANTIC(JSThread);
    NO_MOVE_SEMANTIC(JSThread);

    void DumpStack() DUMP_API_ATTR;

    static constexpr uint32_t MAX_STACK_SIZE = 128 * 1024;
    static constexpr uint32_t RESERVE_STACK_SIZE = 128;
    static const uint32_t NODE_BLOCK_SIZE_LOG2 = 10;
    static const uint32_t NODE_BLOCK_SIZE = 1U << NODE_BLOCK_SIZE_LOG2;
    static constexpr int32_t MIN_HANDLE_STORAGE_SIZE = 2;

    // MM: handles, global-handles, and aot-stubs.
    int nestedLevel_ = 0;
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};
    JSTaggedType *handleScopeStorageNext_ {nullptr};
    JSTaggedType *handleScopeStorageEnd_ {nullptr};
    std::vector<std::array<JSTaggedType, NODE_BLOCK_SIZE> *> handleStorageNodes_ {};
    int32_t currentHandleStorageIndex_ {-1};
    int32_t handleScopeCount_ {0};
    JSTaggedValue stubCode_ {JSTaggedValue::Hole()};

    // Run-time state
    bool getStackSignal_ {false};
    bool gcState_ {false};
    volatile uint64_t threadStateBitField_ {0ULL};
    os::memory::Mutex vmThreadSuspensionMutex_;
    os::memory::ConditionVariable vmThreadNeedSuspensionCV_;
    os::memory::ConditionVariable vmThreadHasSuspendedCV_;

    JSTaggedType *frameBase_ {nullptr};
    bool stableArrayElementsGuardians_ {true};
    InternalCallParams *internalCallParams_ {nullptr};

    // GLUE members start here; be very careful when modifying them (the order must match
    // GLUE_OFFSET_LIST below).
    JSTaggedValue exception_ {JSTaggedValue::Hole()};
    JSTaggedValue globalObject_ {JSTaggedValue::Hole()};
    GlobalEnvConstants globalConst_;  // Placeholder
    PropertiesCache *propertiesCache_ {nullptr};
    EcmaGlobalStorage *globalStorage_ {nullptr};
    JSTaggedType *currentFrame_ {nullptr};
    JSTaggedType *leaveFrame_ {nullptr};
    Address bytecodeHandlers_[MAX_BYTECODE_HANDLERS];
    Address runtimeFunctions_[MAX_RUNTIME_FUNCTIONS];
    Address fastStubEntries_[kungfu::FAST_STUB_MAXCOUNT];

    friend class EcmaHandleScope;
    friend class GlobalHandleCollection;
};

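// Each GLUE_OFFSET_LIST entry has the form V(NAME, CamelName, PREVIOUS_NAME, prevSize32, prevSize64):
// the GLUE member being described, its accessor name, the member that precedes it, and the size of
// that preceding member on 32-bit and 64-bit targets. GLUE_OFFSET_MACRO below folds these into
// running offsets measured from exception_.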
#define GLUE_OFFSET_LIST(V)                                                                      \
    V(GLOBAL_OBJECT, GlobalObject, EXCEPTION,                                                    \
        JSTaggedValue::TaggedTypeSize(), JSTaggedValue::TaggedTypeSize())                        \
    V(GLOBAL_CONSTANTS, GlobalConstants, GLOBAL_OBJECT,                                          \
        JSTaggedValue::TaggedTypeSize(), JSTaggedValue::TaggedTypeSize())                        \
    V(PROPERTIES_CACHE, PropertiesCache, GLOBAL_CONSTANTS,                                       \
        static_cast<uint32_t>(ConstantIndex::CONSTATNT_COUNT) * JSTaggedValue::TaggedTypeSize(), \
        static_cast<uint32_t>(ConstantIndex::CONSTATNT_COUNT) * JSTaggedValue::TaggedTypeSize()) \
    V(GLOBAL_STORAGE, GlobalStorage, PROPERTIES_CACHE, sizeof(uint32_t), sizeof(uint64_t))       \
    V(CURRENT_FRAME, CurrentFrame, GLOBAL_STORAGE, sizeof(uint32_t), sizeof(uint64_t))           \
    V(LEAVE_FRAME, LeaveFrame, CURRENT_FRAME, sizeof(uint32_t), sizeof(uint64_t))                \
    V(BYTECODE_HANDLERS, BytecodeHandlers, LEAVE_FRAME, sizeof(uint32_t), sizeof(uint64_t))      \
    V(RUNTIME_FUNCTIONS, RuntimeFunctions, BYTECODE_HANDLERS,                                    \
        JSThread::MAX_BYTECODE_HANDLERS * sizeof(uint32_t),                                      \
        JSThread::MAX_BYTECODE_HANDLERS * sizeof(uint64_t))                                      \
    V(FASTSTUB_ENTRIES, FastStubEntries, RUNTIME_FUNCTIONS,                                      \
        JSThread::MAX_RUNTIME_FUNCTIONS * sizeof(uint32_t),                                      \
        JSThread::MAX_RUNTIME_FUNCTIONS * sizeof(uint64_t))                                      \

static constexpr uint32_t GLUE_EXCEPTION_OFFSET_32 = 0U;
static constexpr uint32_t GLUE_EXCEPTION_OFFSET_64 = 0U;
#define GLUE_OFFSET_MACRO(name, camelName, lastName, lastSize32, lastSize64)                        \
    static constexpr uint32_t GLUE_##name##_OFFSET_32 = GLUE_##lastName##_OFFSET_32 + (lastSize32); \
    static constexpr uint32_t GLUE_##name##_OFFSET_64 = GLUE_##lastName##_OFFSET_64 + (lastSize64);
GLUE_OFFSET_LIST(GLUE_OFFSET_MACRO)
#undef GLUE_OFFSET_MACRO
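// For illustration, the first list entry expands through GLUE_OFFSET_MACRO to:
//     static constexpr uint32_t GLUE_GLOBAL_OBJECT_OFFSET_32 =
//         GLUE_EXCEPTION_OFFSET_32 + (JSTaggedValue::TaggedTypeSize());
//     static constexpr uint32_t GLUE_GLOBAL_OBJECT_OFFSET_64 =
//         GLUE_EXCEPTION_OFFSET_64 + (JSTaggedValue::TaggedTypeSize());
// so each member's offset is the previous member's offset plus the previous member's size.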

// Static checks: the asm glue offsets must match the C++ layout defined above.
#include "trampoline/ecma_asm_defines.h"

#ifdef PANDA_TARGET_32
#define GLUE_OFFSET_MACRO(name, camelName, lastName, lastSize32, lastSize64)                   \
static_assert(GLUE_##name##_OFFSET_32 ==                                                       \
    (JSThread::Get##camelName##Offset() - JSThread::GetExceptionOffset()));
GLUE_OFFSET_LIST(GLUE_OFFSET_MACRO)
#undef GLUE_OFFSET_MACRO
// Check that the asm glue offset definitions are the same as the C++ (clang) layout.
// static_assert(GLUE_CURRENT_FRAME_OFFSET_32 == ASM_GLUE_CURRENT_FRAME_OFFSET)
// static_assert(GLUE_RUNTIME_FUNCTIONS_OFFSET_32 == ASM_GLUE_RUNTIME_FUNCTIONS_OFFSET)
#endif

#ifdef PANDA_TARGET_64
#define GLUE_OFFSET_MACRO(name, camelName, lastName, lastSize32, lastSize64)                   \
static_assert(GLUE_##name##_OFFSET_64 ==                                                       \
    (JSThread::Get##camelName##Offset() - JSThread::GetExceptionOffset()));
GLUE_OFFSET_LIST(GLUE_OFFSET_MACRO)
#undef GLUE_OFFSET_MACRO
// Check that the asm glue offset definitions are the same as the C++ (clang) layout.
#endif
}  // namespace panda::ecmascript
#endif  // ECMASCRIPT_JS_THREAD_H