/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_METHOD_H
#define PANDA_RUNTIME_METHOD_H

#include <atomic>
#include <cstdint>
#include <functional>
#include <string_view>

#include "intrinsics_enum.h"
#include "libpandabase/utils/arch.h"
#include "libpandabase/utils/logger.h"
#include "libpandafile/code_data_accessor-inl.h"
#include "libpandafile/file.h"
#include "libpandafile/file_items.h"
#include "libpandafile/method_data_accessor.h"
#include "libpandafile/modifiers.h"
#include "runtime/bridge/bridge.h"
#include "runtime/include/compiler_interface.h"
#include "runtime/include/class_helper.h"
#include "runtime/include/mem/panda_containers.h"
#include "runtime/include/mem/panda_smart_pointers.h"
#include "runtime/interpreter/frame.h"
#include "value.h"

namespace panda {

class Class;
class ManagedThread;
class ProfilingData;

#ifdef PANDA_ENABLE_GLOBAL_REGISTER_VARIABLES
namespace interpreter {
class AccVRegisterT;
}  // namespace interpreter
using interpreter::AccVRegisterT;
#else
namespace interpreter {
using AccVRegisterT = AccVRegister;
}  // namespace interpreter
#endif

class FrameDeleter {
public:
    explicit FrameDeleter(ManagedThread *thread) : thread_(thread) {}

    void operator()(Frame *frame) const;

private:
    ManagedThread *thread_;
};
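
// Illustrative usage sketch (not part of this header): FrameDeleter is intended as the custom deleter
// of a smart pointer that owns an interpreter Frame, as in the PandaUniquePtr<Frame, FrameDeleter>
// returned by Method::InitFrame() below. A hand-rolled equivalent could look like this; how the raw
// frame is obtained is an assumption made only for the example.
//
//     Frame *raw = /* frame allocated for `thread` */;
//     PandaUniquePtr<Frame, FrameDeleter> frame(raw, FrameDeleter(thread));
//     // ... use frame.get() ...
//     // when `frame` is destroyed, FrameDeleter::operator()(Frame *) releases the frame on `thread`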

class Method {
public:
    using UniqId = uint64_t;

    enum CompilationStage {
        NOT_COMPILED,
        WAITING,
        COMPILATION,
        COMPILED,
        FAILED,
    };

    enum class VerificationStage { NOT_VERIFIED = 0, VERIFIED_FAIL = 1, VERIFIED_OK = 2, LAST = VERIFIED_OK };

    static_assert(MinimumBitsToStore(VerificationStage::LAST) <= VERIFICATION_STATUS_WIDTH);

    using AnnotationField = panda_file::MethodDataAccessor::AnnotationField;

    class Proto {
    public:
        using ShortyVector = PandaSmallVector<panda_file::Type>;
        using RefTypeVector = PandaSmallVector<std::string_view>;
        Proto() = default;

        Proto(const panda_file::File &pf, panda_file::File::EntityId protoId);

        Proto(ShortyVector shorty, RefTypeVector refTypes) : shorty_(std::move(shorty)), refTypes_(std::move(refTypes))
        {
        }

        bool operator==(const Proto &other) const
        {
            return shorty_ == other.shorty_ && refTypes_ == other.refTypes_;
        }

        panda_file::Type GetReturnType() const
        {
            return shorty_[0];
        }

        PANDA_PUBLIC_API std::string_view GetReturnTypeDescriptor() const;
        PandaString GetSignature(bool includeReturnType = true);

        ShortyVector &GetShorty()
        {
            return shorty_;
        }

        const ShortyVector &GetShorty() const
        {
            return shorty_;
        }

        RefTypeVector &GetRefTypes()
        {
            return refTypes_;
        }

        const RefTypeVector &GetRefTypes() const
        {
            return refTypes_;
        }

        ~Proto() = default;

        DEFAULT_COPY_SEMANTIC(Proto);
        DEFAULT_MOVE_SEMANTIC(Proto);

    private:
        ShortyVector shorty_;
        RefTypeVector refTypes_;
    };
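
    // Illustrative sketch (an assumption, not taken from this file): judging by GetReturnType() returning
    // shorty_[0], the shorty vector stores the return type first and the argument types after it, while
    // refTypes_ keeps the descriptors of the reference types in order. A Proto could then be assembled
    // manually like this, with placeholder types and descriptors:
    //
    //     Method::Proto::ShortyVector shorty {retType, argType0, argType1};  // return type goes first
    //     Method::Proto::RefTypeVector refs {"LSomeClass;"};                 // one entry per reference type
    //     Method::Proto proto(std::move(shorty), std::move(refs));
    //     // proto.GetReturnType() now yields retType (== shorty[0])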

    class PANDA_PUBLIC_API ProtoId {
    public:
        ProtoId(const panda_file::File &pf, panda_file::File::EntityId protoId) : pf_(pf), protoId_(protoId) {}
        bool operator==(const ProtoId &other) const;
        bool operator==(const Proto &other) const;
        bool operator!=(const ProtoId &other) const
        {
            return !operator==(other);
        }
        bool operator!=(const Proto &other) const
        {
            return !operator==(other);
        }

        ~ProtoId() = default;

        DEFAULT_COPY_CTOR(ProtoId);
        NO_COPY_OPERATOR(ProtoId);
        NO_MOVE_SEMANTIC(ProtoId);

    private:
        const panda_file::File &pf_;
        panda_file::File::EntityId protoId_;
    };

    PANDA_PUBLIC_API Method(Class *klass, const panda_file::File *pf, panda_file::File::EntityId fileId,
                            panda_file::File::EntityId codeId, uint32_t accessFlags, uint32_t numArgs,
                            const uint16_t *shorty);

    explicit Method(const Method *method)
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        : accessFlags_(method->accessFlags_.load(std::memory_order_acquire)),
          numArgs_(method->numArgs_),
          stor16Pair_(method->stor16Pair_),
          classWord_(method->classWord_),
          pandaFile_(method->pandaFile_),
          fileId_(method->fileId_),
          codeId_(method->codeId_),
          shorty_(method->shorty_)
    {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        pointer_.nativePointer.store(
            // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
            // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
            method->pointer_.nativePointer.load(std::memory_order_relaxed), std::memory_order_relaxed);

        // Atomic with release order reason: data race with compiled_entry_point_ with dependencies on writes before
        // the store which should become visible to the acquire load
        compiledEntryPoint_.store(method->IsNative() ? method->GetCompiledEntryPoint()
                                                     : GetCompiledCodeToInterpreterBridge(method),
                                  std::memory_order_release);
        SetCompilationStatus(CompilationStage::NOT_COMPILED);
    }

    Method() = delete;
    Method(const Method &) = delete;
    Method(Method &&) = delete;
    Method &operator=(const Method &) = delete;
    Method &operator=(Method &&) = delete;
    ~Method() = default;

    uint32_t GetNumArgs() const
    {
        return numArgs_;
    }

    uint32_t GetNumVregs() const
    {
        if (!codeId_.IsValid()) {
            return 0;
        }
        return panda_file::CodeDataAccessor::GetNumVregs(*(pandaFile_), codeId_);
    }

    uint32_t GetCodeSize() const
    {
        if (!codeId_.IsValid()) {
            return 0;
        }
        panda_file::CodeDataAccessor cda(*(pandaFile_), codeId_);
        return cda.GetCodeSize();
    }

    const uint8_t *GetInstructions() const
    {
        if (!codeId_.IsValid()) {
            return nullptr;
        }
        return panda_file::CodeDataAccessor::GetInstructions(*pandaFile_, codeId_);
    }

    /*
     * Invoke the method as a static method.
     * Number of arguments and their types must match the method's signature
     */
    PANDA_PUBLIC_API Value Invoke(ManagedThread *thread, Value *args, bool proxyCall = false);

    void InvokeVoid(ManagedThread *thread, Value *args)
    {
        Invoke(thread, args);
    }
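
    // Illustrative sketch (not from this file): a caller packs the arguments into Value objects in
    // signature order and hands them to Invoke(). ManagedThread::GetCurrent() and the concrete
    // argument values are assumptions made only for this example.
    //
    //     ManagedThread *thread = ManagedThread::GetCurrent();
    //     Value args[] = {Value(static_cast<int64_t>(42)), Value(objHeader)};
    //     Value result = method->Invoke(thread, args);  // arg count and types must match the signature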

    /*
     * Invoke the method as a dynamic function.
     * Number of arguments may vary, all arguments must be of type coretypes::TaggedValue.
     * args - array of arguments. The first value must be the callee function object
     * num_args - length of args array
     * data - panda::ExtFrame language-related extension data
     */
    coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t numArgs, coretypes::TaggedValue *args);

    template <class InvokeHelper>
    coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t numArgs, coretypes::TaggedValue *args);

    template <class InvokeHelper>
    void InvokeEntry(ManagedThread *thread, Frame *currentFrame, Frame *frame, const uint8_t *pc);

    /*
     * Enter execution context (ECMAScript generators)
     * pc - pc of context
     * acc - accumulator of context
     * nregs - number of registers in context
     * regs - registers of context
     * data - panda::ExtFrame language-related extension data
     */
    coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue *acc,
                                         uint32_t nregs, coretypes::TaggedValue *regs);

    template <class InvokeHelper>
    coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue *acc,
                                         uint32_t nregs, coretypes::TaggedValue *regs);

    /*
     * Create new frame for native method, but don't start execution
     * Number of arguments may vary, all arguments must be of type coretypes::TaggedValue.
     * args - array of arguments. The first value must be the callee function object
     * num_vregs - number of registers in frame
     * num_args - length of args array
     * data - panda::ExtFrame language-related extension data
     */
    template <class InvokeHelper, class ValueT>
    Frame *EnterNativeMethodFrame(ManagedThread *thread, uint32_t numVregs, uint32_t numArgs, ValueT *args);

    /*
     * Pop native method frame
     */
    static void ExitNativeMethodFrame(ManagedThread *thread);

    Class *GetClass() const
    {
        return reinterpret_cast<Class *>(classWord_);
    }

    void SetClass(Class *cls)
    {
        classWord_ = static_cast<ClassHelper::ClassWordSize>(ToObjPtrType(cls));
    }

    void SetPandaFile(const panda_file::File *file)
    {
        pandaFile_ = file;
    }

    const panda_file::File *GetPandaFile() const
    {
        return pandaFile_;
    }

    panda_file::File::EntityId GetFileId() const
    {
        return fileId_;
    }

    panda_file::File::EntityId GetCodeId() const
    {
        return codeId_;
    }

    inline int16_t GetHotnessCounter() const
    {
        return stor16Pair_.hotnessCounter;
    }

    inline NO_THREAD_SANITIZE void DecrementHotnessCounter()
    {
        --stor16Pair_.hotnessCounter;
    }

    static NO_THREAD_SANITIZE int16_t GetInitialHotnessCounter();

    NO_THREAD_SANITIZE void ResetHotnessCounter();

    template <class AccVRegisterPtrT>
    NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] AccVRegisterPtrT acc);
    template <class AccVRegisterPtrT>
    NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] ManagedThread *thread, [[maybe_unused]] AccVRegisterPtrT acc);

    // NO_THREAD_SANITIZE because of performance degradation (see commit 7c913cb1 and MR 997#note_113500)
    template <bool IS_CALL, class AccVRegisterPtrT>
    NO_THREAD_SANITIZE bool DecrementHotnessCounter(uintptr_t bytecodeOffset, [[maybe_unused]] AccVRegisterPtrT cc,
                                                    bool osr = false,
                                                    coretypes::TaggedValue func = coretypes::TaggedValue::Hole());

    template <bool IS_CALL, class AccVRegisterPtrT>
    NO_THREAD_SANITIZE bool DecrementHotnessCounter(ManagedThread *thread, uintptr_t bytecodeOffset,
                                                    [[maybe_unused]] AccVRegisterPtrT cc, bool osr = false,
                                                    coretypes::TaggedValue func = coretypes::TaggedValue::Hole());

    // NOTE(xucheng): change the input type to uint16_t when we don't input the max num of int32_t
    inline NO_THREAD_SANITIZE void SetHotnessCounter(uint32_t counter)
    {
        stor16Pair_.hotnessCounter = static_cast<uint16_t>(counter);
    }

    PANDA_PUBLIC_API int64_t GetBranchTakenCounter(uint32_t pc);
    PANDA_PUBLIC_API int64_t GetBranchNotTakenCounter(uint32_t pc);

    int64_t GetThrowTakenCounter(uint32_t pc);

    const void *GetCompiledEntryPoint()
    {
        // Atomic with acquire order reason: data race with compiled_entry_point_ with dependencies on reads after
        // the load which should become visible
        return compiledEntryPoint_.load(std::memory_order_acquire);
    }

    const void *GetCompiledEntryPoint() const
    {
        // Atomic with acquire order reason: data race with compiled_entry_point_ with dependencies on reads after
        // the load which should become visible
        return compiledEntryPoint_.load(std::memory_order_acquire);
    }

    void SetCompiledEntryPoint(const void *entryPoint)
    {
        // Atomic with release order reason: data race with compiled_entry_point_ with dependencies on writes before
        // the store which should become visible to the acquire load
        compiledEntryPoint_.store(entryPoint, std::memory_order_release);
    }

    void SetInterpreterEntryPoint()
    {
        if (!IsNative()) {
            SetCompiledEntryPoint(GetCompiledCodeToInterpreterBridge(this));
        }
    }

    bool HasCompiledCode() const
    {
        auto entryPoint = GetCompiledEntryPoint();
        return entryPoint != GetCompiledCodeToInterpreterBridge() &&
               entryPoint != GetCompiledCodeToInterpreterBridgeDyn();
    }

    inline CompilationStage GetCompilationStatus() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return static_cast<CompilationStage>((accessFlags_.load(std::memory_order_acquire) & COMPILATION_STATUS_MASK) >>
                                             COMPILATION_STATUS_SHIFT);
    }

    inline CompilationStage GetCompilationStatus(uint32_t value)
    {
        return static_cast<CompilationStage>((value & COMPILATION_STATUS_MASK) >> COMPILATION_STATUS_SHIFT);
    }

    inline void SetCompilationStatus(enum CompilationStage newStatus)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        auto result = (accessFlags_.load(std::memory_order_acquire) & ~COMPILATION_STATUS_MASK) |
                      static_cast<uint32_t>(newStatus) << COMPILATION_STATUS_SHIFT;
        // Atomic with release order reason: data race with access_flags_ with dependencies on writes before the store
        // which should become visible to the acquire load
        accessFlags_.store(result, std::memory_order_release);
    }

    inline bool AtomicSetCompilationStatus(enum CompilationStage oldStatus, enum CompilationStage newStatus)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        uint32_t oldValue = accessFlags_.load(std::memory_order_acquire);
        while (GetCompilationStatus(oldValue) == oldStatus) {
            uint32_t newValue = MakeCompilationStatusValue(oldValue, newStatus);
            if (accessFlags_.compare_exchange_strong(oldValue, newValue)) {
                return true;
            }
        }
        return false;
    }
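
    // Illustrative sketch (not from this file): a compiler task could claim a method by atomically moving
    // it from NOT_COMPILED to WAITING; the CAS loop above lets exactly one caller win a given transition,
    // while the others return false once the observed status no longer matches.
    //
    //     if (method->AtomicSetCompilationStatus(Method::NOT_COMPILED, Method::WAITING)) {
    //         // this thread owns the transition and may enqueue the method for compilation
    //     }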

    panda_file::Type GetReturnType() const;

    panda_file::File::StringData GetRefReturnType() const;

    // idx - index number of the argument in the signature
    PANDA_PUBLIC_API panda_file::Type GetArgType(size_t idx) const;

    PANDA_PUBLIC_API panda_file::File::StringData GetRefArgType(size_t idx) const;

    template <typename Callback>
    void EnumerateTypes(Callback handler) const;

    PANDA_PUBLIC_API panda_file::File::StringData GetName() const;

    PANDA_PUBLIC_API panda_file::File::StringData GetClassName() const;

    PANDA_PUBLIC_API PandaString GetFullName(bool withSignature = false) const;
    PANDA_PUBLIC_API PandaString GetLineNumberAndSourceFile(uint32_t bcOffset) const;

    static uint32_t GetFullNameHashFromString(const PandaString &str);
    static uint32_t GetClassNameHashFromString(const PandaString &str);

    PANDA_PUBLIC_API Proto GetProto() const;

    PANDA_PUBLIC_API ProtoId GetProtoId() const;

    size_t GetFrameSize() const
    {
        return Frame::GetAllocSize(GetNumArgs() + GetNumVregs(), EMPTY_EXT_FRAME_DATA_SIZE);
    }

    uint32_t GetNumericalAnnotation(AnnotationField fieldId) const;
    panda_file::File::StringData GetStringDataAnnotation(AnnotationField fieldId) const;

    uint32_t GetAccessFlags() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return accessFlags_.load(std::memory_order_acquire);
    }

    void SetAccessFlags(uint32_t accessFlags)
    {
        // Atomic with release order reason: data race with access_flags_ with dependencies on writes before the store
        // which should become visible to the acquire load
        accessFlags_.store(accessFlags, std::memory_order_release);
    }

    bool IsStatic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_STATIC) != 0;
    }

    bool IsNative() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_NATIVE) != 0;
    }

    bool IsPublic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PUBLIC) != 0;
    }

    bool IsPrivate() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PRIVATE) != 0;
    }

    bool IsProtected() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PROTECTED) != 0;
    }

    bool IsIntrinsic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_INTRINSIC) != 0;
    }

    bool IsSynthetic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SYNTHETIC) != 0;
    }

    bool IsAbstract() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_ABSTRACT) != 0;
    }

    bool IsFinal() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_FINAL) != 0;
    }

    bool IsSynchronized() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SYNCHRONIZED) != 0;
    }

    bool HasSingleImplementation() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SINGLE_IMPL) != 0;
    }

    bool IsProfiled() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PROFILING) != 0;
    }

    bool IsDestroyed() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_DESTROYED) != 0;
    }

    void SetHasSingleImplementation(bool v)
    {
        if (v) {
            // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the
            // load and on writes before the store
            accessFlags_.fetch_or(ACC_SINGLE_IMPL, std::memory_order_acq_rel);
        } else {
            // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the
            // load and on writes before the store
            accessFlags_.fetch_and(~ACC_SINGLE_IMPL, std::memory_order_acq_rel);
        }
    }

    void SetProfiled()
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        accessFlags_.fetch_or(ACC_PROFILING, std::memory_order_acq_rel);
    }

    void SetDestroyed()
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        accessFlags_.fetch_or(ACC_DESTROYED, std::memory_order_acq_rel);
    }

    Method *GetSingleImplementation()
    {
        return HasSingleImplementation() ? this : nullptr;
    }

    void SetIntrinsic(intrinsics::Intrinsic intrinsic)
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        ASSERT((accessFlags_.load(std::memory_order_acquire) & INTRINSIC_MASK) == 0);
        auto result = ACC_INTRINSIC | static_cast<uint32_t>(intrinsic) << INTRINSIC_SHIFT;
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        accessFlags_.fetch_or(result, std::memory_order_acq_rel);
    }

    intrinsics::Intrinsic GetIntrinsic() const
    {
        ASSERT(IsIntrinsic());
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return static_cast<intrinsics::Intrinsic>((accessFlags_.load(std::memory_order_acquire) & INTRINSIC_MASK) >>
                                                  INTRINSIC_SHIFT);
    }

    void SetVTableIndex(uint16_t vtableIndex)
    {
        stor16Pair_.vtableIndex = vtableIndex;
    }

    uint16_t GetVTableIndex() const
    {
        return stor16Pair_.vtableIndex;
    }

    void SetNativePointer(void *nativePointer)
    {
        ASSERT((IsNative() || IsProxy()));
        // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
        // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        pointer_.nativePointer.store(nativePointer, std::memory_order_relaxed);
    }

    void *GetNativePointer() const
    {
        ASSERT((IsNative() || IsProxy()));
        // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
        // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.nativePointer.load(std::memory_order_relaxed);
    }

    const uint16_t *GetShorty() const
    {
        return shorty_;
    }

    uint32_t FindCatchBlockInPandaFile(const Class *cls, uint32_t pc) const;
    uint32_t FindCatchBlock(const Class *cls, uint32_t pc) const;

    PANDA_PUBLIC_API panda_file::Type GetEffectiveArgType(size_t idx) const;

    PANDA_PUBLIC_API panda_file::Type GetEffectiveReturnType() const;

    void SetIsDefaultInterfaceMethod()
    {
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        accessFlags_.fetch_or(ACC_DEFAULT_INTERFACE_METHOD, std::memory_order_acq_rel);
    }

    bool IsDefaultInterfaceMethod() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_DEFAULT_INTERFACE_METHOD) != 0;
    }

    bool IsConstructor() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_CONSTRUCTOR) != 0;
    }

    bool IsInstanceConstructor() const
    {
        return IsConstructor() && !IsStatic();
    }

    bool IsStaticConstructor() const
    {
        return IsConstructor() && IsStatic();
    }

    static constexpr uint32_t GetAccessFlagsOffset()
    {
        return MEMBER_OFFSET(Method, accessFlags_);
    }
    static constexpr uint32_t GetNumArgsOffset()
    {
        return MEMBER_OFFSET(Method, numArgs_);
    }
    static constexpr uint32_t GetVTableIndexOffset()
    {
        return MEMBER_OFFSET(Method, stor16Pair_) + MEMBER_OFFSET(Storage16Pair, vtableIndex);
    }
    static constexpr uint32_t GetHotnessCounterOffset()
    {
        return MEMBER_OFFSET(Method, stor16Pair_) + MEMBER_OFFSET(Storage16Pair, hotnessCounter);
    }
    static constexpr uint32_t GetClassOffset()
    {
        return MEMBER_OFFSET(Method, classWord_);
    }

    static constexpr uint32_t GetCompiledEntryPointOffset()
    {
        return MEMBER_OFFSET(Method, compiledEntryPoint_);
    }
    static constexpr uint32_t GetPandaFileOffset()
    {
        return MEMBER_OFFSET(Method, pandaFile_);
    }
    static constexpr uint32_t GetCodeIdOffset()
    {
        return MEMBER_OFFSET(Method, codeId_);
    }
    static constexpr uint32_t GetNativePointerOffset()
    {
        return MEMBER_OFFSET(Method, pointer_);
    }
    static constexpr uint32_t GetShortyOffset()
    {
        return MEMBER_OFFSET(Method, shorty_);
    }

    template <typename Callback>
    void EnumerateTryBlocks(Callback callback) const;

    template <typename Callback>
    void EnumerateCatchBlocks(Callback callback) const;

    template <typename Callback>
    void EnumerateExceptionHandlers(Callback callback) const;

    static inline UniqId CalcUniqId(const panda_file::File *file, panda_file::File::EntityId fileId)
    {
        constexpr uint64_t HALF = 32ULL;
        uint64_t uid = file->GetUniqId();
        uid <<= HALF;
        uid |= fileId.GetOffset();
        return uid;
    }
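
    // Worked example for CalcUniqId(): with file->GetUniqId() == 0x2 and fileId.GetOffset() == 0x1A4,
    // the result is (0x2 << 32) | 0x1A4 == 0x00000002000001A4, i.e. the panda file id occupies the
    // upper 32 bits and the entity offset the lower 32 bits. The concrete numbers are made up for
    // illustration only.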

    // for synthetic methods, like array .ctor
    static UniqId CalcUniqId(const uint8_t *classDescr, const uint8_t *name);

    UniqId GetUniqId() const
    {
        return CalcUniqId(pandaFile_, fileId_);
    }

    int32_t GetLineNumFromBytecodeOffset(uint32_t bcOffset) const;

    panda_file::File::StringData GetClassSourceFile() const;

    PANDA_PUBLIC_API void StartProfiling();
    PANDA_PUBLIC_API void StopProfiling();

    bool IsProxy() const;

    ProfilingData *GetProfilingData()
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return nullptr;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    ProfilingData *GetProfilingDataWithoutCheck()
    {
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    const ProfilingData *GetProfilingData() const
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return nullptr;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    bool IsProfiling() const
    {
        return GetProfilingData() != nullptr;
    }

    bool IsProfilingWithoutLock() const
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return false;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire) != nullptr;
    }

    void SetVerified(bool result);
    bool IsVerified() const;
    PANDA_PUBLIC_API bool Verify();
    template <bool IS_CALL>
    bool TryVerify();

    inline static VerificationStage GetVerificationStage(uint32_t value)
    {
        return static_cast<VerificationStage>((value & VERIFICATION_STATUS_MASK) >> VERIFICATION_STATUS_SHIFT);
    }

    inline VerificationStage GetVerificationStage() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return GetVerificationStage(accessFlags_.load(std::memory_order_acquire));
    }

    inline void SetVerificationStage(enum VerificationStage newStage)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        uint32_t oldValue = accessFlags_.load(std::memory_order_acquire);
        uint32_t newValue = MakeVerificationStageValue(oldValue, newStage);
        while (!accessFlags_.compare_exchange_weak(oldValue, newValue, std::memory_order_acq_rel)) {
            newValue = MakeVerificationStageValue(oldValue, newStage);
        }
    }

private:
    Value InvokeCompiledCode(ManagedThread *thread, uint32_t numArgs, Value *args);

    Value GetReturnValueFromTaggedValue(uint64_t retValue)
    {
        panda_file::Type retType = GetReturnType();
        if (retType.GetId() == panda_file::Type::TypeId::VOID) {
            return Value(static_cast<int64_t>(0));
        }
        if (retType.GetId() == panda_file::Type::TypeId::REFERENCE) {
            return Value(reinterpret_cast<ObjectHeader *>(retValue));
        }
        return Value(retValue);
    }

    inline static uint32_t MakeCompilationStatusValue(uint32_t value, CompilationStage newStatus)
    {
        value &= ~COMPILATION_STATUS_MASK;
        value |= static_cast<uint32_t>(newStatus) << COMPILATION_STATUS_SHIFT;
        return value;
    }

    inline static uint32_t MakeVerificationStageValue(uint32_t value, VerificationStage newStage)
    {
        value &= ~VERIFICATION_STATUS_MASK;
        value |= static_cast<uint32_t>(newStage) << VERIFICATION_STATUS_SHIFT;
        return value;
    }

    template <class InvokeHelper, class ValueT>
    ValueT InvokeInterpretedCode(ManagedThread *thread, uint32_t numActualArgs, ValueT *args);

    template <class InvokeHelper, class ValueT>
    PandaUniquePtr<Frame, FrameDeleter> InitFrame(ManagedThread *thread, uint32_t numActualArgs, ValueT *args,
                                                  Frame *currentFrame);

    template <class InvokeHelper, class ValueT, bool IS_NATIVE_METHOD>
    PandaUniquePtr<Frame, FrameDeleter> InitFrameWithNumVRegs(ManagedThread *thread, uint32_t numVregs,
                                                              uint32_t numActualArgs, ValueT *args,
                                                              Frame *currentFrame);

    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromException();

    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromAcc(interpreter::AccVRegister &accVreg);

    template <class InvokeHelper, class ValueT>
    ValueT InvokeImpl(ManagedThread *thread, uint32_t numActualArgs, ValueT *args, bool proxyCall);

private:
    union PointerInMethod {
        // It's the native pointer when the method is a native or proxy method.
        std::atomic<void *> nativePointer;
        // It's the profiling data when the method isn't a native or proxy method.
        std::atomic<ProfilingData *> profilingData;
    };

    struct Storage16Pair {
        uint16_t vtableIndex;
        int16_t hotnessCounter;
    };

    std::atomic_uint32_t accessFlags_;
    uint32_t numArgs_;
    Storage16Pair stor16Pair_;
    ClassHelper::ClassWordSize classWord_;

    std::atomic<const void *> compiledEntryPoint_ {nullptr};
    const panda_file::File *pandaFile_;
    union PointerInMethod pointer_ {
    };

    panda_file::File::EntityId fileId_;
    panda_file::File::EntityId codeId_;
    const uint16_t *shorty_;
};

static_assert(!std::is_polymorphic_v<Method>);

}  // namespace panda

#endif  // PANDA_RUNTIME_METHOD_H