• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #ifndef PANDA_RUNTIME_METHOD_H_
16 #define PANDA_RUNTIME_METHOD_H_
17 
18 #include <atomic>
19 #include <cstdint>
20 #include <functional>
21 #include <string_view>
22 
23 #include "intrinsics_enum.h"
24 #include "libpandabase/utils/arch.h"
25 #include "libpandabase/utils/logger.h"
26 #include "libpandafile/code_data_accessor-inl.h"
27 #include "libpandafile/file.h"
28 #include "libpandafile/file_items.h"
29 #include "libpandafile/modifiers.h"
30 #include "runtime/bridge/bridge.h"
31 #include "runtime/include/compiler_interface.h"
32 #include "runtime/include/class_helper.h"
33 #include "runtime/include/mem/panda_containers.h"
34 #include "runtime/include/mem/panda_smart_pointers.h"
35 #include "runtime/interpreter/frame.h"
36 #include "value.h"
37 
38 namespace panda {
39 
40 class Class;
41 class ManagedThread;
42 class ProfilingData;
43 
44 #ifdef PANDA_ENABLE_GLOBAL_REGISTER_VARIABLES
45 namespace interpreter {
46 class AccVRegisterT;
47 }  // namespace interpreter
48 using interpreter::AccVRegisterT;
49 #else
50 namespace interpreter {
51 using AccVRegisterT = AccVRegister;
52 }  // namespace interpreter
53 #endif
54 
// Functor that releases an interpreter Frame on behalf of a managed thread;
// intended for use as the deleter of a smart pointer wrapping a Frame.
class FrameDeleter {
public:
    explicit FrameDeleter(ManagedThread *thread) : thread_(thread) {}

    // Frees |frame| using the stored thread (defined out of line in the .cpp).
    void operator()(Frame *frame) const;

private:
    ManagedThread *thread_;  // non-owning; must outlive this deleter
};
64 
65 class Method {
66 public:
67     using UniqId = uint64_t;
68 
    // JIT compilation state of a method. Transitions are driven via
    // SetCompilationStatus/AtomicSetCompilationStatus; presumably
    // NOT_COMPILED -> WAITING (queued) -> COMPILATION (in progress) ->
    // COMPILED or FAILED — confirm against the compiler queue code.
    enum CompilationStage {
        NOT_COMPILED,
        WAITING,
        COMPILATION,
        COMPILED,
        FAILED,
    };
76 
    enum class VerificationStage {
        // There is allocated a separate bit for each state. Totally 3 bits is used.
        // When the method is not verified all bits are zero.
        // The next state, waiting for verification, uses the 2nd bit.
        // The final result (ok or fail) is stored in the 1st and 0th bits.
        // State changes as follows:
        //       000      ->    100    --+-->  110
        // (not verified)    (waiting)   |    (ok)
        //                               |
        //                               +-->  101
        //                                    (fail)
        // To read the state __builtin_ffs is used, which returns index + 1 of the first set bit
        // or zero for 0 value. See BitsToVerificationStage for details about converting the set bit
        // index to VerificationStage.
        // So the values' order is chosen in such a way that an earlier stage must have the highest value.
        NOT_VERIFIED = 0,
        VERIFIED_FAIL = 1,
        VERIFIED_OK = 2,
        WAITING = 4,
    };
97 
    // Well-known annotation fields that can be attached to a method.
    // STRING_DATA_BEGIN/END bracket the entries whose payload is string data
    // (currently only FUNCTION_NAME); the rest are numeric.
    enum AnnotationField : uint32_t {
        IC_SIZE = 0,        // inline-cache size
        FUNCTION_LENGTH,    // numeric "length" annotation
        FUNCTION_NAME,      // string "name" annotation
        STRING_DATA_BEGIN = FUNCTION_NAME,
        STRING_DATA_END = FUNCTION_NAME
    };
105 
    // Decoded method signature: a "shorty" vector of types (return type first,
    // then argument types) plus the descriptors of the reference-typed entries.
    class Proto {
    public:
        using ShortyVector = PandaSmallVector<panda_file::Type>;
        using RefTypeVector = PandaSmallVector<std::string_view>;
        Proto() = default;

        // Decodes the proto entity |proto_id| from |pf| (defined in the .cpp).
        Proto(const panda_file::File &pf, panda_file::File::EntityId proto_id);

        Proto(ShortyVector shorty, RefTypeVector ref_types)
            : shorty_(std::move(shorty)), ref_types_(std::move(ref_types))
        {
        }

        // Protos are equal when both the shorty and the reference types match.
        bool operator==(const Proto &other) const
        {
            return shorty_ == other.shorty_ && ref_types_ == other.ref_types_;
        }

        // The return type is always the first element of the shorty.
        panda_file::Type GetReturnType() const
        {
            return shorty_[0];
        }

        std::string_view GetReturnTypeDescriptor() const;

        ShortyVector &GetShorty()
        {
            return shorty_;
        }

        const ShortyVector &GetShorty() const
        {
            return shorty_;
        }

        RefTypeVector &GetRefTypes()
        {
            return ref_types_;
        }

        const RefTypeVector &GetRefTypes() const
        {
            return ref_types_;
        }

        ~Proto() = default;

        DEFAULT_COPY_SEMANTIC(Proto);
        DEFAULT_MOVE_SEMANTIC(Proto);

    private:
        ShortyVector shorty_;      // return type first, then argument types
        // NOTE(review): string_views presumably point into the panda file's
        // data — confirm the file outlives any Proto built from it.
        RefTypeVector ref_types_;
    };
160 
    // Lightweight handle to a proto entity inside a specific panda file.
    // Comparable against other ProtoIds and against decoded Protos without
    // fully decoding the signature (comparisons defined in the .cpp).
    class ProtoId {
    public:
        ProtoId(const panda_file::File &pf, panda_file::File::EntityId proto_id) : pf_(pf), proto_id_(proto_id) {}
        bool operator==(const ProtoId &other) const;
        bool operator==(const Proto &other) const;
        bool operator!=(const ProtoId &other) const
        {
            return !operator==(other);
        }
        bool operator!=(const Proto &other) const
        {
            return !operator==(other);
        }

        ~ProtoId() = default;

        DEFAULT_COPY_CTOR(ProtoId)
        NO_COPY_OPERATOR(ProtoId);
        NO_MOVE_SEMANTIC(ProtoId);

    private:
        const panda_file::File &pf_;  // non-owning; the file must outlive this handle
        panda_file::File::EntityId proto_id_;
    };
185 
186     Method(Class *klass, const panda_file::File *pf, panda_file::File::EntityId file_id,
187            panda_file::File::EntityId code_id, uint32_t access_flags, uint32_t num_args, const uint16_t *shorty);
188 
    // Copy-from-pointer constructor: duplicates |method|'s metadata, but
    // resets the entry point (non-native methods get the interpreter bridge)
    // and the compilation status (back to NOT_COMPILED).
    explicit Method(const Method *method)
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        : access_flags_(method->access_flags_.load(std::memory_order_acquire)),
          num_args_(method->num_args_),
          stor_16_pair_(method->stor_16_pair_),
          class_word_(method->class_word_),
          panda_file_(method->panda_file_),
          file_id_(method->file_id_),
          code_id_(method->code_id_),
          shorty_(method->shorty_)
    {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        pointer_.native_pointer_.store(
            // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
            // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
            method->pointer_.native_pointer_.load(std::memory_order_relaxed), std::memory_order_relaxed);

        // Atomic with release order reason: data race with compiled_entry_point_ with dependencies on writes before
        // the store which should become visible to acquire loads
        compiled_entry_point_.store(method->IsNative() ? method->GetCompiledEntryPoint()
                                                       : GetCompiledCodeToInterpreterBridge(method),
                                    std::memory_order_release);
        SetCompilationStatus(CompilationStage::NOT_COMPILED);
    }
214 
215     Method() = delete;
216     Method(const Method &) = delete;
217     Method(Method &&) = delete;
218     Method &operator=(const Method &) = delete;
219     Method &operator=(Method &&) = delete;
220 
    // Returns the number of method arguments as recorded at construction.
    uint32_t GetNumArgs() const
    {
        return num_args_;
    }
225 
GetNumVregs()226     uint32_t GetNumVregs() const
227     {
228         if (!code_id_.IsValid()) {
229             return 0;
230         }
231         return panda_file::CodeDataAccessor::GetNumVregs(*(panda_file_), code_id_);
232     }
233 
GetCodeSize()234     uint32_t GetCodeSize() const
235     {
236         if (!code_id_.IsValid()) {
237             return 0;
238         }
239         panda_file::CodeDataAccessor cda(*(panda_file_), code_id_);
240         return cda.GetCodeSize();
241     }
242 
GetInstructions()243     const uint8_t *GetInstructions() const
244     {
245         if (!code_id_.IsValid()) {
246             return nullptr;
247         }
248         return panda_file::CodeDataAccessor::GetInstructions(*panda_file_, code_id_);
249     }
250 
251     /*
252      * Invoke the method as a static method.
253      * Number of arguments and their types must match the method's signature
254      */
255     Value Invoke(ManagedThread *thread, Value *args, bool proxy_call = false);
256 
    // Invokes the method and discards its return value.
    // Argument count/types must match the signature, as for Invoke().
    void InvokeVoid(ManagedThread *thread, Value *args)
    {
        Invoke(thread, args);
    }
261 
262     /*
263      * Invoke the method as a dynamic function.
264      * Number of arguments may vary, all arguments must be of type DecodedTaggedValue.
265      * args - array of arguments. The first value must be the callee function object
266      * num_args - length of args array
267      * data - panda::ExtFrame language-related extension data
268      */
269     coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t num_args, coretypes::TaggedValue *args);
270 
271     template <class InvokeHelper>
272     coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t num_args, coretypes::TaggedValue *args);
273 
274     /*
275      * Enter execution context (ECMAScript generators)
276      * pc - pc of context
277      * acc - accumulator of context
278      * nregs - number of registers in context
279      * regs - registers of context
280      * data - panda::ExtFrame language-related extension data
281      */
282     coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue acc,
283                                          uint32_t nregs, coretypes::TaggedValue *regs);
284 
285     template <class InvokeHelper>
286     coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue acc,
287                                          uint32_t nregs, coretypes::TaggedValue *regs);
288 
289     /*
290      * Create new frame for native method, but don't start execution
291      * Number of arguments may vary, all arguments must be of type DecodedTaggedValue.
292      * args - array of arguments. The first value must be the callee function object
293      * num_vregs - number of registers in frame
294      * num_args - length of args array
295      * data - panda::ExtFrame language-related extension data
296      */
297     template <class InvokeHelper, class ValueT>
298     Frame *EnterNativeMethodFrame(ManagedThread *thread, uint32_t num_vregs, uint32_t num_args, ValueT *args);
299 
300     /*
301      * Pop native method frame
302      */
303     static void ExitNativeMethodFrame(ManagedThread *thread);
304 
    // Declaring class, decoded from the compressed class word.
    Class *GetClass() const
    {
        return reinterpret_cast<Class *>(class_word_);
    }
309 
    // Stores the declaring class as a compressed class word
    // (object pointer narrowed to ClassHelper::classWordSize).
    void SetClass(Class *cls)
    {
        class_word_ = static_cast<ClassHelper::classWordSize>(ToObjPtrType(cls));
    }
314 
SetPandaFile(const panda_file::File * file)315     void SetPandaFile(const panda_file::File *file)
316     {
317         panda_file_ = file;
318     }
319 
GetPandaFile()320     const panda_file::File *GetPandaFile() const
321     {
322         return panda_file_;
323     }
324 
GetFileId()325     panda_file::File::EntityId GetFileId() const
326     {
327         return file_id_;
328     }
329 
GetCodeId()330     panda_file::File::EntityId GetCodeId() const
331     {
332         return code_id_;
333     }
334 
GetHotnessCounter()335     inline uint16_t GetHotnessCounter() const
336     {
337         return stor_16_pair_.hotness_counter_;
338     }
339 
    // Bumps the profiling hotness counter. The counter is uint16_t and the
    // increment is unchecked, so it wraps around at 65535.
    inline NO_THREAD_SANITIZE void IncrementHotnessCounter()
    {
        ++stor_16_pair_.hotness_counter_;
    }
344 
ResetHotnessCounter()345     NO_THREAD_SANITIZE void ResetHotnessCounter()
346     {
347         stor_16_pair_.hotness_counter_ = 0;
348     }
349 
350     template <class AccVRegisterPtrT>
351     NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] AccVRegisterPtrT acc);
352 
353     // NO_THREAD_SANITIZE because of perfomance degradation (see commit 7c913cb1 and MR 997#note_113500)
354     template <class AccVRegisterPtrT>
355     NO_THREAD_SANITIZE bool IncrementHotnessCounter(uintptr_t bytecode_offset, [[maybe_unused]] AccVRegisterPtrT cc,
356                                                     bool osr = false);
357 
358     // TODO(xucheng): change the input type to uint16_t when we don't input the max num of int32_t
SetHotnessCounter(uint32_t counter)359     inline NO_THREAD_SANITIZE void SetHotnessCounter(uint32_t counter)
360     {
361         stor_16_pair_.hotness_counter_ = static_cast<uint16_t>(counter);
362     }
363 
364     int64_t GetBranchTakenCounter(uint32_t pc);
365     int64_t GetBranchNotTakenCounter(uint32_t pc);
366 
    // Current entry point (compiled code or an interpreter bridge).
    const void *GetCompiledEntryPoint()
    {
        // Atomic with acquire order reason: data race with compiled_entry_point_ with dependencies on reads after
        // the load which should become visible
        return compiled_entry_point_.load(std::memory_order_acquire);
    }
373 
    // Const overload of GetCompiledEntryPoint().
    const void *GetCompiledEntryPoint() const
    {
        // Atomic with acquire order reason: data race with compiled_entry_point_ with dependencies on reads after
        // the load which should become visible
        return compiled_entry_point_.load(std::memory_order_acquire);
    }
380 
    // Publishes a new entry point (e.g. freshly JIT-compiled code).
    void SetCompiledEntryPoint(const void *entry_point)
    {
        // Atomic with release order reason: data race with compiled_entry_point_ with dependencies on writes before
        // the store which should become visible to acquire loads
        compiled_entry_point_.store(entry_point, std::memory_order_release);
    }
387 
    // Redirects execution back to the interpreter bridge.
    // No-op for native methods, which keep their native entry point.
    void SetInterpreterEntryPoint()
    {
        if (!IsNative()) {
            SetCompiledEntryPoint(GetCompiledCodeToInterpreterBridge(this));
        }
    }
394 
HasCompiledCode()395     bool HasCompiledCode() const
396     {
397         auto entry_point = GetCompiledEntryPoint();
398         return entry_point != GetCompiledCodeToInterpreterBridge() &&
399                entry_point != GetCompiledCodeToInterpreterBridgeDyn();
400     }
401 
    // Reads the CompilationStage packed into the access-flags word.
    inline CompilationStage GetCompilationStatus()
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return static_cast<CompilationStage>(
            (access_flags_.load(std::memory_order_acquire) & COMPILATION_STATUS_MASK) >> COMPILATION_STATUS_SHIFT);
    }
409 
    // Extracts the CompilationStage from an already-loaded access-flags value.
    inline CompilationStage GetCompilationStatus(uint32_t value)
    {
        return static_cast<CompilationStage>((value & COMPILATION_STATUS_MASK) >> COMPILATION_STATUS_SHIFT);
    }
414 
    // Overwrites the compilation-status bits in the access-flags word.
    // NOTE(review): this is a separate load + store, not an atomic RMW —
    // a concurrent flag update between the two can be lost. Use
    // AtomicSetCompilationStatus for the CAS-based transition.
    inline void SetCompilationStatus(enum CompilationStage new_status)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        auto result = (access_flags_.load(std::memory_order_acquire) & ~COMPILATION_STATUS_MASK) |
                      static_cast<uint32_t>(new_status) << COMPILATION_STATUS_SHIFT;
        // Atomic with release order reason: data race with access_flags_ with dependencies on writes before the store
        // which should become visible to acquire loads
        access_flags_.store(result, std::memory_order_release);
    }
425 
    // CAS loop: transitions the compilation status from |old_status| to
    // |new_status|. Returns true on success; returns false as soon as the
    // observed status is no longer |old_status| (another thread won the race).
    // compare_exchange_strong uses the default seq_cst ordering.
    inline bool AtomicSetCompilationStatus(enum CompilationStage old_status, enum CompilationStage new_status)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        uint32_t old_value = access_flags_.load(std::memory_order_acquire);
        while (GetCompilationStatus(old_value) == old_status) {
            uint32_t new_value = MakeCompilationStatusValue(old_value, new_status);
            // On failure, compare_exchange_strong refreshes old_value and the loop re-checks the status.
            if (access_flags_.compare_exchange_strong(old_value, new_value)) {
                return true;
            }
        }
        return false;
    }
439 
440     panda_file::Type GetReturnType() const;
441 
442     panda_file::File::StringData GetRefReturnType() const;
443 
444     // idx - index number of the argument in the signature
445     panda_file::Type GetArgType(size_t idx) const;
446 
447     panda_file::File::StringData GetRefArgType(size_t idx) const;
448 
449     template <typename Callback>
450     void EnumerateTypes(Callback handler) const;
451 
452     panda_file::File::StringData GetName() const;
453 
454     panda_file::File::StringData GetClassName() const;
455 
456     PandaString GetFullName(bool with_signature = false) const;
457 
458     static uint32_t GetFullNameHashFromString(const PandaString &str);
459     static uint32_t GetClassNameHashFromString(const PandaString &str);
460 
461     Proto GetProto() const;
462 
463     ProtoId GetProtoId() const;
464 
    // Allocation size for this method's interpreter frame:
    // one register slot per argument and per declared vreg.
    size_t GetFrameSize() const
    {
        return Frame::GetAllocSize(GetNumArgs() + GetNumVregs(), EmptyExtFrameDataSize);
    }
469 
470     uint32_t GetNumericalAnnotation(AnnotationField field_id) const;
471     panda_file::File::StringData GetStringDataAnnotation(AnnotationField field_id) const;
472 
    // Raw access-flags word (also packs compilation status, intrinsic id, etc.).
    uint32_t GetAccessFlags() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return access_flags_.load(std::memory_order_acquire);
    }
479 
    // Replaces the whole access-flags word, including the packed status bits.
    void SetAccessFlags(uint32_t access_flags)
    {
        // Atomic with release order reason: data race with access_flags_ with dependencies on writes before the store
        // which should become visible to acquire loads
        access_flags_.store(access_flags, std::memory_order_release);
    }
486 
    // True when the ACC_STATIC flag is set.
    bool IsStatic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (access_flags_.load(std::memory_order_acquire) & ACC_STATIC) != 0;
    }
493 
    // True when the ACC_NATIVE flag is set.
    bool IsNative() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return (access_flags_.load(std::memory_order_acquire) & ACC_NATIVE) != 0;
    }
500 
IsPublic()501     bool IsPublic() const
502     {
503         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
504         // should become visible
505         return (access_flags_.load(std::memory_order_acquire) & ACC_PUBLIC) != 0;
506     }
507 
IsPrivate()508     bool IsPrivate() const
509     {
510         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
511         // should become visible
512         return (access_flags_.load(std::memory_order_acquire) & ACC_PRIVATE) != 0;
513     }
514 
IsProtected()515     bool IsProtected() const
516     {
517         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
518         // should become visible
519         return (access_flags_.load(std::memory_order_acquire) & ACC_PROTECTED) != 0;
520     }
521 
IsIntrinsic()522     bool IsIntrinsic() const
523     {
524         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
525         // should become visible
526         return (access_flags_.load(std::memory_order_acquire) & ACC_INTRINSIC) != 0;
527     }
528 
IsSynthetic()529     bool IsSynthetic() const
530     {
531         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
532         // should become visible
533         return (access_flags_.load(std::memory_order_acquire) & ACC_SYNTHETIC) != 0;
534     }
535 
IsAbstract()536     bool IsAbstract() const
537     {
538         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
539         // should become visible
540         return (access_flags_.load(std::memory_order_acquire) & ACC_ABSTRACT) != 0;
541     }
542 
IsFinal()543     bool IsFinal() const
544     {
545         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
546         // should become visible
547         return (access_flags_.load(std::memory_order_acquire) & ACC_FINAL) != 0;
548     }
549 
IsSynchronized()550     bool IsSynchronized() const
551     {
552         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
553         // should become visible
554         return (access_flags_.load(std::memory_order_acquire) & ACC_SYNCHRONIZED) != 0;
555     }
556 
HasSingleImplementation()557     bool HasSingleImplementation() const
558     {
559         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
560         // should become visible
561         return (access_flags_.load(std::memory_order_acquire) & ACC_SINGLE_IMPL) != 0;
562     }
563 
    // Sets or clears the ACC_SINGLE_IMPL bit with a single atomic RMW.
    void SetHasSingleImplementation(bool v)
    {
        if (v) {
            // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the
            // load and on writes before the store
            access_flags_.fetch_or(ACC_SINGLE_IMPL, std::memory_order_acq_rel);
        } else {
            // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the
            // load and on writes before the store
            access_flags_.fetch_and(~ACC_SINGLE_IMPL, std::memory_order_acq_rel);
        }
    }
576 
GetSingleImplementation()577     Method *GetSingleImplementation()
578     {
579         return HasSingleImplementation() ? this : nullptr;
580     }
581 
    // Marks the method as an intrinsic and packs the intrinsic id into the
    // access-flags word. Must only be called once: both the ACC_INTRINSIC bit
    // and the id bits are asserted to be clear beforehand.
    void SetIntrinsic(intrinsics::Intrinsic intrinsic)
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        ASSERT((access_flags_.load(std::memory_order_acquire) & INTRINSIC_MASK) == 0);
        auto result = ACC_INTRINSIC | static_cast<uint32_t>(intrinsic) << INTRINSIC_SHIFT;
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        access_flags_.fetch_or(result, std::memory_order_acq_rel);
    }
593 
    // Unpacks the intrinsic id from the access-flags word.
    // Only valid for methods previously marked via SetIntrinsic.
    intrinsics::Intrinsic GetIntrinsic() const
    {
        ASSERT(IsIntrinsic());
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load
        // which should become visible
        return static_cast<intrinsics::Intrinsic>((access_flags_.load(std::memory_order_acquire) & INTRINSIC_MASK) >>
                                                  INTRINSIC_SHIFT);
    }
602 
SetVTableIndex(uint16_t vtable_index)603     void SetVTableIndex(uint16_t vtable_index)
604     {
605         stor_16_pair_.vtable_index_ = vtable_index;
606     }
607 
GetVTableIndex()608     uint16_t GetVTableIndex() const
609     {
610         return stor_16_pair_.vtable_index_;
611     }
612 
    // Stores the native implementation pointer. Only meaningful for native or
    // proxy methods — the pointer_ union holds profiling data otherwise.
    void SetNativePointer(void *native_pointer)
    {
        ASSERT((IsNative() || IsProxy()));
        // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
        // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        pointer_.native_pointer_.store(native_pointer, std::memory_order_relaxed);
    }
620 
    // Reads the native implementation pointer. Only valid for native or proxy
    // methods (asserted); the pointer_ union holds profiling data otherwise.
    void *GetNativePointer() const
    {
        ASSERT((IsNative() || IsProxy()));
        // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
        // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.native_pointer_.load(std::memory_order_relaxed);
    }
628 
GetShorty()629     const uint16_t *GetShorty() const
630     {
631         return shorty_;
632     }
633 
634     uint32_t FindCatchBlock(const Class *cls, uint32_t pc) const;
635 
636     panda_file::Type GetEffectiveArgType(size_t idx) const;
637 
638     panda_file::Type GetEffectiveReturnType() const;
639 
SetIsDefaultInterfaceMethod()640     void SetIsDefaultInterfaceMethod()
641     {
642         // Atomic with acq_rel order reason: data race with access_flags_ with dependecies on reads after the load and
643         // on writes before the store
644         access_flags_.fetch_or(ACC_DEFAULT_INTERFACE_METHOD, std::memory_order_acq_rel);
645     }
646 
IsDefaultInterfaceMethod()647     bool IsDefaultInterfaceMethod() const
648     {
649         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
650         // should become visible
651         return (access_flags_.load(std::memory_order_acquire) & ACC_DEFAULT_INTERFACE_METHOD) != 0;
652     }
653 
IsConstructor()654     bool IsConstructor() const
655     {
656         // Atomic with acquire order reason: data race with access_flags_ with dependecies on reads after the load which
657         // should become visible
658         return (access_flags_.load(std::memory_order_acquire) & ACC_CONSTRUCTOR) != 0;
659     }
660 
IsInstanceConstructor()661     bool IsInstanceConstructor() const
662     {
663         return IsConstructor() && !IsStatic();
664     }
665 
IsStaticConstructor()666     bool IsStaticConstructor() const
667     {
668         return IsConstructor() && IsStatic();
669     }
670 
GetAccessFlagsOffset()671     static constexpr uint32_t GetAccessFlagsOffset()
672     {
673         return MEMBER_OFFSET(Method, access_flags_);
674     }
GetNumArgsOffset()675     static constexpr uint32_t GetNumArgsOffset()
676     {
677         return MEMBER_OFFSET(Method, num_args_);
678     }
GetVTableIndexOffset()679     static constexpr uint32_t GetVTableIndexOffset()
680     {
681         return MEMBER_OFFSET(Method, stor_16_pair_) + MEMBER_OFFSET(Storage16Pair, vtable_index_);
682     }
GetHotnessCounterOffset()683     static constexpr uint32_t GetHotnessCounterOffset()
684     {
685         return MEMBER_OFFSET(Method, stor_16_pair_) + MEMBER_OFFSET(Storage16Pair, hotness_counter_);
686     }
GetClassOffset()687     static constexpr uint32_t GetClassOffset()
688     {
689         return MEMBER_OFFSET(Method, class_word_);
690     }
691 
GetCompiledEntryPointOffset()692     static constexpr uint32_t GetCompiledEntryPointOffset()
693     {
694         return MEMBER_OFFSET(Method, compiled_entry_point_);
695     }
GetPandaFileOffset()696     static constexpr uint32_t GetPandaFileOffset()
697     {
698         return MEMBER_OFFSET(Method, panda_file_);
699     }
GetNativePointerOffset()700     static constexpr uint32_t GetNativePointerOffset()
701     {
702         return MEMBER_OFFSET(Method, pointer_);
703     }
GetShortyOffset()704     static constexpr uint32_t GetShortyOffset()
705     {
706         return MEMBER_OFFSET(Method, shorty_);
707     }
708 
709     template <typename Callback>
710     void EnumerateTryBlocks(Callback callback) const;
711 
712     template <typename Callback>
713     void EnumerateCatchBlocks(Callback callback) const;
714 
715     template <typename Callback>
716     void EnumerateExceptionHandlers(Callback callback) const;
717 
CalcUniqId(const panda_file::File * file,panda_file::File::EntityId file_id)718     static inline UniqId CalcUniqId(const panda_file::File *file, panda_file::File::EntityId file_id)
719     {
720         constexpr uint64_t HALF = 32ULL;
721         uint64_t uid = file->GetUniqId();
722         uid <<= HALF;
723         uid |= file_id.GetOffset();
724         return uid;
725     }
726 
727     // for synthetic methods, like array .ctor
728     static UniqId CalcUniqId(const uint8_t *class_descr, const uint8_t *name);
729 
GetUniqId()730     UniqId GetUniqId() const
731     {
732         return CalcUniqId(panda_file_, file_id_);
733     }
734 
735     int32_t GetLineNumFromBytecodeOffset(uint32_t bc_offset) const;
736 
737     panda_file::File::StringData GetClassSourceFile() const;
738 
739     void StartProfiling();
740     void StopProfiling();
741 
742     bool IsProxy() const;
743 
    // Profiling data for the method, or nullptr when profiling is inactive.
    // Native and proxy methods never profile: the pointer_ union holds the
    // native pointer for them, so reading profiling_data_ would be invalid.
    ProfilingData *GetProfilingData()
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return nullptr;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profiling_data_.load(std::memory_order_acquire);
    }
753 
GetProfilingDataWithoutCheck()754     ProfilingData *GetProfilingDataWithoutCheck()
755     {
756         // Atomic with acquire order reason: data race with profiling_data_ with dependecies on reads after the load
757         // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
758         return pointer_.profiling_data_.load(std::memory_order_acquire);
759     }
760 
GetProfilingData()761     const ProfilingData *GetProfilingData() const
762     {
763         if (UNLIKELY(IsNative() || IsProxy())) {
764             return nullptr;
765         }
766         // Atomic with acquire order reason: data race with profiling_data_ with dependecies on reads after the load
767         // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
768         return pointer_.profiling_data_.load(std::memory_order_acquire);
769     }
770 
IsProfiling()771     bool IsProfiling() const
772     {
773         return GetProfilingData() != nullptr;
774     }
775 
IsProfilingWithoutLock()776     bool IsProfilingWithoutLock() const
777     {
778         if (UNLIKELY(IsNative() || IsProxy())) {
779             return false;
780         }
781         // Atomic with acquire order reason: data race with profiling_data_ with dependecies on reads after the load
782         // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
783         return pointer_.profiling_data_.load(std::memory_order_acquire) != nullptr;
784     }
785 
    // Verification queue / state API. Exact queueing semantics are defined at the
    // out-of-line definitions — NOTE(review): confirm against method.cpp.
    bool AddJobInQueue();
    void WaitForVerification();
    void SetVerified(bool result);
    bool IsVerified() const;
    bool Verify();
    void EnqueueForVerification();

    ~Method();

private:
    // Helpers for the verification state machine; the stage is packed into bits
    // (see BitsToVerificationStage) — presumably within access_flags_, TODO confirm.
    VerificationStage GetVerificationStage() const;
    void SetVerificationStage(VerificationStage stage);
    VerificationStage ExchangeVerificationStage(VerificationStage stage);
    static VerificationStage BitsToVerificationStage(uint32_t bits);

    // Invokes this method through its compiled entry point.
    Value InvokeCompiledCode(ManagedThread *thread, uint32_t num_args, Value *args);
802 
GetReturnValueFromTaggedValue(DecodedTaggedValue ret_value)803     Value GetReturnValueFromTaggedValue(DecodedTaggedValue ret_value)
804     {
805         Value res(static_cast<int64_t>(0));
806 
807         panda_file::Type ret_type = GetReturnType();
808 
809         if (ret_type.GetId() != panda_file::Type::TypeId::VOID) {
810             if (ret_type.GetId() == panda_file::Type::TypeId::REFERENCE) {
811                 res = Value(reinterpret_cast<ObjectHeader *>(ret_value.value));
812             } else if (ret_type.GetId() == panda_file::Type::TypeId::TAGGED) {
813                 res = Value(ret_value.value, ret_value.tag);
814             } else {
815                 res = Value(ret_value.value);
816             }
817         }
818 
819         return res;
820     }
821 
MakeCompilationStatusValue(uint32_t value,CompilationStage new_status)822     inline static uint32_t MakeCompilationStatusValue(uint32_t value, CompilationStage new_status)
823     {
824         value &= ~COMPILATION_STATUS_MASK;
825         value |= static_cast<uint32_t>(new_status) << COMPILATION_STATUS_SHIFT;
826         return value;
827     }
828 
    // Invokes this method through the interpreter.
    template <class InvokeHelper, class ValueT>
    ValueT InvokeInterpretedCode(ManagedThread *thread, uint32_t num_actual_args, ValueT *args);

    // Builds an interpreter frame for an invocation (FrameDeleter releases it).
    template <class InvokeHelper, class ValueT>
    PandaUniquePtr<Frame, FrameDeleter> InitFrame(ManagedThread *thread, uint32_t num_actual_args, ValueT *args,
                                                  Frame *current_frame);

    // Frame construction with an explicit virtual-register count.
    template <class InvokeHelper, class ValueT, bool is_native_method>
    PandaUniquePtr<Frame, FrameDeleter> InitFrameWithNumVRegs(ManagedThread *thread, uint32_t num_vregs,
                                                              uint32_t num_actual_args, ValueT *args,
                                                              Frame *current_frame);

    // Result extraction: from a pending exception, or from the accumulator vreg.
    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromException();

    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromAcc(interpreter::AccVRegister &aac_vreg);

    // Common invocation path shared by compiled-code and interpreted calls.
    template <class InvokeHelper, class ValueT>
    ValueT InvokeImpl(ManagedThread *thread, uint32_t num_actual_args, ValueT *args, bool proxy_call);
849 
private:
    union PointerInMethod {
        // It's a native pointer when the method is a native or proxy method.
        std::atomic<void *> native_pointer_;
        // It's profiling data when the method isn't a native or proxy method.
        std::atomic<ProfilingData *> profiling_data_;
    };

    // Two 16-bit fields packed together to keep the object compact.
    struct Storage16Pair {
        uint16_t vtable_index_;
        uint16_t hotness_counter_;
    };

    std::atomic_uint32_t access_flags_;  // atomic: flags are read/written concurrently
    uint32_t num_args_;
    Storage16Pair stor_16_pair_;
    ClassHelper::classWordSize class_word_;

    // Entry point for compiled code; nullptr until one is installed.
    std::atomic<const void *> compiled_entry_point_ {nullptr};
    const panda_file::File *panda_file_;
    // Holds either a native pointer or profiling data, depending on method kind.
    union PointerInMethod pointer_ {
    };

    panda_file::File::EntityId file_id_;  // this method's entity in panda_file_
    panda_file::File::EntityId code_id_;  // this method's code item in panda_file_
    const uint16_t *shorty_;              // method signature shorty — semantics defined in libpandafile
876 };
877 
878 static_assert(!std::is_polymorphic_v<Method>);
879 
880 }  // namespace panda
881 
882 #endif  // PANDA_RUNTIME_METHOD_H_
883