/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_METHOD_H_
#define PANDA_RUNTIME_METHOD_H_

#include <atomic>
#include <cstdint>
#include <functional>
#include <string_view>

#include "intrinsics_enum.h"
#include "libpandabase/utils/arch.h"
#include "libpandabase/utils/logger.h"
#include "libpandafile/code_data_accessor-inl.h"
#include "libpandafile/file.h"
#include "libpandafile/file_items.h"
#include "libpandafile/method_data_accessor.h"
#include "libpandafile/modifiers.h"
#include "runtime/bridge/bridge.h"
#include "runtime/include/compiler_interface.h"
#include "runtime/include/class_helper.h"
#include "runtime/include/mem/panda_containers.h"
#include "runtime/include/mem/panda_smart_pointers.h"
#include "runtime/interpreter/frame.h"
#include "value.h"

namespace ark {

class Class;
class ManagedThread;
class ProfilingData;

#ifdef PANDA_ENABLE_GLOBAL_REGISTER_VARIABLES
namespace interpreter {
class AccVRegisterT;
}  // namespace interpreter
using interpreter::AccVRegisterT;
#else
namespace interpreter {
using AccVRegisterT = AccVRegister;
}  // namespace interpreter
#endif

class FrameDeleter {
public:
    explicit FrameDeleter(ManagedThread *thread) : thread_(thread) {}

    void operator()(Frame *frame) const;

private:
    ManagedThread *thread_;
};

class Method {
public:
    using UniqId = uint64_t;

    enum CompilationStage {
        NOT_COMPILED,
        WAITING,
        COMPILATION,
        COMPILED,
        FAILED,
    };

    enum class VerificationStage { NOT_VERIFIED = 0, VERIFIED_FAIL = 1, VERIFIED_OK = 2, LAST = VERIFIED_OK };

    static_assert(MinimumBitsToStore(VerificationStage::LAST) <= VERIFICATION_STATUS_WIDTH);

    using AnnotationField = panda_file::MethodDataAccessor::AnnotationField;

    class Proto {
    public:
        using ShortyVector = PandaSmallVector<panda_file::Type>;
        using RefTypeVector = PandaSmallVector<std::string_view>;
        Proto() = default;

        Proto(const panda_file::File &pf, panda_file::File::EntityId protoId);

        Proto(ShortyVector shorty, RefTypeVector refTypes) : shorty_(std::move(shorty)), refTypes_(std::move(refTypes))
        {
        }

        bool operator==(const Proto &other) const
        {
            return shorty_ == other.shorty_ && refTypes_ == other.refTypes_;
        }

        panda_file::Type GetReturnType() const
        {
            return shorty_[0];
        }

        PANDA_PUBLIC_API std::string_view GetReturnTypeDescriptor() const;
        PandaString GetSignature(bool includeReturnType = true);

        ShortyVector &GetShorty()
        {
            return shorty_;
        }

        const ShortyVector &GetShorty() const
        {
            return shorty_;
        }

        RefTypeVector &GetRefTypes()
        {
            return refTypes_;
        }

        const RefTypeVector &GetRefTypes() const
        {
            return refTypes_;
        }

        ~Proto() = default;

        DEFAULT_COPY_SEMANTIC(Proto);
        DEFAULT_MOVE_SEMANTIC(Proto);

    private:
        ShortyVector shorty_;
        RefTypeVector refTypes_;
    };
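    // Note on Proto above (a reading of the accessors, not an additional guarantee): GetReturnType()
    // returns shorty_[0], so the shorty is laid out with the return type first, presumably followed
    // by the argument types, while refTypes_ carries the descriptors of the reference types used in
    // the signature.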

    class PANDA_PUBLIC_API ProtoId {
    public:
        ProtoId(const panda_file::File &pf, panda_file::File::EntityId protoId) : pf_(pf), protoId_(protoId) {}
        bool operator==(const ProtoId &other) const;
        bool operator==(const Proto &other) const;
        bool operator!=(const ProtoId &other) const
        {
            return !operator==(other);
        }
        bool operator!=(const Proto &other) const
        {
            return !operator==(other);
        }

        const panda_file::File &GetPandaFile() const
        {
            return pf_;
        }

        const panda_file::File::EntityId &GetEntityId() const
        {
            return protoId_;
        }

        ~ProtoId() = default;

        DEFAULT_COPY_CTOR(ProtoId);
        NO_COPY_OPERATOR(ProtoId);
        NO_MOVE_SEMANTIC(ProtoId);

    private:
        const panda_file::File &pf_;
        panda_file::File::EntityId protoId_;
    };
    // CC-OFFNXT(G.FUN.01) solid logic
    PANDA_PUBLIC_API Method(Class *klass, const panda_file::File *pf, panda_file::File::EntityId fileId,
                            panda_file::File::EntityId codeId, uint32_t accessFlags, uint32_t numArgs,
                            const uint16_t *shorty);

    explicit Method(const Method *method)
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        : accessFlags_(method->accessFlags_.load(std::memory_order_acquire)),
          numArgs_(method->numArgs_),
          stor16Pair_(method->stor16Pair_),
          classWord_(method->classWord_),
          pandaFile_(method->pandaFile_),
          fileId_(method->fileId_),
          codeId_(method->codeId_),
          shorty_(method->shorty_)
    {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        pointer_.nativePointer.store(
            // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
            // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
            method->pointer_.nativePointer.load(std::memory_order_relaxed), std::memory_order_relaxed);

        // Atomic with release order reason: data race with compiled_entry_point_ with dependencies on writes before the
        // store which should become visible acquire
        compiledEntryPoint_.store(method->IsNative() ? method->GetCompiledEntryPoint()
                                                     : GetCompiledCodeToInterpreterBridge(method),
                                  std::memory_order_release);
        SetCompilationStatus(CompilationStage::NOT_COMPILED);
    }

    Method() = delete;
    Method(const Method &) = delete;
    Method(Method &&) = delete;
    Method &operator=(const Method &) = delete;
    Method &operator=(Method &&) = delete;
    ~Method() = default;

    uint32_t GetNumArgs() const
    {
        return numArgs_;
    }

    uint32_t GetNumVregs() const
    {
        if (!codeId_.IsValid()) {
            return 0;
        }
        return panda_file::CodeDataAccessor::GetNumVregs(*(pandaFile_), codeId_);
    }

    uint32_t GetCodeSize() const
    {
        if (!codeId_.IsValid()) {
            return 0;
        }
        panda_file::CodeDataAccessor cda(*(pandaFile_), codeId_);
        return cda.GetCodeSize();
    }

    const uint8_t *GetInstructions() const
    {
        if (!codeId_.IsValid()) {
            return nullptr;
        }
        return panda_file::CodeDataAccessor::GetInstructions(*pandaFile_, codeId_);
    }

    /*
     * Invoke the method as a static method.
     * The number of arguments and their types must match the method's signature.
     */
    PANDA_PUBLIC_API Value Invoke(ManagedThread *thread, Value *args, bool proxyCall = false);

    void InvokeVoid(ManagedThread *thread, Value *args)
    {
        Invoke(thread, args);
    }
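    // A minimal usage sketch (illustrative only, not part of this header's contract): `method` and
    // `thread` are assumed to be a resolved Method* and the current ManagedThread*, and the method
    // is assumed to take two primitive arguments.
    //
    //   std::array<Value, 2> args {Value(static_cast<int64_t>(1)), Value(static_cast<int64_t>(2))};
    //   Value result = method->Invoke(thread, args.data());
    //
    // The argument count and types must match the method's shorty; use InvokeVoid() when the
    // return value is not needed.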

    /*
     * Invoke the method as a dynamic function.
     * Number of arguments may vary, all arguments must be of type coretypes::TaggedValue.
     * args - array of arguments. The first value must be the callee function object
     * num_args - length of args array
     * data - ark::ExtFrame language-related extension data
     */
    coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t numArgs, coretypes::TaggedValue *args);

    template <class InvokeHelper>
    coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t numArgs, coretypes::TaggedValue *args);

    template <class InvokeHelper>
    void InvokeEntry(ManagedThread *thread, Frame *currentFrame, Frame *frame, const uint8_t *pc);

    /*
     * Enter execution context (ECMAScript generators)
     * pc - pc of context
     * acc - accumulator of context
     * nregs - number of registers in context
     * regs - registers of context
     * data - ark::ExtFrame language-related extension data
     */
    coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue *acc,
                                         uint32_t nregs, coretypes::TaggedValue *regs);

    template <class InvokeHelper>
    coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue *acc,
                                         uint32_t nregs, coretypes::TaggedValue *regs);

    /*
     * Create a new frame for a native method, but don't start execution.
     * Number of arguments may vary, all arguments must be of type coretypes::TaggedValue.
     * args - array of arguments. The first value must be the callee function object
     * num_vregs - number of registers in frame
     * num_args - length of args array
     * data - ark::ExtFrame language-related extension data
     */
    template <class InvokeHelper, class ValueT>
    Frame *EnterNativeMethodFrame(ManagedThread *thread, uint32_t numVregs, uint32_t numArgs, ValueT *args);

    /*
     * Pop native method frame
     */
    // CC-OFFNXT(G.INC.10) false positive: static method
    static void ExitNativeMethodFrame(ManagedThread *thread);
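    // A hedged pairing sketch (illustrative only; `SomeInvokeHelper`, `numVregs`, `numArgs`, and
    // `args` are assumptions): a caller that needs a managed frame around native logic is expected
    // to balance the two calls above, e.g.
    //
    //   Frame *frame = method->EnterNativeMethodFrame<SomeInvokeHelper, Value>(thread, numVregs, numArgs, args);
    //   ...  // native logic that requires a frame on the stack
    //   Method::ExitNativeMethodFrame(thread);
    //
    // EnterNativeMethodFrame only creates and links the frame; it does not start bytecode execution.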

    Class *GetClass() const
    {
        return reinterpret_cast<Class *>(classWord_);
    }

    void SetClass(Class *cls)
    {
        classWord_ = static_cast<ClassHelper::ClassWordSize>(ToObjPtrType(cls));
    }

    void SetPandaFile(const panda_file::File *file)
    {
        pandaFile_ = file;
    }

    const panda_file::File *GetPandaFile() const
    {
        return pandaFile_;
    }

    panda_file::File::EntityId GetFileId() const
    {
        return fileId_;
    }

    panda_file::File::EntityId GetCodeId() const
    {
        return codeId_;
    }

    inline int16_t GetHotnessCounter() const
    {
        return stor16Pair_.hotnessCounter;
    }

    inline NO_THREAD_SANITIZE void DecrementHotnessCounter()
    {
        --stor16Pair_.hotnessCounter;
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    static NO_THREAD_SANITIZE int16_t GetInitialHotnessCounter();

    NO_THREAD_SANITIZE void ResetHotnessCounter();

    template <class AccVRegisterPtrT>
    NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] AccVRegisterPtrT acc);
    template <class AccVRegisterPtrT>
    NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] ManagedThread *thread, [[maybe_unused]] AccVRegisterPtrT acc);

    // NO_THREAD_SANITIZE because of performance degradation (see commit 7c913cb1 and MR 997#note_113500)
    template <bool IS_CALL, class AccVRegisterPtrT>
    NO_THREAD_SANITIZE bool DecrementHotnessCounter(uintptr_t bytecodeOffset, [[maybe_unused]] AccVRegisterPtrT cc,
                                                    bool osr = false,
                                                    coretypes::TaggedValue func = coretypes::TaggedValue::Hole());

    template <bool IS_CALL, class AccVRegisterPtrT>
    NO_THREAD_SANITIZE bool DecrementHotnessCounter(ManagedThread *thread, uintptr_t bcOffset,
                                                    [[maybe_unused]] AccVRegisterPtrT cc, bool osr = false,
                                                    coretypes::TaggedValue func = coretypes::TaggedValue::Hole());

    // NOTE(xucheng): change the input type to uint16_t when we don't input the max num of int32_t
    inline NO_THREAD_SANITIZE void SetHotnessCounter(uint32_t counter)
    {
        stor16Pair_.hotnessCounter = static_cast<uint16_t>(counter);
    }
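    // Note (a summary of the declarations above, not an additional contract): the hotness counter
    // is the profiling/tiering trigger. The interpreter decrements it via the templated
    // DecrementHotnessCounter() overloads on calls (IS_CALL) and on backward branches, and when the
    // counter expires those overloads may request compilation of this method, optionally as OSR at
    // the given bytecode offset (hence the `osr` flag and the returned bool).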

    PANDA_PUBLIC_API int64_t GetBranchTakenCounter(uint32_t pc);
    PANDA_PUBLIC_API int64_t GetBranchNotTakenCounter(uint32_t pc);

    int64_t GetThrowTakenCounter(uint32_t pc);

    const void *GetCompiledEntryPoint()
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return compiledEntryPoint_.load(std::memory_order_acquire);
    }

    const void *GetCompiledEntryPoint() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return compiledEntryPoint_.load(std::memory_order_acquire);
    }

    void SetCompiledEntryPoint(const void *entryPoint)
    {
        // Atomic with release order reason: data race with compiled_entry_point_ with dependencies on writes before the
        // store which should become visible acquire
        compiledEntryPoint_.store(entryPoint, std::memory_order_release);
    }

    void SetInterpreterEntryPoint()
    {
        if (!IsNative()) {
            SetCompiledEntryPoint(GetCompiledCodeToInterpreterBridge(this));
        }
    }

    bool HasCompiledCode() const
    {
        auto entryPoint = GetCompiledEntryPoint();
        return entryPoint != GetCompiledCodeToInterpreterBridge() &&
               entryPoint != GetCompiledCodeToInterpreterBridgeDyn();
    }

    inline CompilationStage GetCompilationStatus() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return static_cast<CompilationStage>((accessFlags_.load(std::memory_order_acquire) & COMPILATION_STATUS_MASK) >>
                                             COMPILATION_STATUS_SHIFT);
    }

    inline CompilationStage GetCompilationStatus(uint32_t value)
    {
        return static_cast<CompilationStage>((value & COMPILATION_STATUS_MASK) >> COMPILATION_STATUS_SHIFT);
    }

    inline void SetCompilationStatus(enum CompilationStage newStatus)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        auto result = (accessFlags_.load(std::memory_order_acquire) & ~COMPILATION_STATUS_MASK) |
                      static_cast<uint32_t>(newStatus) << COMPILATION_STATUS_SHIFT;
        // Atomic with release order reason: data race with access_flags_ with dependencies on writes before the store
        // which should become visible acquire
        accessFlags_.store(result, std::memory_order_release);
    }

    inline bool AtomicSetCompilationStatus(enum CompilationStage oldStatus, enum CompilationStage newStatus)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        uint32_t oldValue = accessFlags_.load(std::memory_order_acquire);
        while (GetCompilationStatus(oldValue) == oldStatus) {
            uint32_t newValue = MakeCompilationStatusValue(oldValue, newStatus);
            if (accessFlags_.compare_exchange_strong(oldValue, newValue)) {
                return true;
            }
        }
        return false;
    }
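    // Illustrative only: the compilation stage is packed into accessFlags_ next to the access
    // modifiers, so a transition is a read-modify-write of just the status bits, e.g.
    //
    //   value = (value & ~COMPILATION_STATUS_MASK) | (static_cast<uint32_t>(WAITING) << COMPILATION_STATUS_SHIFT);
    //
    // AtomicSetCompilationStatus() above keeps retrying the CAS only while the currently stored
    // stage still equals `oldStatus`, so a concurrent transition by another thread makes it return
    // false instead of overwriting that thread's update.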

    panda_file::Type GetReturnType() const;

    panda_file::File::StringData GetRefReturnType() const;

    // idx - index number of the argument in the signature
    PANDA_PUBLIC_API panda_file::Type GetArgType(size_t idx) const;

    PANDA_PUBLIC_API panda_file::File::StringData GetRefArgType(size_t idx) const;

    template <typename Callback>
    void EnumerateTypes(Callback handler) const;

    PANDA_PUBLIC_API panda_file::File::StringData GetName() const;

    PANDA_PUBLIC_API panda_file::File::StringData GetClassName() const;

    PANDA_PUBLIC_API PandaString GetFullName(bool withSignature = false) const;
    PANDA_PUBLIC_API PandaString GetLineNumberAndSourceFile(uint32_t bcOffset) const;

    // CC-OFFNXT(G.INC.10) false positive: static method
    static uint32_t GetFullNameHashFromString(const PandaString &str);
    // CC-OFFNXT(G.INC.10) false positive: static method
    static uint32_t GetClassNameHashFromString(const PandaString &str);

    PANDA_PUBLIC_API Proto GetProto() const;

    PANDA_PUBLIC_API ProtoId GetProtoId() const;

    size_t GetFrameSize() const
    {
        return Frame::GetAllocSize(GetNumArgs() + GetNumVregs(), EMPTY_EXT_FRAME_DATA_SIZE);
    }

    uint32_t GetNumericalAnnotation(AnnotationField fieldId) const;
    panda_file::File::StringData GetStringDataAnnotation(AnnotationField fieldId) const;

    uint32_t GetAccessFlags() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return accessFlags_.load(std::memory_order_acquire);
    }

    void SetAccessFlags(uint32_t accessFlags)
    {
        // Atomic with release order reason: data race with access_flags_ with dependencies on writes before the store
        // which should become visible acquire
        accessFlags_.store(accessFlags, std::memory_order_release);
    }

    bool IsStatic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_STATIC) != 0;
    }

    bool IsNative() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_NATIVE) != 0;
    }

    bool IsPublic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PUBLIC) != 0;
    }

    bool IsPrivate() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PRIVATE) != 0;
    }

    bool IsProtected() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PROTECTED) != 0;
    }

    bool IsIntrinsic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_INTRINSIC) != 0;
    }

    bool IsSynthetic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SYNTHETIC) != 0;
    }

    bool IsAbstract() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_ABSTRACT) != 0;
    }

    bool IsFinal() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_FINAL) != 0;
    }

    bool IsSynchronized() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SYNCHRONIZED) != 0;
    }

    bool HasVarArgs() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_VARARGS) != 0;
    }

    bool HasSingleImplementation() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SINGLE_IMPL) != 0;
    }

    bool IsProfiled() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PROFILING) != 0;
    }

    bool IsDestroyed() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_DESTROYED) != 0;
    }

    void SetHasSingleImplementation(bool v)
    {
        if (v) {
            // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
            // and on writes before the store
            accessFlags_.fetch_or(ACC_SINGLE_IMPL, std::memory_order_acq_rel);
        } else {
            // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
            // and on writes before the store
            accessFlags_.fetch_and(~ACC_SINGLE_IMPL, std::memory_order_acq_rel);
        }
    }

    void SetProfiled()
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        accessFlags_.fetch_or(ACC_PROFILING, std::memory_order_acq_rel);
    }

    void SetDestroyed()
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        accessFlags_.fetch_or(ACC_DESTROYED, std::memory_order_acq_rel);
    }

    Method *GetSingleImplementation()
    {
        return HasSingleImplementation() ? this : nullptr;
    }

    void SetIntrinsic(intrinsics::Intrinsic intrinsic)
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        ASSERT((accessFlags_.load(std::memory_order_acquire) & INTRINSIC_MASK) == 0);
        auto result = ACC_INTRINSIC | static_cast<uint32_t>(intrinsic) << INTRINSIC_SHIFT;
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load and
        // on writes before the store
        accessFlags_.fetch_or(result, std::memory_order_acq_rel);
    }

    intrinsics::Intrinsic GetIntrinsic() const
    {
        ASSERT(IsIntrinsic());
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return static_cast<intrinsics::Intrinsic>((accessFlags_.load(std::memory_order_acquire) & INTRINSIC_MASK) >>
                                                  INTRINSIC_SHIFT);
    }

    void SetVTableIndex(uint16_t vtableIndex)
    {
        stor16Pair_.vtableIndex = vtableIndex;
    }

    uint16_t GetVTableIndex() const
    {
        return stor16Pair_.vtableIndex;
    }

    void SetNativePointer(void *nativePointer)
    {
        ASSERT((IsNative() || IsProxy()));
        // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
        // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        pointer_.nativePointer.store(nativePointer, std::memory_order_relaxed);
    }

    void *GetNativePointer() const
    {
        ASSERT((IsNative() || IsProxy()));
        // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
        // constraints imposed on other reads or writes NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.nativePointer.load(std::memory_order_relaxed);
    }

    const uint16_t *GetShorty() const
    {
        return shorty_;
    }

    uint32_t FindCatchBlockInPandaFile(const Class *cls, uint32_t pc) const;
    uint32_t FindCatchBlock(const Class *cls, uint32_t pc) const;

    PANDA_PUBLIC_API panda_file::Type GetEffectiveArgType(size_t idx) const;

    PANDA_PUBLIC_API panda_file::Type GetEffectiveReturnType() const;

    void SetIsDefaultInterfaceMethod()
    {
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load and
        // on writes before the store
        accessFlags_.fetch_or(ACC_DEFAULT_INTERFACE_METHOD, std::memory_order_acq_rel);
    }

    bool IsDefaultInterfaceMethod() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_DEFAULT_INTERFACE_METHOD) != 0;
    }

    bool IsConstructor() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_CONSTRUCTOR) != 0;
    }

    bool IsInstanceConstructor() const
    {
        return IsConstructor() && !IsStatic();
    }

    bool IsStaticConstructor() const
    {
        return IsConstructor() && IsStatic();
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetAccessFlagsOffset()
    {
        return MEMBER_OFFSET(Method, accessFlags_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetNumArgsOffset()
    {
        return MEMBER_OFFSET(Method, numArgs_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetVTableIndexOffset()
    {
        return MEMBER_OFFSET(Method, stor16Pair_) + MEMBER_OFFSET(Storage16Pair, vtableIndex);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetHotnessCounterOffset()
    {
        return MEMBER_OFFSET(Method, stor16Pair_) + MEMBER_OFFSET(Storage16Pair, hotnessCounter);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetClassOffset()
    {
        return MEMBER_OFFSET(Method, classWord_);
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetCompiledEntryPointOffset()
    {
        return MEMBER_OFFSET(Method, compiledEntryPoint_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetPandaFileOffset()
    {
        return MEMBER_OFFSET(Method, pandaFile_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetCodeIdOffset()
    {
        return MEMBER_OFFSET(Method, codeId_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetNativePointerOffset()
    {
        return MEMBER_OFFSET(Method, pointer_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetShortyOffset()
    {
        return MEMBER_OFFSET(Method, shorty_);
    }

    template <typename Callback>
    void EnumerateTryBlocks(Callback callback) const;

    template <typename Callback>
    void EnumerateCatchBlocks(Callback callback) const;

    template <typename Callback>
    void EnumerateExceptionHandlers(Callback callback) const;

    // CC-OFFNXT(G.INC.10) false positive: static method
    static inline UniqId CalcUniqId(const panda_file::File *file, panda_file::File::EntityId fileId)
    {
        constexpr uint64_t HALF = 32ULL;
        uint64_t uid = file->GetUniqId();
        uid <<= HALF;
        uid |= fileId.GetOffset();
        return uid;
    }
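    // Worked example (values assumed): with file->GetUniqId() == 0x1 and an entity offset of 0x2A,
    // CalcUniqId() yields (0x1ULL << 32U) | 0x2A == 0x000000010000002A, i.e. the panda file id in
    // the upper 32 bits and the method's EntityId offset in the lower 32 bits.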

    // for synthetic methods, like array .ctor
    // CC-OFFNXT(G.INC.10) false positive: static method
    static UniqId CalcUniqId(const uint8_t *classDescr, const uint8_t *name);

    UniqId GetUniqId() const
    {
        return CalcUniqId(pandaFile_, fileId_);
    }

    size_t GetLineNumFromBytecodeOffset(uint32_t bcOffset) const;

    panda_file::File::StringData GetClassSourceFile() const;

    PANDA_PUBLIC_API void StartProfiling();
    PANDA_PUBLIC_API void StopProfiling();

    bool IsProxy() const;

    ProfilingData *GetProfilingData()
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return nullptr;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    ProfilingData *GetProfilingDataWithoutCheck()
    {
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    const ProfilingData *GetProfilingData() const
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return nullptr;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    bool IsProfiling() const
    {
        return GetProfilingData() != nullptr;
    }

    bool IsProfilingWithoutLock() const
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return false;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire) != nullptr;
    }

    void SetVerified(bool result);
    bool IsVerified() const;
    PANDA_PUBLIC_API bool Verify();
    template <bool IS_CALL>
    bool TryVerify();

    // CC-OFFNXT(G.INC.10) false positive: static method
    inline static VerificationStage GetVerificationStage(uint32_t value)
    {
        return static_cast<VerificationStage>((value & VERIFICATION_STATUS_MASK) >> VERIFICATION_STATUS_SHIFT);
    }

    inline VerificationStage GetVerificationStage() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return GetVerificationStage(accessFlags_.load(std::memory_order_acquire));
    }

    inline void SetVerificationStage(enum VerificationStage newStage)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        uint32_t oldValue = accessFlags_.load(std::memory_order_acquire);
        uint32_t newValue = MakeVerificationStageValue(oldValue, newStage);
        while (!accessFlags_.compare_exchange_weak(oldValue, newValue, std::memory_order_acq_rel)) {
            newValue = MakeVerificationStageValue(oldValue, newStage);
        }
    }

private:
    inline void FillVecsByInsts(BytecodeInstruction &inst, PandaVector<uint32_t> &vcalls,
                                PandaVector<uint32_t> &branches, PandaVector<uint32_t> &throws) const;

    Value InvokeCompiledCode(ManagedThread *thread, uint32_t numArgs, Value *args);

    Value GetReturnValueFromTaggedValue(uint64_t retValue)
    {
        panda_file::Type retType = GetReturnType();
        if (retType.GetId() == panda_file::Type::TypeId::VOID) {
            return Value(static_cast<int64_t>(0));
        }
        if (retType.GetId() == panda_file::Type::TypeId::REFERENCE) {
            return Value(reinterpret_cast<ObjectHeader *>(retValue));
        }
        return Value(retValue);
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    inline static uint32_t MakeCompilationStatusValue(uint32_t value, CompilationStage newStatus)
    {
        value &= ~COMPILATION_STATUS_MASK;
        value |= static_cast<uint32_t>(newStatus) << COMPILATION_STATUS_SHIFT;
        return value;
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    inline static uint32_t MakeVerificationStageValue(uint32_t value, VerificationStage newStage)
    {
        value &= ~VERIFICATION_STATUS_MASK;
        value |= static_cast<uint32_t>(newStage) << VERIFICATION_STATUS_SHIFT;
        return value;
    }

    template <class InvokeHelper, class ValueT>
    ValueT InvokeInterpretedCode(ManagedThread *thread, uint32_t numActualArgs, ValueT *args);

    template <class InvokeHelper, class ValueT>
    PandaUniquePtr<Frame, FrameDeleter> InitFrame(ManagedThread *thread, uint32_t numActualArgs, ValueT *args,
                                                  Frame *currentFrame);

    template <class InvokeHelper, class ValueT, bool IS_NATIVE_METHOD>
    PandaUniquePtr<Frame, FrameDeleter> InitFrameWithNumVRegs(ManagedThread *thread, uint32_t numVregs,
                                                              uint32_t numActualArgs, ValueT *args,
                                                              Frame *currentFrame);

    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromException();

    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromAcc(interpreter::AccVRegister &accVreg);

    template <class InvokeHelper, class ValueT>
    ValueT InvokeImpl(ManagedThread *thread, uint32_t numActualArgs, ValueT *args, bool proxyCall);

    template <bool IS_CALL>
    inline bool DecrementHotnessCounterForTaggedFunction(ManagedThread *thread, uintptr_t bcOffset, bool osr,
                                                         coretypes::TaggedValue func);

private:
    union PointerInMethod {
        // It's a native pointer when the method is a native or proxy method.
        std::atomic<void *> nativePointer;
        // It's profiling data when the method isn't a native or proxy method.
        std::atomic<ProfilingData *> profilingData;
    };

    struct Storage16Pair {
        uint16_t vtableIndex;
        int16_t hotnessCounter;
    };

    std::atomic_uint32_t accessFlags_;
    uint32_t numArgs_;
    Storage16Pair stor16Pair_;
    ClassHelper::ClassWordSize classWord_;

    std::atomic<const void *> compiledEntryPoint_ {nullptr};
    const panda_file::File *pandaFile_;
    union PointerInMethod pointer_ {};

    panda_file::File::EntityId fileId_;
    panda_file::File::EntityId codeId_;
    const uint16_t *shorty_;
};

static_assert(!std::is_polymorphic_v<Method>);

}  // namespace ark

#endif  // PANDA_RUNTIME_METHOD_H_