/**
 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_METHOD_H_
#define PANDA_RUNTIME_METHOD_H_

#include <atomic>
#include <cstdint>
#include <functional>
#include <string_view>

#include "intrinsics_enum.h"
#include "libpandabase/utils/arch.h"
#include "libpandabase/utils/logger.h"
#include "libpandafile/code_data_accessor-inl.h"
#include "libpandafile/file.h"
#include "libpandafile/file_items.h"
#include "libpandafile/method_data_accessor.h"
#include "libpandafile/modifiers.h"
#include "runtime/bridge/bridge.h"
#include "runtime/include/compiler_interface.h"
#include "runtime/include/class_helper.h"
#include "runtime/include/mem/panda_containers.h"
#include "runtime/include/mem/panda_smart_pointers.h"
#include "runtime/interpreter/frame.h"
#include "value.h"

namespace ark {

class Class;
class ManagedThread;
class ProfilingData;

#ifdef PANDA_ENABLE_GLOBAL_REGISTER_VARIABLES
namespace interpreter {
class AccVRegisterT;
}  // namespace interpreter
using interpreter::AccVRegisterT;
#else
namespace interpreter {
using AccVRegisterT = AccVRegister;
}  // namespace interpreter
#endif

class FrameDeleter {
public:
    explicit FrameDeleter(ManagedThread *thread) : thread_(thread) {}

    void operator()(Frame *frame) const;

private:
    ManagedThread *thread_;
};
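
// Note: FrameDeleter above is used as the deleter of PandaUniquePtr<Frame, FrameDeleter>
// (see Method::InitFrame further down), so interpreter frames allocated for an invocation
// are released on the owning thread when the smart pointer goes out of scope.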

class Method {
public:
    using UniqId = uint64_t;

    enum CompilationStage {
        NOT_COMPILED,
        WAITING,
        COMPILATION,
        COMPILED,
        FAILED,
    };

    enum class VerificationStage { NOT_VERIFIED = 0, VERIFIED_FAIL = 1, VERIFIED_OK = 2, LAST = VERIFIED_OK };

    static_assert(MinimumBitsToStore(VerificationStage::LAST) <= VERIFICATION_STATUS_WIDTH);

    using AnnotationField = panda_file::MethodDataAccessor::AnnotationField;

    class Proto {
    public:
        using ShortyVector = PandaSmallVector<panda_file::Type>;
        using RefTypeVector = PandaSmallVector<std::string_view>;
        Proto() = default;

        Proto(const panda_file::File &pf, panda_file::File::EntityId protoId);

        Proto(ShortyVector shorty, RefTypeVector refTypes) : shorty_(std::move(shorty)), refTypes_(std::move(refTypes))
        {
        }

        bool operator==(const Proto &other) const
        {
            return shorty_ == other.shorty_ && refTypes_ == other.refTypes_;
        }

        panda_file::Type GetReturnType() const
        {
            return shorty_[0];
        }

        PANDA_PUBLIC_API std::string_view GetReturnTypeDescriptor() const;
        PandaString GetSignature(bool includeReturnType = true);

        ShortyVector &GetShorty()
        {
            return shorty_;
        }

        const ShortyVector &GetShorty() const
        {
            return shorty_;
        }

        RefTypeVector &GetRefTypes()
        {
            return refTypes_;
        }

        const RefTypeVector &GetRefTypes() const
        {
            return refTypes_;
        }

        ~Proto() = default;

        DEFAULT_COPY_SEMANTIC(Proto);
        DEFAULT_MOVE_SEMANTIC(Proto);

    private:
        ShortyVector shorty_;
        RefTypeVector refTypes_;
    };
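
    // Layout note (informal): in a Proto the shorty starts with the return type (GetReturnType()
    // reads shorty_[0]); the remaining entries are assumed to describe the argument types in
    // signature order, with refTypes_ holding the descriptors of the reference-typed entries.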

    class PANDA_PUBLIC_API ProtoId {
    public:
        ProtoId(const panda_file::File &pf, panda_file::File::EntityId protoId) : pf_(pf), protoId_(protoId) {}
        bool operator==(const ProtoId &other) const;
        bool operator==(const Proto &other) const;
        bool operator!=(const ProtoId &other) const
        {
            return !operator==(other);
        }
        bool operator!=(const Proto &other) const
        {
            return !operator==(other);
        }

        const panda_file::File &GetPandaFile() const
        {
            return pf_;
        }

        const panda_file::File::EntityId &GetEntityId() const
        {
            return protoId_;
        }

        ~ProtoId() = default;

        DEFAULT_COPY_CTOR(ProtoId);
        NO_COPY_OPERATOR(ProtoId);
        NO_MOVE_SEMANTIC(ProtoId);

    private:
        const panda_file::File &pf_;
        panda_file::File::EntityId protoId_;
    };
    // CC-OFFNXT(G.FUN.01) solid logic
    PANDA_PUBLIC_API Method(Class *klass, const panda_file::File *pf, panda_file::File::EntityId fileId,
                            panda_file::File::EntityId codeId, uint32_t accessFlags, uint32_t numArgs,
                            const uint16_t *shorty);

    explicit Method(const Method *method)
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        : accessFlags_(method->accessFlags_.load(std::memory_order_acquire)),
          numArgs_(method->numArgs_),
          stor16Pair_(method->stor16Pair_),
          classWord_(method->classWord_),
          pandaFile_(method->pandaFile_),
          fileId_(method->fileId_),
          codeId_(method->codeId_),
          shorty_(method->shorty_)
    {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        pointer_.nativePointer.store(
            // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
            // constraints imposed on other reads or writes
            // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
            method->pointer_.nativePointer.load(std::memory_order_relaxed), std::memory_order_relaxed);

        // Atomic with release order reason: data race with compiled_entry_point_ with dependencies on writes before the
        // store which should become visible acquire
        compiledEntryPoint_.store(method->IsNative() ? method->GetCompiledEntryPoint()
                                                     : GetCompiledCodeToInterpreterBridge(method),
                                  std::memory_order_release);
        SetCompilationStatus(CompilationStage::NOT_COMPILED);
    }

    Method() = delete;
    Method(const Method &) = delete;
    Method(Method &&) = delete;
    Method &operator=(const Method &) = delete;
    Method &operator=(Method &&) = delete;
    ~Method() = default;

    uint32_t GetNumArgs() const
    {
        return numArgs_;
    }

    uint32_t GetNumVregs() const
    {
        if (!codeId_.IsValid()) {
            return 0;
        }
        return panda_file::CodeDataAccessor::GetNumVregs(*(pandaFile_), codeId_);
    }

    uint32_t GetCodeSize() const
    {
        if (!codeId_.IsValid()) {
            return 0;
        }
        panda_file::CodeDataAccessor cda(*(pandaFile_), codeId_);
        return cda.GetCodeSize();
    }

    const uint8_t *GetInstructions() const
    {
        if (!codeId_.IsValid()) {
            return nullptr;
        }
        return panda_file::CodeDataAccessor::GetInstructions(*pandaFile_, codeId_);
    }

    /*
     * Invoke the method as a static method.
     * The number of arguments and their types must match the method's signature.
     */
    PANDA_PUBLIC_API Value Invoke(ManagedThread *thread, Value *args, bool proxyCall = false);

    void InvokeVoid(ManagedThread *thread, Value *args)
    {
        Invoke(thread, args);
    }
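
    // A minimal usage sketch for Invoke (illustrative only: the argument values are assumptions
    // and must match the callee's signature, and the calling thread is assumed to be the current
    // ManagedThread):
    //
    //   Value args[] = {Value(static_cast<int64_t>(42))};
    //   Value result = method->Invoke(ManagedThread::GetCurrent(), args);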

    /*
     * Invoke the method as a dynamic function.
     * The number of arguments may vary; all arguments must be of type coretypes::TaggedValue.
     * args - array of arguments. The first value must be the callee function object
     * num_args - length of the args array
     * data - ark::ExtFrame language-related extension data
     */
    coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t numArgs, coretypes::TaggedValue *args);

    template <class InvokeHelper>
    coretypes::TaggedValue InvokeDyn(ManagedThread *thread, uint32_t numArgs, coretypes::TaggedValue *args);

    template <class InvokeHelper>
    void InvokeEntry(ManagedThread *thread, Frame *currentFrame, Frame *frame, const uint8_t *pc);

    /*
     * Enter an execution context (ECMAScript generators).
     * pc - pc of the context
     * acc - accumulator of the context
     * nregs - number of registers in the context
     * regs - registers of the context
     * data - ark::ExtFrame language-related extension data
     */
    coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue *acc,
                                         uint32_t nregs, coretypes::TaggedValue *regs);

    template <class InvokeHelper>
    coretypes::TaggedValue InvokeContext(ManagedThread *thread, const uint8_t *pc, coretypes::TaggedValue *acc,
                                         uint32_t nregs, coretypes::TaggedValue *regs);

    /*
     * Create a new frame for a native method, but don't start execution.
     * The number of arguments may vary; all arguments must be of type coretypes::TaggedValue.
     * args - array of arguments. The first value must be the callee function object
     * num_vregs - number of registers in the frame
     * num_args - length of the args array
     * data - ark::ExtFrame language-related extension data
     */
    template <class InvokeHelper, class ValueT>
    Frame *EnterNativeMethodFrame(ManagedThread *thread, uint32_t numVregs, uint32_t numArgs, ValueT *args);

    /*
     * Pop the native method frame
     */
    // CC-OFFNXT(G.INC.10) false positive: static method
    static void ExitNativeMethodFrame(ManagedThread *thread);

    Class *GetClass() const
    {
        return reinterpret_cast<Class *>(classWord_);
    }

    void SetClass(Class *cls)
    {
        classWord_ = static_cast<ClassHelper::ClassWordSize>(ToObjPtrType(cls));
    }

    void SetPandaFile(const panda_file::File *file)
    {
        pandaFile_ = file;
    }

    const panda_file::File *GetPandaFile() const
    {
        return pandaFile_;
    }

    panda_file::File::EntityId GetFileId() const
    {
        return fileId_;
    }

    panda_file::File::EntityId GetCodeId() const
    {
        return codeId_;
    }

    inline int16_t GetHotnessCounter() const
    {
        return stor16Pair_.hotnessCounter;
    }

    inline NO_THREAD_SANITIZE void DecrementHotnessCounter()
    {
        --stor16Pair_.hotnessCounter;
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    static NO_THREAD_SANITIZE int16_t GetInitialHotnessCounter();

    NO_THREAD_SANITIZE void ResetHotnessCounter();

    template <class AccVRegisterPtrT>
    NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] AccVRegisterPtrT acc);
    template <class AccVRegisterPtrT>
    NO_THREAD_SANITIZE void SetAcc([[maybe_unused]] ManagedThread *thread, [[maybe_unused]] AccVRegisterPtrT acc);

    // NO_THREAD_SANITIZE because of performance degradation (see commit 7c913cb1 and MR 997#note_113500)
    template <bool IS_CALL, class AccVRegisterPtrT>
    NO_THREAD_SANITIZE bool DecrementHotnessCounter(uintptr_t bytecodeOffset, [[maybe_unused]] AccVRegisterPtrT cc,
                                                    bool osr = false,
                                                    coretypes::TaggedValue func = coretypes::TaggedValue::Hole());

    template <bool IS_CALL, class AccVRegisterPtrT>
    NO_THREAD_SANITIZE bool DecrementHotnessCounter(ManagedThread *thread, uintptr_t bcOffset,
                                                    [[maybe_unused]] AccVRegisterPtrT cc, bool osr = false,
                                                    coretypes::TaggedValue func = coretypes::TaggedValue::Hole());

    // NOTE(xucheng): change the input type to uint16_t when we don't input the max num of int32_t
    inline NO_THREAD_SANITIZE void SetHotnessCounter(uint32_t counter)
    {
        stor16Pair_.hotnessCounter = static_cast<uint16_t>(counter);
    }

    PANDA_PUBLIC_API int64_t GetBranchTakenCounter(uint32_t pc);
    PANDA_PUBLIC_API int64_t GetBranchNotTakenCounter(uint32_t pc);

    int64_t GetThrowTakenCounter(uint32_t pc);

    const void *GetCompiledEntryPoint()
    {
        // Atomic with acquire order reason: data race with compiled_entry_point_ with dependencies on reads after the
        // load which should become visible
        return compiledEntryPoint_.load(std::memory_order_acquire);
    }

    const void *GetCompiledEntryPoint() const
    {
        // Atomic with acquire order reason: data race with compiled_entry_point_ with dependencies on reads after the
        // load which should become visible
        return compiledEntryPoint_.load(std::memory_order_acquire);
    }

    void SetCompiledEntryPoint(const void *entryPoint)
    {
        // Atomic with release order reason: data race with compiled_entry_point_ with dependencies on writes before the
        // store which should become visible acquire
        compiledEntryPoint_.store(entryPoint, std::memory_order_release);
    }

    void SetInterpreterEntryPoint()
    {
        if (!IsNative()) {
            SetCompiledEntryPoint(GetCompiledCodeToInterpreterBridge(this));
        }
    }

    bool HasCompiledCode() const
    {
        auto entryPoint = GetCompiledEntryPoint();
        return entryPoint != GetCompiledCodeToInterpreterBridge() &&
               entryPoint != GetCompiledCodeToInterpreterBridgeDyn();
    }

    inline CompilationStage GetCompilationStatus() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return static_cast<CompilationStage>((accessFlags_.load(std::memory_order_acquire) & COMPILATION_STATUS_MASK) >>
                                             COMPILATION_STATUS_SHIFT);
    }

    inline CompilationStage GetCompilationStatus(uint32_t value)
    {
        return static_cast<CompilationStage>((value & COMPILATION_STATUS_MASK) >> COMPILATION_STATUS_SHIFT);
    }

    inline void SetCompilationStatus(enum CompilationStage newStatus)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        auto result = (accessFlags_.load(std::memory_order_acquire) & ~COMPILATION_STATUS_MASK) |
                      static_cast<uint32_t>(newStatus) << COMPILATION_STATUS_SHIFT;
        // Atomic with release order reason: data race with access_flags_ with dependencies on writes before the store
        // which should become visible acquire
        accessFlags_.store(result, std::memory_order_release);
    }

    inline bool AtomicSetCompilationStatus(enum CompilationStage oldStatus, enum CompilationStage newStatus)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        uint32_t oldValue = accessFlags_.load(std::memory_order_acquire);
        while (GetCompilationStatus(oldValue) == oldStatus) {
            uint32_t newValue = MakeCompilationStatusValue(oldValue, newStatus);
            if (accessFlags_.compare_exchange_strong(oldValue, newValue)) {
                return true;
            }
        }
        return false;
    }
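
    // Illustrative only: the compilation stage is kept in a bit field of accessFlags_
    // (COMPILATION_STATUS_MASK / COMPILATION_STATUS_SHIFT), so a compilation queue could claim a
    // method roughly like this (a sketch, not the actual compiler code):
    //
    //   if (method->AtomicSetCompilationStatus(Method::NOT_COMPILED, Method::WAITING)) {
    //       // enqueue the method; the compiler later stores COMPILED or FAILED
    //   }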

    panda_file::Type GetReturnType() const;

    panda_file::File::StringData GetRefReturnType() const;

    // idx - index number of the argument in the signature
    PANDA_PUBLIC_API panda_file::Type GetArgType(size_t idx) const;

    PANDA_PUBLIC_API panda_file::File::StringData GetRefArgType(size_t idx) const;

    template <typename Callback>
    void EnumerateTypes(Callback handler) const;

    PANDA_PUBLIC_API panda_file::File::StringData GetName() const;

    PANDA_PUBLIC_API panda_file::File::StringData GetClassName() const;

    PANDA_PUBLIC_API PandaString GetFullName(bool withSignature = false) const;
    PANDA_PUBLIC_API PandaString GetLineNumberAndSourceFile(uint32_t bcOffset) const;

    // CC-OFFNXT(G.INC.10) false positive: static method
    static uint32_t GetFullNameHashFromString(const PandaString &str);
    // CC-OFFNXT(G.INC.10) false positive: static method
    static uint32_t GetClassNameHashFromString(const PandaString &str);

    PANDA_PUBLIC_API Proto GetProto() const;

    PANDA_PUBLIC_API ProtoId GetProtoId() const;

    size_t GetFrameSize() const
    {
        return Frame::GetAllocSize(GetNumArgs() + GetNumVregs(), EMPTY_EXT_FRAME_DATA_SIZE);
    }

    uint32_t GetNumericalAnnotation(AnnotationField fieldId) const;
    panda_file::File::StringData GetStringDataAnnotation(AnnotationField fieldId) const;

    uint32_t GetAccessFlags() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return accessFlags_.load(std::memory_order_acquire);
    }

    void SetAccessFlags(uint32_t accessFlags)
    {
        // Atomic with release order reason: data race with access_flags_ with dependencies on writes before the store
        // which should become visible acquire
        accessFlags_.store(accessFlags, std::memory_order_release);
    }

    bool IsStatic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_STATIC) != 0;
    }

    bool IsNative() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_NATIVE) != 0;
    }

    bool IsPublic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PUBLIC) != 0;
    }

    bool IsPrivate() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PRIVATE) != 0;
    }

    bool IsProtected() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PROTECTED) != 0;
    }

    bool IsIntrinsic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_INTRINSIC) != 0;
    }

    bool IsSynthetic() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SYNTHETIC) != 0;
    }

    bool IsAbstract() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_ABSTRACT) != 0;
    }

    bool IsFinal() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_FINAL) != 0;
    }

    bool IsSynchronized() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SYNCHRONIZED) != 0;
    }

    bool HasVarArgs() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_VARARGS) != 0;
    }

    bool HasSingleImplementation() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_SINGLE_IMPL) != 0;
    }

    bool IsProfiled() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_PROFILING) != 0;
    }

    bool IsDestroyed() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_DESTROYED) != 0;
    }

    void SetHasSingleImplementation(bool v)
    {
        if (v) {
            // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
            // and on writes before the store
            accessFlags_.fetch_or(ACC_SINGLE_IMPL, std::memory_order_acq_rel);
        } else {
            // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
            // and on writes before the store
            accessFlags_.fetch_and(~ACC_SINGLE_IMPL, std::memory_order_acq_rel);
        }
    }

    void SetProfiled()
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        accessFlags_.fetch_or(ACC_PROFILING, std::memory_order_acq_rel);
    }

    void SetDestroyed()
    {
        ASSERT(!IsIntrinsic());
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load
        // and on writes before the store
        accessFlags_.fetch_or(ACC_DESTROYED, std::memory_order_acq_rel);
    }

    Method *GetSingleImplementation()
    {
        return HasSingleImplementation() ? this : nullptr;
    }

    void SetIntrinsic(intrinsics::Intrinsic intrinsic)
    {
        ASSERT(!IsIntrinsic());

        // Atomic with relaxed order reason: there is a release store below, which will make this store visible
        // for other threads
        intrinsicId_.store(static_cast<uint32_t>(intrinsic), std::memory_order_relaxed);
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load and
        // on writes before the store
        accessFlags_.fetch_or(ACC_INTRINSIC, std::memory_order_acq_rel);
    }

    intrinsics::Intrinsic GetIntrinsic() const
    {
        ASSERT(IsIntrinsic());
        // Atomic with acquire order reason: data race with intrinsicId_ with dependencies on reads after the load which
        // should become visible
        return static_cast<intrinsics::Intrinsic>(intrinsicId_.load(std::memory_order_acquire));
    }

    void SetVTableIndex(uint16_t vtableIndex)
    {
        stor16Pair_.vtableIndex = vtableIndex;
    }

    uint16_t GetVTableIndex() const
    {
        return stor16Pair_.vtableIndex;
    }

    void SetNativePointer(void *nativePointer)
    {
        ASSERT((IsNative() || IsProxy()));
        // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
        // constraints imposed on other reads or writes
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        pointer_.nativePointer.store(nativePointer, std::memory_order_relaxed);
    }

    void *GetNativePointer() const
    {
        ASSERT((IsNative() || IsProxy()));
        // Atomic with relaxed order reason: data race with native_pointer_ with no synchronization or ordering
        // constraints imposed on other reads or writes
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.nativePointer.load(std::memory_order_relaxed);
    }

    const uint16_t *GetShorty() const
    {
        return shorty_;
    }

    uint32_t FindCatchBlockInPandaFile(const Class *cls, uint32_t pc) const;
    uint32_t FindCatchBlock(const Class *cls, uint32_t pc) const;

    PANDA_PUBLIC_API panda_file::Type GetEffectiveArgType(size_t idx) const;

    PANDA_PUBLIC_API panda_file::Type GetEffectiveReturnType() const;

    void SetIsDefaultInterfaceMethod()
    {
        // Atomic with acq_rel order reason: data race with access_flags_ with dependencies on reads after the load and
        // on writes before the store
        accessFlags_.fetch_or(ACC_DEFAULT_INTERFACE_METHOD, std::memory_order_acq_rel);
    }

    bool IsDefaultInterfaceMethod() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_DEFAULT_INTERFACE_METHOD) != 0;
    }

    bool IsConstructor() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return (accessFlags_.load(std::memory_order_acquire) & ACC_CONSTRUCTOR) != 0;
    }

    bool IsInstanceConstructor() const
    {
        return IsConstructor() && !IsStatic();
    }

    bool IsStaticConstructor() const
    {
        return IsConstructor() && IsStatic();
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetAccessFlagsOffset()
    {
        return MEMBER_OFFSET(Method, accessFlags_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetNumArgsOffset()
    {
        return MEMBER_OFFSET(Method, numArgs_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetVTableIndexOffset()
    {
        return MEMBER_OFFSET(Method, stor16Pair_) + MEMBER_OFFSET(Storage16Pair, vtableIndex);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetHotnessCounterOffset()
    {
        return MEMBER_OFFSET(Method, stor16Pair_) + MEMBER_OFFSET(Storage16Pair, hotnessCounter);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetClassOffset()
    {
        return MEMBER_OFFSET(Method, classWord_);
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetCompiledEntryPointOffset()
    {
        return MEMBER_OFFSET(Method, compiledEntryPoint_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetPandaFileOffset()
    {
        return MEMBER_OFFSET(Method, pandaFile_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetCodeIdOffset()
    {
        return MEMBER_OFFSET(Method, codeId_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetNativePointerOffset()
    {
        return MEMBER_OFFSET(Method, pointer_);
    }
    // CC-OFFNXT(G.INC.10) false positive: static method
    static constexpr uint32_t GetShortyOffset()
    {
        return MEMBER_OFFSET(Method, shorty_);
    }

    template <typename Callback>
    void EnumerateTryBlocks(Callback callback) const;

    template <typename Callback>
    void EnumerateCatchBlocks(Callback callback) const;

    template <typename Callback>
    void EnumerateExceptionHandlers(Callback callback) const;

    // CC-OFFNXT(G.INC.10) false positive: static method
    static inline UniqId CalcUniqId(const panda_file::File *file, panda_file::File::EntityId fileId)
    {
        constexpr uint64_t HALF = 32ULL;
        uint64_t uid = file->GetUniqId();
        uid <<= HALF;
        uid |= fileId.GetOffset();
        return uid;
    }
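
    // Informally: CalcUniqId packs the panda file's unique id into the upper 32 bits and the
    // method's entity offset within that file into the lower 32 bits, which is assumed to keep
    // ids unique across all files loaded in one runtime instance.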

    // for synthetic methods, like array .ctor
    // CC-OFFNXT(G.INC.10) false positive: static method
    static UniqId CalcUniqId(const uint8_t *classDescr, const uint8_t *name);

    UniqId GetUniqId() const
    {
        return CalcUniqId(pandaFile_, fileId_);
    }

    size_t GetLineNumFromBytecodeOffset(uint32_t bcOffset) const;

    panda_file::File::StringData GetClassSourceFile() const;

    inline bool InitProfilingData(ProfilingData *profilingData);

    PANDA_PUBLIC_API void StartProfiling();
    PANDA_PUBLIC_API void StopProfiling();

    PANDA_PUBLIC_API bool IsProxy() const;

    ProfilingData *GetProfilingData()
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return nullptr;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    ProfilingData *GetProfilingDataWithoutCheck()
    {
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    const ProfilingData *GetProfilingData() const
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return nullptr;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire);
    }

    bool IsProfiling() const
    {
        return GetProfilingData() != nullptr;
    }

    bool IsProfilingWithoutLock() const
    {
        if (UNLIKELY(IsNative() || IsProxy())) {
            return false;
        }
        // Atomic with acquire order reason: data race with profiling_data_ with dependencies on reads after the load
        // which should become visible
        // NOLINTNEXTLINE(cppcoreguidelines-pro-type-union-access)
        return pointer_.profilingData.load(std::memory_order_acquire) != nullptr;
    }

    void SetVerified(bool result);
    bool IsVerified() const;
    PANDA_PUBLIC_API bool Verify();
    template <bool IS_CALL>
    bool TryVerify();

    // CC-OFFNXT(G.INC.10) false positive: static method
    inline static VerificationStage GetVerificationStage(uint32_t value)
    {
        return static_cast<VerificationStage>((value & VERIFICATION_STATUS_MASK) >> VERIFICATION_STATUS_SHIFT);
    }

    inline VerificationStage GetVerificationStage() const
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        return GetVerificationStage(accessFlags_.load(std::memory_order_acquire));
    }

    inline void SetVerificationStage(enum VerificationStage newStage)
    {
        // Atomic with acquire order reason: data race with access_flags_ with dependencies on reads after the load which
        // should become visible
        uint32_t oldValue = accessFlags_.load(std::memory_order_acquire);
        uint32_t newValue = MakeVerificationStageValue(oldValue, newStage);
        while (!accessFlags_.compare_exchange_weak(oldValue, newValue, std::memory_order_acq_rel)) {
            newValue = MakeVerificationStageValue(oldValue, newStage);
        }
    }

private:
    inline void FillVecsByInsts(BytecodeInstruction &inst, PandaVector<uint32_t> &vcalls,
                                PandaVector<uint32_t> &branches, PandaVector<uint32_t> &throws) const;

    Value InvokeCompiledCode(ManagedThread *thread, uint32_t numArgs, Value *args);

    Value GetReturnValueFromTaggedValue(uint64_t retValue)
    {
        panda_file::Type retType = GetReturnType();
        if (retType.GetId() == panda_file::Type::TypeId::VOID) {
            return Value(static_cast<int64_t>(0));
        }
        if (retType.GetId() == panda_file::Type::TypeId::REFERENCE) {
            return Value(reinterpret_cast<ObjectHeader *>(retValue));
        }
        return Value(retValue);
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    inline static uint32_t MakeCompilationStatusValue(uint32_t value, CompilationStage newStatus)
    {
        value &= ~COMPILATION_STATUS_MASK;
        value |= static_cast<uint32_t>(newStatus) << COMPILATION_STATUS_SHIFT;
        return value;
    }

    // CC-OFFNXT(G.INC.10) false positive: static method
    inline static uint32_t MakeVerificationStageValue(uint32_t value, VerificationStage newStage)
    {
        value &= ~VERIFICATION_STATUS_MASK;
        value |= static_cast<uint32_t>(newStage) << VERIFICATION_STATUS_SHIFT;
        return value;
    }

    template <class InvokeHelper, class ValueT>
    ValueT InvokeInterpretedCode(ManagedThread *thread, uint32_t numActualArgs, ValueT *args);

    template <class InvokeHelper, class ValueT>
    PandaUniquePtr<Frame, FrameDeleter> InitFrame(ManagedThread *thread, uint32_t numActualArgs, ValueT *args,
                                                  Frame *currentFrame);

    template <class InvokeHelper, class ValueT, bool IS_NATIVE_METHOD>
    PandaUniquePtr<Frame, FrameDeleter> InitFrameWithNumVRegs(ManagedThread *thread, uint32_t numVregs,
                                                              uint32_t numActualArgs, ValueT *args,
                                                              Frame *currentFrame);

    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromException();

    template <class InvokeHelper, class ValueT>
    ValueT GetReturnValueFromAcc(interpreter::AccVRegister &aacVreg);

    template <class InvokeHelper, class ValueT>
    ValueT InvokeImpl(ManagedThread *thread, uint32_t numActualArgs, ValueT *args, bool proxyCall);

    template <bool IS_CALL>
    inline bool DecrementHotnessCounterForTaggedFunction(ManagedThread *thread, uintptr_t bcOffset, bool osr,
                                                         coretypes::TaggedValue func);

private:
    using IntrinsicIdType = uint32_t;
    static_assert(std::numeric_limits<IntrinsicIdType>::max() == MAX_INTRINSIC_NUMBER);

    union PointerInMethod {
        // It's the native pointer when the method is a native or proxy method.
        std::atomic<void *> nativePointer;
        // It's the profiling data when the method isn't a native or proxy method.
        std::atomic<ProfilingData *> profilingData;
    };

    struct Storage16Pair {
        uint16_t vtableIndex;
        int16_t hotnessCounter;
    };

    std::atomic_uint32_t accessFlags_;
    uint32_t numArgs_;
    Storage16Pair stor16Pair_;
    ClassHelper::ClassWordSize classWord_;

    std::atomic<const void *> compiledEntryPoint_ {nullptr};
    const panda_file::File *pandaFile_;
    union PointerInMethod pointer_ {
    };

    panda_file::File::EntityId fileId_;
    panda_file::File::EntityId codeId_;
    const uint16_t *shorty_;

    std::atomic<IntrinsicIdType> intrinsicId_ {0U};
};

static_assert(!std::is_polymorphic_v<Method>);
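// Presumably required because compiled code and the bridges reach into Method through the raw
// Get*Offset() byte offsets above; a polymorphic Method would add a vtable pointer and shift
// that layout.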

}  // namespace ark

#endif  // PANDA_RUNTIME_METHOD_H_