/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PANDA_RUNTIME_JIT_PROFILING_DATA_H_
#define PANDA_RUNTIME_JIT_PROFILING_DATA_H_

#include "macros.h"
#include "utils/span.h"  // Span<T>; assumed include path (libpandabase)

#include <algorithm>  // std::fill, std::lower_bound
#include <array>
#include <atomic>  // std::atomic, std::atomic_uintptr_t
#include <cstdint>
#include <numeric>

namespace panda {

class Class;

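// Per-call-site inline cache for the JIT profiler. Each cache records up to
// CLASSES_COUNT distinct receiver classes observed at one bytecode pc. Once
// more classes are seen, slot 0 is overwritten with MEGAMORPHIC_FLAG and the
// site is treated as megamorphic, i.e. devirtualization is disabled for it.
// Typical use (sketch): the runtime calls
// ProfilingData::UpdateInlineCaches(pc, receiver_class) on profiled calls.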
class CallSiteInlineCache {
public:
    static constexpr size_t CLASSES_COUNT = 4;
    static constexpr uintptr_t MEGAMORPHIC_FLAG = static_cast<uintptr_t>(-1);

    explicit CallSiteInlineCache(uintptr_t pc) : bytecode_pc_(pc) {}
    ~CallSiteInlineCache() = default;
    NO_MOVE_SEMANTIC(CallSiteInlineCache);
    NO_COPY_SEMANTIC(CallSiteInlineCache);

    void Init(uintptr_t pc)
    {
        SetBytecodePc(pc);
        std::fill(classes_.begin(), classes_.end(), nullptr);
    }

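    // Lock-free cache update: scan the slots, bailing out if the class is
    // already recorded or the site is already megamorphic. An empty (nullptr)
    // slot is claimed with a CAS; on CAS failure the same slot is re-examined
    // without advancing, since another thread may have just stored this very
    // class. If every slot holds a different class, the site transitions to
    // megamorphic.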
    void UpdateInlineCaches(Class *cls)
    {
        for (uint32_t i = 0; i < classes_.size();) {
            auto *class_atomic = reinterpret_cast<std::atomic<Class *> *>(&(classes_[i]));
            auto stored_class = class_atomic->load(std::memory_order_acquire);
            // Check whether the call site is already megamorphic.
            if (i == 0 && stored_class == reinterpret_cast<Class *>(MEGAMORPHIC_FLAG)) {
                return;
            }
            if (stored_class == cls) {
                return;
            }
            if (stored_class == nullptr) {
                if (!class_atomic->compare_exchange_weak(stored_class, cls, std::memory_order_acq_rel)) {
                    continue;
                }
                return;
            }
            i++;
        }
        // Megamorphic call, disable devirtualization for this call site.
        auto *class_atomic = reinterpret_cast<std::atomic<Class *> *>(&(classes_[0]));
        class_atomic->store(reinterpret_cast<Class *>(MEGAMORPHIC_FLAG), std::memory_order_release);
    }

    auto GetBytecodePc() const
    {
        return bytecode_pc_.load(std::memory_order_acquire);
    }

    void SetBytecodePc(uintptr_t pc)
    {
        bytecode_pc_.store(pc, std::memory_order_release);
    }

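    // Returns a span over the currently recorded classes. The count is
    // computed at call time, so concurrent updates may not be reflected.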
    auto GetClasses()
    {
        return Span<Class *>(classes_.data(), GetClassesCount());
    }

    size_t GetClassesCount() const
    {
        size_t classes_count = 0;
        for (uint32_t i = 0; i < classes_.size(); i++) {
            auto *class_atomic = reinterpret_cast<std::atomic<Class *> const *>(&(classes_[i]));
            auto stored_class = class_atomic->load(std::memory_order_acquire);
            if (stored_class != nullptr) {
                classes_count++;
            }
        }
        return classes_count;
    }

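    // Checks whether a class value read out of a cache slot (e.g. an element
    // of GetClasses()) is the megamorphic sentinel.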
    static bool IsMegamorphic(Class *cls)
    {
        auto *class_atomic = reinterpret_cast<std::atomic<Class *> *>(&cls);
        return class_atomic->load(std::memory_order_acquire) == reinterpret_cast<Class *>(MEGAMORPHIC_FLAG);
    }

private:
    std::atomic_uintptr_t bytecode_pc_;
    std::array<Class *, CLASSES_COUNT> classes_ {};
};

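// Profiling data attached to a method: a fixed header followed directly by
// 'inline_caches_num_' CallSiteInlineCache entries in the same allocation.
// Because of the zero-length trailing array, a ProfilingData must be
// placement-constructed in a buffer large enough for the header plus the
// inline cache array (see the sketch after the class).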
class ProfilingData {
public:
    explicit ProfilingData(size_t inline_caches_num) : inline_caches_num_(inline_caches_num)
    {
        // Zero the inline cache storage, viewed as raw bytes.
        auto data = GetInlineCaches().SubSpan<uint8_t>(0, GetInlineCaches().size());
        std::fill(data.begin(), data.end(), 0);
    }
    ~ProfilingData() = default;
    NO_MOVE_SEMANTIC(ProfilingData);
    NO_COPY_SEMANTIC(ProfilingData);

    Span<CallSiteInlineCache> GetInlineCaches()
    {
        return Span<CallSiteInlineCache>(inline_caches_, inline_caches_num_);
    }

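    // Binary search by bytecode pc; this relies on the caches having been laid
    // out in ascending pc order when the ProfilingData was built.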
    CallSiteInlineCache *FindInlineCache(uintptr_t pc)
    {
        auto ics = GetInlineCaches();
        auto ic = std::lower_bound(ics.begin(), ics.end(), pc,
                                   [](const auto &a, uintptr_t counter) { return a.GetBytecodePc() < counter; });
        return (ic == ics.end() || ic->GetBytecodePc() != pc) ? nullptr : &*ic;
    }

    void UpdateInlineCaches(uintptr_t pc, Class *cls)
    {
        auto ic = FindInlineCache(pc);
        ASSERT(ic != nullptr);
        if (ic != nullptr) {
            ic->UpdateInlineCaches(cls);
        }
    }

private:
    size_t inline_caches_num_ {};
    __extension__ CallSiteInlineCache inline_caches_[0];  // NOLINT(modernize-avoid-c-arrays)
};
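
// Allocation sketch (illustrative only; 'allocator' and the surrounding code
// are assumptions, not part of this header):
//   size_t size = sizeof(ProfilingData) + num_call_sites * sizeof(CallSiteInlineCache);
//   auto *profiling_data = new (allocator->Alloc(size)) ProfilingData(num_call_sites);
//   // ... then initialize each cache via GetInlineCaches()[i].Init(pc_i),
//   // with the pcs in ascending order so FindInlineCache() can binary-search.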

}  // namespace panda

#endif  // PANDA_RUNTIME_JIT_PROFILING_DATA_H_