1 /**
2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef LIBPANDABASE_MEM_H
17 #define LIBPANDABASE_MEM_H
18
#include "macros.h"
#include "utils/math_helpers.h"

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <type_traits>
26
27 namespace ark {
28
namespace mem {
class GCRoot;

// Forward declarations of the two memory-statistics implementations;
// the alias below selects one per build type.
class MemStatsAdditionalInfo;
class MemStatsDefault;
class MemRange;

// Debug builds (NDEBUG not defined) use MemStatsAdditionalInfo,
// release builds use MemStatsDefault.
#ifndef NDEBUG
using MemStatsType = MemStatsAdditionalInfo;
#else
using MemStatsType = MemStatsDefault;
#endif
}  // namespace mem
42
class ObjectHeader;

// Representation of a stored object reference: a compressed 32-bit value when
// PANDA_USE_32_BIT_POINTER is defined (object addresses stay below 4 GB, see
// PANDA_32BITS_HEAP_END_OBJECTS_ADDRESS below), otherwise a full-width pointer.
#ifdef PANDA_USE_32_BIT_POINTER
using ObjectPointerType = uint32_t;
#else
using ObjectPointerType = uintptr_t;
#endif

// Size in bytes of one stored object reference.
constexpr size_t OBJECT_POINTER_SIZE = sizeof(ObjectPointerType);
52
/// @brief Logarithmic (power-of-two) alignment values.
enum Alignment {
    LOG_ALIGN_2 = 2,
    LOG_ALIGN_3 = 3,
    LOG_ALIGN_4 = 4,
    LOG_ALIGN_5 = 5,
    LOG_ALIGN_6 = 6,
    LOG_ALIGN_7 = 7,
    LOG_ALIGN_8 = 8,
    LOG_ALIGN_9 = 9,
    LOG_ALIGN_10 = 10,
    LOG_ALIGN_11 = 11,
    LOG_ALIGN_12 = 12,
    LOG_ALIGN_13 = 13,
    LOG_ALIGN_MIN = LOG_ALIGN_2,
    LOG_ALIGN_MAX = LOG_ALIGN_13,
};

/**
 * @brief Converts a logarithmic alignment to a byte count.
 * @param logAlignment - logarithmic alignment
 * @return alignment in bytes, i.e. 2^logAlignment
 */
constexpr size_t GetAlignmentInBytes(const Alignment logAlignment)
{
    const auto shift = static_cast<uint32_t>(logAlignment);
    return size_t{1} << shift;
}
79
80 /**
81 * @brief returns log2 for alignment in bytes
82 * @param ALIGNMENT_IN_BYTES - should be power of 2
83 * @return alignment in bits
84 */
GetLogAlignment(const uint32_t alignmentInBytes)85 constexpr Alignment GetLogAlignment(const uint32_t alignmentInBytes)
86 {
87 using helpers::math::GetIntLog2;
88 // check if it is power of 2
89 ASSERT((alignmentInBytes != 0) && !(alignmentInBytes & (alignmentInBytes - 1)));
90 ASSERT(GetIntLog2(alignmentInBytes) >= Alignment::LOG_ALIGN_MIN);
91 ASSERT(GetIntLog2(alignmentInBytes) <= Alignment::LOG_ALIGN_MAX);
92 return static_cast<Alignment>(GetIntLog2(alignmentInBytes));
93 }
94
/// @brief Rounds @p value up to the nearest multiple of @p alignment.
/// @param alignment must be a power of two.
template <class T>
constexpr std::enable_if_t<std::is_unsigned_v<T>, T> AlignUp(T value, size_t alignment)
{
    // Power-of-two alignment allows rounding with a simple bit mask.
    const size_t mask = alignment - 1U;
    return (value + mask) & ~mask;
}
100
/// @brief Rounds @p value down to the nearest multiple of @p alignment.
/// @param alignment must be a power of two.
template <class T>
constexpr std::enable_if_t<std::is_unsigned_v<T>, T> AlignDown(T value, size_t alignment)
{
    // Clearing the low log2(alignment) bits truncates to the boundary below.
    const size_t mask = alignment - 1U;
    return value & ~mask;
}
106
/// @brief Converts a typed pointer to its integral address representation.
template <class T>
constexpr uintptr_t ToUintPtr(T *val)
{
    const auto addr = reinterpret_cast<uintptr_t>(val);
    return addr;
}
112
ToUintPtr(std::nullptr_t)113 constexpr uintptr_t ToUintPtr(std::nullptr_t)
114 {
115 return ToUintPtr(static_cast<void *>(nullptr));
116 }
117
/// @brief Inverse of ToUintPtr: reinterprets an address as a typed pointer.
template <class T>
constexpr T *ToNativePtr(uintptr_t val)
{
    T *typedPointer = reinterpret_cast<T *>(val);
    return typedPointer;
}
123
/// @brief Reinterprets an integral address as an untyped pointer.
inline void *ToVoidPtr(uintptr_t val)
{
    void *pointer = reinterpret_cast<void *>(val);
    return pointer;
}
128
ToObjPtr(const void * ptr)129 constexpr ObjectPointerType ToObjPtr(const void *ptr)
130 {
131 return static_cast<ObjectPointerType>(ToUintPtr(ptr));
132 }
133
// Managed languages may create double, atomic i64, NaN-tagged and similar object fields,
// thus 64-bit object alignment is required
constexpr Alignment DEFAULT_ALIGNMENT = GetLogAlignment(sizeof(uint64_t));
constexpr size_t DEFAULT_ALIGNMENT_IN_BYTES = GetAlignmentInBytes(DEFAULT_ALIGNMENT);

// Internal objects do not all need to be 64 bits aligned;
// pointer-sized alignment reduces memory usage on 32-bit machines
// (4-byte alignment there, 8-byte on 64-bit targets).
constexpr Alignment DEFAULT_INTERNAL_ALIGNMENT = GetLogAlignment(sizeof(uintptr_t));
constexpr size_t DEFAULT_INTERNAL_ALIGNMENT_IN_BYTES = GetAlignmentInBytes(DEFAULT_INTERNAL_ALIGNMENT);
143
/// @brief Rounds an object size up to the default (64-bit) object alignment.
constexpr size_t GetAlignedObjectSize(size_t size)
{
    return AlignUp(size, DEFAULT_ALIGNMENT_IN_BYTES);
}
148
149 template <typename T>
GetAlignment()150 constexpr Alignment GetAlignment()
151 {
152 return GetLogAlignment(std::max(alignof(T), DEFAULT_ALIGNMENT_IN_BYTES));
153 }
154
GetAlignment(size_t align)155 constexpr Alignment GetAlignment(size_t align)
156 {
157 return GetLogAlignment(std::max(align, DEFAULT_ALIGNMENT_IN_BYTES));
158 }
159
160 template <typename T>
GetInternalAlignment()161 constexpr Alignment GetInternalAlignment()
162 {
163 return GetLogAlignment(std::max(alignof(T), DEFAULT_INTERNAL_ALIGNMENT_IN_BYTES));
164 }
165
GetInternalAlignment(size_t align)166 constexpr Alignment GetInternalAlignment(size_t align)
167 {
168 return GetLogAlignment(std::max(align, DEFAULT_INTERNAL_ALIGNMENT_IN_BYTES));
169 }
170
/*
 * Memory-size literal operators. They all return uint64_t so that an
 * overflowing size expression is a compile-time error when evaluated
 * in a constant context.
 */

constexpr uint64_t SHIFT_KB = 10ULL;
constexpr uint64_t SHIFT_MB = 20ULL;
constexpr uint64_t SHIFT_GB = 30ULL;

/// Fractional kilobytes, e.g. 1.5_KB == 1536 (truncated toward zero).
constexpr uint64_t operator"" _KB(long double count)
{
    const auto bytesPerKb = static_cast<long double>(1ULL << SHIFT_KB);
    return static_cast<uint64_t>(count * bytesPerKb);
}

/// Whole kilobytes, e.g. 4_KB == 4096.
// NOLINTNEXTLINE(google-runtime-int)
constexpr uint64_t operator"" _KB(unsigned long long count)
{
    return count << SHIFT_KB;
}

/// Fractional megabytes (truncated toward zero).
constexpr uint64_t operator"" _MB(long double count)
{
    const auto bytesPerMb = static_cast<long double>(1ULL << SHIFT_MB);
    return static_cast<uint64_t>(count * bytesPerMb);
}

/// Whole megabytes.
// NOLINTNEXTLINE(google-runtime-int)
constexpr uint64_t operator"" _MB(unsigned long long count)
{
    return count << SHIFT_MB;
}

/// Fractional gigabytes (truncated toward zero).
constexpr uint64_t operator"" _GB(long double count)
{
    const auto bytesPerGb = static_cast<long double>(1ULL << SHIFT_GB);
    return static_cast<uint64_t>(count * bytesPerGb);
}

/// Whole gigabytes.
// NOLINTNEXTLINE(google-runtime-int)
constexpr uint64_t operator"" _GB(unsigned long long count)
{
    return count << SHIFT_GB;
}
212
constexpr uint64_t SIZE_1K = 1_KB;
constexpr uint64_t SIZE_1M = 1_MB;
constexpr uint64_t SIZE_1G = 1_GB;

// Heap ceiling: with 32-bit object references every object must be
// addressable below 4 GB (see ObjectPointerType above).
constexpr uint64_t PANDA_MAX_HEAP_SIZE = 4_GB;
// Alignment granularity for memory pools (256 KB).
constexpr size_t PANDA_POOL_ALIGNMENT_IN_BYTES = 256_KB;

constexpr size_t PANDA_DEFAULT_POOL_SIZE = 1_MB;
constexpr size_t PANDA_DEFAULT_ARENA_SIZE = 1_MB;
constexpr size_t PANDA_DEFAULT_ALLOCATOR_POOL_SIZE = 4_MB;
// Default pool/arena sizes must be whole multiples of the pool alignment.
static_assert(PANDA_DEFAULT_POOL_SIZE % PANDA_POOL_ALIGNMENT_IN_BYTES == 0);
static_assert(PANDA_DEFAULT_ARENA_SIZE % PANDA_POOL_ALIGNMENT_IN_BYTES == 0);
static_assert(PANDA_DEFAULT_ALLOCATOR_POOL_SIZE % PANDA_POOL_ALIGNMENT_IN_BYTES == 0);

// 64-byte (2^6) alignment used for frames.
constexpr Alignment DEFAULT_FRAME_ALIGNMENT = LOG_ALIGN_6;

// First valid 32-bit heap address: the lowest pool-aligned address above
// null, so address 0 can never refer to an object.
constexpr uintptr_t PANDA_32BITS_HEAP_START_ADDRESS = AlignUp(1U, PANDA_POOL_ALIGNMENT_IN_BYTES);
constexpr uint64_t PANDA_32BITS_HEAP_END_OBJECTS_ADDRESS = 4_GB;
231
/// @brief Checks whether @p address may refer to an object in the managed heap.
constexpr bool IsAddressInObjectsHeap([[maybe_unused]] uintptr_t address)
{
#ifdef PANDA_USE_32_BIT_POINTER
    // Compressed references: objects live in [heap start, 4 GB).
    return PANDA_32BITS_HEAP_START_ADDRESS <= address && address < PANDA_32BITS_HEAP_END_OBJECTS_ADDRESS;
#else  // In this case, all 64 bits addresses are valid
    return true;
#endif
}
240
241 template <class T>
IsAddressInObjectsHeap(const T * address)242 constexpr bool IsAddressInObjectsHeap(const T *address)
243 {
244 return IsAddressInObjectsHeap(ToUintPtr(address));
245 }
246
IsAddressInObjectsHeapOrNull(uintptr_t address)247 constexpr bool IsAddressInObjectsHeapOrNull(uintptr_t address)
248 {
249 return address == ToUintPtr(nullptr) || IsAddressInObjectsHeap(address);
250 }
251
252 template <class T>
IsAddressInObjectsHeapOrNull(const T * address)253 constexpr bool IsAddressInObjectsHeapOrNull(const T *address)
254 {
255 return IsAddressInObjectsHeapOrNull(ToUintPtr(address));
256 }
257
258 template <class T>
ToObjPtrType(const T * val)259 constexpr ObjectPointerType ToObjPtrType(const T *val)
260 {
261 ASSERT(IsAddressInObjectsHeapOrNull(ToUintPtr(val)));
262 return static_cast<ObjectPointerType>(ToUintPtr(val));
263 }
264
ToObjPtrType(std::nullptr_t)265 constexpr ObjectPointerType ToObjPtrType(std::nullptr_t)
266 {
267 return static_cast<ObjectPointerType>(ToUintPtr(nullptr));
268 }
269
// Liveness verdict produced by GC object visitors; bool-sized.
enum class ObjectStatus : bool {
    DEAD_OBJECT,
    ALIVE_OBJECT,
};

// Visits a raw memory chunk of the given size.
using MemVisitor = std::function<void(void *mem, size_t size)>;
// Visits an object and reports whether it is alive or dead.
using GCObjectVisitor = std::function<ObjectStatus(ObjectHeader *)>;
// Plain function pointer (size_t (*)(void *)), presumably to avoid
// std::function overhead; exact return semantics defined at call sites.
using ObjectMoveVisitor = std::add_pointer<size_t(void *mem)>::type;
using ObjectVisitor = std::function<void(ObjectHeader *)>;
/// from_object is object from which we found to_object by reference.
using ObjectVisitorEx = std::function<void(ObjectHeader *fromObject, ObjectHeader *toObject)>;
// Predicate over objects, e.g. NoFilterChecker below.
using ObjectChecker = std::function<bool(const ObjectHeader *)>;
// Visits a single GC root.
using GCRootVisitor = std::function<void(const mem::GCRoot &)>;
// Predicate over memory ranges.
using MemRangeChecker = std::function<bool(mem::MemRange &)>;
284
/// ObjectChecker that filters nothing: accepts every object unconditionally.
inline bool NoFilterChecker([[maybe_unused]] const ObjectHeader *objectHeader)
{
    return true;
}
289
/// GCObjectVisitor that reports every object as dead (unconditional reclaim).
inline ObjectStatus GCKillEmAllVisitor([[maybe_unused]] const ObjectHeader *mem)
{
    return ObjectStatus::DEAD_OBJECT;
}
294
295 } // namespace ark
296
// If the OS has this macro, do not redefine it.
#ifndef PAGE_SIZE
// NB! Keep inline to avoid ODR-violation
// Fallback page size (4 KiB) for platforms that do not define PAGE_SIZE.
inline constexpr size_t PAGE_SIZE = 4096U;
#endif
302
303 #endif // LIBPANDABASE_MEM_H
304