/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PANDA_LIBPANDABASE_MEM_MEM_H_
#define PANDA_LIBPANDABASE_MEM_MEM_H_

#include "macros.h"
#include "utils/math_helpers.h"

#include <cstddef>
#include <cstdint>
#include <cmath>
#include <functional>

namespace panda {

namespace mem {
class GCRoot;

class MemStatsAdditionalInfo;
class MemStatsDefault;
class MemRange;

#ifndef NDEBUG
using MemStatsType = MemStatsAdditionalInfo;
#else
using MemStatsType = MemStatsDefault;
#endif
} // namespace mem

class ObjectHeader;

#ifdef PANDA_USE_32_BIT_POINTER
using object_pointer_type = uint32_t;
#else
using object_pointer_type = uintptr_t;
#endif

constexpr size_t OBJECT_POINTER_SIZE = sizeof(object_pointer_type);
/**
 * \brief Logarithmic/bit alignment
 */
enum Alignment {
    LOG_ALIGN_2 = 2,
    LOG_ALIGN_3 = 3,
    LOG_ALIGN_4 = 4,
    LOG_ALIGN_5 = 5,
    LOG_ALIGN_6 = 6,
    LOG_ALIGN_7 = 7,
    LOG_ALIGN_8 = 8,
    LOG_ALIGN_9 = 9,
    LOG_ALIGN_10 = 10,
    LOG_ALIGN_11 = 11,
    LOG_ALIGN_12 = 12,
    LOG_ALIGN_13 = 13,
    LOG_ALIGN_MIN = LOG_ALIGN_2,
    LOG_ALIGN_MAX = LOG_ALIGN_13,
};

/**
 * \brief Gets the alignment in bytes.
 * @param LOG_ALIGNMENT - logarithmic alignment
 * @return alignment in bytes
 */
constexpr size_t GetAlignmentInBytes(const Alignment LOG_ALIGNMENT)
{
    return 1U << static_cast<uint32_t>(LOG_ALIGNMENT);
}
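
// Illustrative usage (sketch): a logarithmic alignment of N corresponds to 2^N bytes.
static_assert(GetAlignmentInBytes(LOG_ALIGN_2) == 4U, "2^2 bytes");
static_assert(GetAlignmentInBytes(LOG_ALIGN_8) == 256U, "2^8 bytes");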

/**
 * \brief Returns the logarithmic alignment for an alignment given in bytes.
 * @param ALIGNMENT_IN_BYTES - alignment in bytes; should be a power of 2
 * @return logarithmic alignment (log2 of the alignment in bytes)
 */
constexpr Alignment GetLogAlignment(const uint32_t ALIGNMENT_IN_BYTES)
{
    using helpers::math::GetIntLog2;
    // check that the alignment is a power of 2
    ASSERT((ALIGNMENT_IN_BYTES != 0) && !(ALIGNMENT_IN_BYTES & (ALIGNMENT_IN_BYTES - 1)));
    ASSERT(GetIntLog2(ALIGNMENT_IN_BYTES) >= Alignment::LOG_ALIGN_MIN);
    ASSERT(GetIntLog2(ALIGNMENT_IN_BYTES) <= Alignment::LOG_ALIGN_MAX);
    return static_cast<Alignment>(GetIntLog2(ALIGNMENT_IN_BYTES));
}
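
// Illustrative usage (sketch): GetLogAlignment is the inverse of GetAlignmentInBytes
// for power-of-two byte counts.
static_assert(GetLogAlignment(16U) == LOG_ALIGN_4, "16 bytes corresponds to log2 == 4");
static_assert(GetAlignmentInBytes(GetLogAlignment(64U)) == 64U, "round trip for a power of two");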

// Rounds value up to the nearest multiple of alignment (alignment must be a power of 2).
constexpr size_t AlignUp(size_t value, size_t alignment)
{
    return (value + alignment - 1U) & ~(alignment - 1U);
}

// Rounds value down to the nearest multiple of alignment (alignment must be a power of 2).
constexpr size_t AlignDown(size_t value, size_t alignment)
{
    return value & ~(alignment - 1U);
}
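
// Illustrative usage (sketch): both helpers expect a power-of-two alignment.
static_assert(AlignUp(13U, 8U) == 16U, "rounds up to the next 8-byte boundary");
static_assert(AlignDown(13U, 8U) == 8U, "rounds down to the previous 8-byte boundary");
static_assert(AlignUp(16U, 8U) == 16U, "already aligned values are unchanged");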

template <class T>
inline uintptr_t ToUintPtr(T *val)
{
    return reinterpret_cast<uintptr_t>(val);
}

inline uintptr_t ToUintPtr(std::nullptr_t)
{
    return reinterpret_cast<uintptr_t>(nullptr);
}

template <class T>
inline T *ToNativePtr(uintptr_t val)
{
    return reinterpret_cast<T *>(val);
}

inline void *ToVoidPtr(uintptr_t val)
{
    return reinterpret_cast<void *>(val);
}

constexpr Alignment DEFAULT_ALIGNMENT = GetLogAlignment(alignof(uintptr_t));
constexpr size_t DEFAULT_ALIGNMENT_IN_BYTES = GetAlignmentInBytes(DEFAULT_ALIGNMENT);

constexpr size_t GetAlignedObjectSize(size_t size)
{
    return AlignUp(size, DEFAULT_ALIGNMENT_IN_BYTES);
}
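
// Illustrative usage (sketch): object sizes are rounded up to the default alignment;
// one byte past an aligned size rounds up to the next aligned size.
static_assert(GetAlignedObjectSize(DEFAULT_ALIGNMENT_IN_BYTES + 1U) == 2U * DEFAULT_ALIGNMENT_IN_BYTES,
              "one byte over an alignment boundary rounds up to the next aligned size");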

/*
 * The memory literals below return uint64_t so that integer overflow
 * in a size expression results in a compile-time error.
 */

constexpr uint64_t SHIFT_KB = 10ULL;
constexpr uint64_t SHIFT_MB = 20ULL;
constexpr uint64_t SHIFT_GB = 30ULL;

constexpr uint64_t operator"" _KB(long double count)
{
    return count * (1ULL << SHIFT_KB);
}

// NOLINTNEXTLINE(google-runtime-int)
constexpr uint64_t operator"" _KB(unsigned long long count)
{
    return count * (1ULL << SHIFT_KB);
}

constexpr uint64_t operator"" _MB(long double count)
{
    return count * (1ULL << SHIFT_MB);
}

// NOLINTNEXTLINE(google-runtime-int)
constexpr uint64_t operator"" _MB(unsigned long long count)
{
    return count * (1ULL << SHIFT_MB);
}

constexpr uint64_t operator"" _GB(long double count)
{
    return count * (1ULL << SHIFT_GB);
}

// NOLINTNEXTLINE(google-runtime-int)
constexpr uint64_t operator"" _GB(unsigned long long count)
{
    return count * (1ULL << SHIFT_GB);
}
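
// Illustrative usage (sketch): the literals shift by SHIFT_KB/SHIFT_MB/SHIFT_GB,
// so the results stay in uint64_t even for sizes that do not fit into 32 bits.
static_assert(4_KB == 4096ULL, "4 * 2^10");
static_assert(2_MB == 2097152ULL, "2 * 2^20");
static_assert(8_GB == 8589934592ULL, "8 * 2^30 does not fit into 32 bits");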

constexpr uint64_t SIZE_1K = 1_KB;
constexpr uint64_t SIZE_1M = 1_MB;
constexpr uint64_t SIZE_1G = 1_GB;

constexpr uint64_t PANDA_MAX_HEAP_SIZE = 4_GB;
constexpr size_t PANDA_POOL_ALIGNMENT_IN_BYTES = 256_KB;

constexpr size_t PANDA_DEFAULT_POOL_SIZE = 1_MB;
constexpr size_t PANDA_DEFAULT_ARENA_SIZE = 1_MB;
constexpr size_t PANDA_DEFAULT_ALLOCATOR_POOL_SIZE = 4_MB;
static_assert(PANDA_DEFAULT_POOL_SIZE % PANDA_POOL_ALIGNMENT_IN_BYTES == 0);
static_assert(PANDA_DEFAULT_ARENA_SIZE % PANDA_POOL_ALIGNMENT_IN_BYTES == 0);
static_assert(PANDA_DEFAULT_ALLOCATOR_POOL_SIZE % PANDA_POOL_ALIGNMENT_IN_BYTES == 0);

static constexpr Alignment DEFAULT_FRAME_ALIGNMENT = LOG_ALIGN_6;

constexpr uintptr_t PANDA_32BITS_HEAP_START_ADDRESS = AlignUp(72_KB, PANDA_POOL_ALIGNMENT_IN_BYTES);
constexpr uint64_t PANDA_32BITS_HEAP_END_OBJECTS_ADDRESS = 4_GB;

inline bool IsInObjectsAddressSpace([[maybe_unused]] uintptr_t address)
{
#ifdef PANDA_USE_32_BIT_POINTER
    return address == ToUintPtr(nullptr) ||
           (address >= PANDA_32BITS_HEAP_START_ADDRESS && address < PANDA_32BITS_HEAP_END_OBJECTS_ADDRESS);
#else // In this case, all 64-bit addresses are valid
    return true;
#endif
}

template <class T>
inline object_pointer_type ToObjPtrType(T *val)
{
    ASSERT(IsInObjectsAddressSpace(ToUintPtr(val)));
    return static_cast<object_pointer_type>(ToUintPtr(val));
}

inline object_pointer_type ToObjPtrType(std::nullptr_t)
{
    return static_cast<object_pointer_type>(ToUintPtr(nullptr));
}


enum class ObjectStatus : bool {
    DEAD_OBJECT,
    ALIVE_OBJECT,
};

using MemVisitor = std::function<void(void *mem, size_t size)>;
using GCObjectVisitor = std::function<ObjectStatus(ObjectHeader *)>;
using ObjectMoveVisitor = std::add_pointer<size_t(void *mem)>::type;
using ObjectVisitor = std::function<void(ObjectHeader *)>;
/**
 * from_object is the object from which to_object was reached by reference.
 */
using ObjectVisitorEx = std::function<void(ObjectHeader *from_object, ObjectHeader *to_object)>;
using ObjectChecker = std::function<bool(const ObjectHeader *)>;
using GCRootVisitor = std::function<void(const mem::GCRoot &)>;
using MemRangeChecker = std::function<bool(mem::MemRange &)>;

inline bool NoFilterChecker([[maybe_unused]] const ObjectHeader *object_header)
{
    return true;
}

inline ObjectStatus GCKillEmAllVisitor([[maybe_unused]] ObjectHeader *mem)
{
    return ObjectStatus::DEAD_OBJECT;
}

} // namespace panda

#endif // PANDA_LIBPANDABASE_MEM_MEM_H_