/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "include/runtime.h"
#include "runtime/mem/internal_allocator-inl.h"
#include "runtime/include/thread.h"

namespace panda::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_INTERNAL_ALLOCATOR(level) LOG(level, ALLOC) << "InternalAllocator: "

#if defined(TRACK_INTERNAL_ALLOCATIONS)
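// TRACK_INTERNAL_ALLOCATIONS is expected to be defined to 1 (simple statistics)
// or 2 (detailed tracking); any other value is a configuration error.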
static AllocTracker *CreateAllocTracker()
{
    static constexpr int SIMPLE_ALLOC_TRACKER = 1;
    static constexpr int DETAIL_ALLOC_TRACKER = 2;

    if constexpr (TRACK_INTERNAL_ALLOCATIONS == SIMPLE_ALLOC_TRACKER) {
        return new SimpleAllocTracker();
    } else if constexpr (TRACK_INTERNAL_ALLOCATIONS == DETAIL_ALLOC_TRACKER) {
        return new DetailAllocTracker();
    } else {
        UNREACHABLE();
    }
}
#endif // TRACK_INTERNAL_ALLOCATIONS

template <InternalAllocatorConfig Config>
Allocator *InternalAllocator<Config>::allocator_from_runtime = nullptr;

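// Depending on the config, the internal allocator is backed either by the three
// Panda allocators (RunSlots for small, FreeList for medium, HumongousObj for
// large objects) or by a single malloc proxy.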
template <InternalAllocatorConfig Config>
InternalAllocator<Config>::InternalAllocator(MemStatsType *mem_stats)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        runslots_allocator_ = new RunSlotsAllocatorT(mem_stats, SpaceType::SPACE_TYPE_INTERNAL);
        freelist_allocator_ = new FreeListAllocatorT(mem_stats, SpaceType::SPACE_TYPE_INTERNAL);
        humongous_allocator_ = new HumongousObjAllocatorT(mem_stats, SpaceType::SPACE_TYPE_INTERNAL);
    } else { // NOLINT(readability-misleading-indentation)
        malloc_allocator_ = new MallocProxyAllocatorT(mem_stats, SpaceType::SPACE_TYPE_INTERNAL);
    }

#if defined(TRACK_INTERNAL_ALLOCATIONS)
    mem_stats_ = mem_stats;
    tracker_ = CreateAllocTracker();
#endif // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(INFO) << "Initializing InternalAllocator finished";
}

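// Allocates size bytes with the requested alignment. Zero-size requests return
// nullptr. When allocation tracking is enabled, the whole operation runs under
// lock_ so the tracker's state stays consistent with the allocation itself.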
template <InternalAllocatorConfig Config>
template <AllocScope AllocScopeT>
[[nodiscard]] void *InternalAllocator<Config>::Alloc(size_t size, Alignment align)
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    os::memory::LockHolder lock(lock_);
#endif // TRACK_INTERNAL_ALLOCATIONS
    void *res = nullptr;
    LOG_INTERNAL_ALLOCATOR(INFO) << "Try to allocate " << size << " bytes";
    if (UNLIKELY(size == 0)) {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Failed to allocate - size of object is zero";
        return nullptr;
    }
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        res = AllocViaPandaAllocators<AllocScopeT>(size, align);
    } else { // NOLINT(readability-misleading-indentation)
        res = malloc_allocator_->Alloc(size, align);
    }
    if (res == nullptr) {
        return nullptr;
    }
    LOG_INTERNAL_ALLOCATOR(INFO) << "Allocated " << size << " bytes at address " << std::hex << res;
#ifdef TRACK_INTERNAL_ALLOCATIONS
    tracker_->TrackAlloc(res, AlignUp(size, GetAlignmentInBytes(align)), SpaceType::SPACE_TYPE_INTERNAL);
#endif // TRACK_INTERNAL_ALLOCATIONS
    return res;
}

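// Returns memory to whichever backend owns it; freeing nullptr is a no-op.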
template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::Free(void *ptr)
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    os::memory::LockHolder lock(lock_);
#endif // TRACK_INTERNAL_ALLOCATIONS
    if (ptr == nullptr) {
        return;
    }
    LOG_INTERNAL_ALLOCATOR(INFO) << "Try to free via InternalAllocator at address " << std::hex << ptr;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        FreeViaPandaAllocators(ptr);
    } else { // NOLINT(readability-misleading-indentation)
        malloc_allocator_->Free(ptr);
    }
#ifdef TRACK_INTERNAL_ALLOCATIONS
    tracker_->TrackFree(ptr);
#endif // TRACK_INTERNAL_ALLOCATIONS
}

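// If tracking is enabled, a non-zero internal-space footprint at destruction
// time means some allocations were never freed; they are reported as leaks
// before the sub-allocators are torn down.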
template <InternalAllocatorConfig Config>
InternalAllocator<Config>::~InternalAllocator()
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    if (mem_stats_->GetFootprint(SpaceType::SPACE_TYPE_INTERNAL) != 0) {
        // Memory leaks are detected.
        LOG(ERROR, RUNTIME) << "Memory leaks detected.";
        tracker_->DumpMemLeaks(std::cerr);
    }
    tracker_->Dump();
    delete tracker_;
#endif // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Destroying InternalAllocator";
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        delete runslots_allocator_;
        delete freelist_allocator_;
        delete humongous_allocator_;
    } else { // NOLINT(readability-misleading-indentation)
        delete malloc_allocator_;
    }
    LOG_INTERNAL_ALLOCATOR(INFO) << "Destroying InternalAllocator finished";
}

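// Allocates from a RunSlots allocator, growing it with new pools on demand.
// The static mutex serializes pool expansion: a thread that lost the race
// retries the allocation first, so only one extra pool is added at a time.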
template <class AllocatorT>
void *AllocInRunSlots(AllocatorT *runslots_allocator, size_t size, Alignment align, size_t pool_size)
{
    void *res = runslots_allocator->Alloc(size, align);
    if (res == nullptr) {
        // Serialize pool expansion so concurrent threads don't each add an extra pool
        static os::memory::Mutex pool_lock;
        os::memory::LockHolder lock(pool_lock);
        while (true) {
            res = runslots_allocator->Alloc(size, align);
            if (res != nullptr) {
                break;
            }
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "RunSlotsAllocator didn't allocate memory, try to add a new pool";
            auto pool = PoolManager::GetMmapMemPool()->AllocPool(pool_size, SpaceType::SPACE_TYPE_INTERNAL,
                                                                 AllocatorType::RUNSLOTS_ALLOCATOR, runslots_allocator);
            if (UNLIKELY(pool.GetMem() == nullptr)) {
                return nullptr;
            }
            runslots_allocator->AddMemoryPool(pool.GetMem(), pool.GetSize());
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "RunSlotsAllocator retries the allocation after adding a pool";
        }
    }
    return res;
}

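// Routes the request by aligned size: RunSlots (global or thread-local,
// depending on AllocScopeT) for small objects, FreeList for medium ones, and
// HumongousObj for everything larger. Each backend grows by requesting new
// pools from the PoolManager when it runs out of memory.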
template <InternalAllocatorConfig Config>
template <AllocScope AllocScopeT>
void *InternalAllocator<Config>::AllocViaPandaAllocators(size_t size, Alignment align)
{
    void *res = nullptr;
    size_t aligned_size = AlignUp(size, GetAlignmentInBytes(align));
    static_assert(RunSlotsAllocatorT::GetMaxSize() == LocalSmallObjectAllocator::GetMaxSize());
    if (LIKELY(aligned_size <= RunSlotsAllocatorT::GetMaxSize())) {
        // NOLINTNEXTLINE(readability-braces-around-statements)
        if constexpr (AllocScopeT == AllocScope::GLOBAL) {
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use RunSlotsAllocator";
            res = AllocInRunSlots(runslots_allocator_, size, align, RunSlotsAllocatorT::GetMinPoolSize());
            if (res == nullptr) {
                return nullptr;
            }
        } else { // NOLINT(readability-misleading-indentation)
            static_assert(AllocScopeT == AllocScope::LOCAL);
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use thread-local RunSlotsAllocator";
            ASSERT(panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator() != nullptr);
            res = AllocInRunSlots(panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator(), size, align,
                                  LocalSmallObjectAllocator::GetMinPoolSize());
            if (res == nullptr) {
                return nullptr;
            }
        }
    } else if (aligned_size <= FreeListAllocatorT::GetMaxSize()) {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use FreeListAllocator";
        res = freelist_allocator_->Alloc(size, align);
        if (res == nullptr) {
            // Serialize pool expansion so concurrent threads don't each add an extra pool
            static os::memory::Mutex pool_lock;
            os::memory::LockHolder lock(pool_lock);
            while (true) {
                res = freelist_allocator_->Alloc(size, align);
                if (res != nullptr) {
                    break;
                }
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "FreeListAllocator didn't allocate memory, try to add a new pool";
                size_t pool_size = FreeListAllocatorT::GetMinPoolSize();
                auto pool = PoolManager::GetMmapMemPool()->AllocPool(
                    pool_size, SpaceType::SPACE_TYPE_INTERNAL, AllocatorType::FREELIST_ALLOCATOR, freelist_allocator_);
                if (UNLIKELY(pool.GetMem() == nullptr)) {
                    return nullptr;
                }
                freelist_allocator_->AddMemoryPool(pool.GetMem(), pool.GetSize());
            }
        }
    } else {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use HumongousObjAllocator";
        res = humongous_allocator_->Alloc(size, align);
        if (res == nullptr) {
            // Serialize pool expansion so concurrent threads don't each add an extra pool
            static os::memory::Mutex pool_lock;
            os::memory::LockHolder lock(pool_lock);
            while (true) {
                res = humongous_allocator_->Alloc(size, align);
                if (res != nullptr) {
                    break;
                }
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "HumongousObjAllocator didn't allocate memory, try to add a new pool";
                size_t pool_size = HumongousObjAllocatorT::GetMinPoolSize(size);
                auto pool =
                    PoolManager::GetMmapMemPool()->AllocPool(pool_size, SpaceType::SPACE_TYPE_INTERNAL,
                                                             AllocatorType::HUMONGOUS_ALLOCATOR, humongous_allocator_);
                if (UNLIKELY(pool.GetMem() == nullptr)) {
                    return nullptr;
                }
                humongous_allocator_->AddMemoryPool(pool.GetMem(), pool.GetSize());
            }
        }
    }
    return res;
}

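// Recovers the owning allocator from the pointer itself: the mmap pool keeps an
// AllocatorInfo (allocator type and header address) for every pool it hands
// out, which also distinguishes the global RunSlots allocator from a
// thread-local one.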
template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::FreeViaPandaAllocators(void *ptr)
{
    AllocatorType alloc_type = PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetType();
    switch (alloc_type) {
        case AllocatorType::RUNSLOTS_ALLOCATOR:
            if (PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                runslots_allocator_) {
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via RunSlotsAllocator";
                runslots_allocator_->Free(ptr);
            } else {
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via thread-local RunSlotsAllocator";
                // It is a thread-local internal allocator instance
                LocalSmallObjectAllocator *local_allocator =
                    panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator();
                ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                       local_allocator);
                local_allocator->Free(ptr);
            }
            break;
        case AllocatorType::FREELIST_ALLOCATOR:
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via FreeListAllocator";
            ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                   freelist_allocator_);
            freelist_allocator_->Free(ptr);
            break;
        case AllocatorType::HUMONGOUS_ALLOCATOR:
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via HumongousObjAllocator";
            ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                   humongous_allocator_);
            humongous_allocator_->Free(ptr);
            break;
        default:
            UNREACHABLE();
            break;
    }
}

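// Helpers for a thread's LocalSmallObjectAllocator. Under MALLOC_ALLOCATOR they
// are no-ops (set-up returns nullptr). Under PANDA_ALLOCATORS, set-up creates
// the allocator, finalize returns all of its pools and destroys it (presumably
// on thread termination), and the remove-free-pools helper returns only the
// completely empty pools to the PoolManager.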
/* static */
template <InternalAllocatorConfig Config>
typename InternalAllocator<Config>::LocalSmallObjectAllocator *InternalAllocator<Config>::SetUpLocalInternalAllocator(
    Allocator *allocator)
{
    (void)allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        auto local_allocator = allocator->New<LocalSmallObjectAllocator>(allocator->GetMemStats());
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Set up local internal allocator at addr " << local_allocator
                                      << " for the thread " << panda::Thread::GetCurrent();
        return local_allocator;
    }
    return nullptr;
}

/* static */
template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::FinalizeLocalInternalAllocator(
    InternalAllocator::LocalSmallObjectAllocator *local_allocator, Allocator *allocator)
{
    (void)local_allocator;
    (void)allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        local_allocator->VisitAndRemoveAllPools(
            [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
        allocator->Delete(local_allocator);
    }
}

/* static */
template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::RemoveFreePoolsForLocalInternalAllocator(LocalSmallObjectAllocator *local_allocator)
{
    (void)local_allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        local_allocator->VisitAndRemoveFreePools(
            [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    }
}

template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::InitInternalAllocatorFromRuntime(Allocator *allocator)
{
    ASSERT(allocator_from_runtime == nullptr);
    allocator_from_runtime = allocator;
}

template <InternalAllocatorConfig Config>
Allocator *InternalAllocator<Config>::GetInternalAllocatorFromRuntime()
{
    return allocator_from_runtime;
}

template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::ClearInternalAllocatorFromRuntime()
{
    allocator_from_runtime = nullptr;
}

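// Explicit instantiations: the member definitions above live in this
// translation unit, so every configuration and allocation scope used by the
// runtime must be instantiated here.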
template class InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>;
template class InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>;
template void *InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>::Alloc<AllocScope::GLOBAL>(size_t,
                                                                                                        Alignment);
template void *InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>::Alloc<AllocScope::LOCAL>(size_t,
                                                                                                       Alignment);
template void *InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>::Alloc<AllocScope::GLOBAL>(size_t,
                                                                                                        Alignment);
template void *InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>::Alloc<AllocScope::LOCAL>(size_t,
                                                                                                       Alignment);

#undef LOG_INTERNAL_ALLOCATOR

}  // namespace panda::mem