/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "include/runtime.h"
#include "runtime/mem/internal_allocator-inl.h"
#include "runtime/include/thread.h"

namespace panda::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_INTERNAL_ALLOCATOR(level) LOG(level, ALLOC) << "InternalAllocator: "

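// TRACK_INTERNAL_ALLOCATIONS is expected to be supplied by the build: a value of 1 selects the
// simple tracker and 2 selects the detailed tracker (see the constants below).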
#if defined(TRACK_INTERNAL_ALLOCATIONS)
static AllocTracker *CreateAllocTracker()
{
    static constexpr int SIMPLE_ALLOC_TRACKER = 1;
    static constexpr int DETAIL_ALLOC_TRACKER = 2;

    if constexpr (TRACK_INTERNAL_ALLOCATIONS == SIMPLE_ALLOC_TRACKER) {
        return new SimpleAllocTracker();
    } else if constexpr (TRACK_INTERNAL_ALLOCATIONS == DETAIL_ALLOC_TRACKER) {
        return new DetailAllocTracker();
    } else {
        UNREACHABLE();
    }
}
#endif // TRACK_INTERNAL_ALLOCATIONS

template <InternalAllocatorConfig Config>
Allocator *InternalAllocator<Config>::allocator_from_runtime = nullptr;

template <InternalAllocatorConfig Config>
InternalAllocator<Config>::InternalAllocator(MemStatsType *mem_stats)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        runslots_allocator_ = new RunSlotsAllocatorT(mem_stats, SpaceType::SPACE_TYPE_INTERNAL);
        freelist_allocator_ = new FreeListAllocatorT(mem_stats, SpaceType::SPACE_TYPE_INTERNAL);
        humongous_allocator_ = new HumongousObjAllocatorT(mem_stats, SpaceType::SPACE_TYPE_INTERNAL);
    } else { // NOLINT(readability-misleading-indentation)
        malloc_allocator_ = new MallocProxyAllocatorT(mem_stats, SpaceType::SPACE_TYPE_INTERNAL);
    }

#if defined(TRACK_INTERNAL_ALLOCATIONS)
    mem_stats_ = mem_stats;
    tracker_ = CreateAllocTracker();
#endif // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Initializing InternalAllocator finished";
}

template <InternalAllocatorConfig Config>
template <AllocScope AllocScopeT>
[[nodiscard]] void *InternalAllocator<Config>::Alloc(size_t size, Alignment align)
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    os::memory::LockHolder lock(lock_);
#endif // TRACK_INTERNAL_ALLOCATIONS
    void *res = nullptr;
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to allocate " << size << " bytes";
    if (UNLIKELY(size == 0)) {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Failed to allocate - size of object is zero";
        return nullptr;
    }
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        res = AllocViaPandaAllocators<AllocScopeT>(size, align);
    } else { // NOLINT(readability-misleading-indentation)
        res = malloc_allocator_->Alloc(size, align);
    }
    if (res == nullptr) {
        return nullptr;
    }
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Allocated " << size << " bytes at address " << std::hex << res;
#ifdef TRACK_INTERNAL_ALLOCATIONS
    tracker_->TrackAlloc(res, AlignUp(size, align), SpaceType::SPACE_TYPE_INTERNAL);
#endif // TRACK_INTERNAL_ALLOCATIONS
    return res;
}

template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::Free(void *ptr)
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    os::memory::LockHolder lock(lock_);
#endif // TRACK_INTERNAL_ALLOCATIONS
    if (ptr == nullptr) {
        return;
    }
#ifdef TRACK_INTERNAL_ALLOCATIONS
    // Track the free before the memory is actually released, even though we don't touch it here:
    // otherwise clang-tidy detects that the memory at ptr becomes unavailable after the free
    // and reports errors.
    tracker_->TrackFree(ptr);
#endif // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to free via InternalAllocator at address " << std::hex << ptr;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        FreeViaPandaAllocators(ptr);
    } else { // NOLINT(readability-misleading-indentation)
        malloc_allocator_->Free(ptr);
    }
}
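
// Usage sketch (hedged): `internal_allocator` stands for a pointer to an InternalAllocator
// instance owned by the runtime, and DEFAULT_ALIGNMENT is assumed to be a valid Alignment value.
//
//     void *buf = internal_allocator->Alloc<AllocScope::GLOBAL>(128, DEFAULT_ALIGNMENT);
//     if (buf != nullptr) {
//         // ... use the 128-byte buffer ...
//         internal_allocator->Free(buf);
//     }
//
// Alloc returns nullptr both for zero-sized requests and on out-of-memory, so callers must
// check the result before use.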

template <InternalAllocatorConfig Config>
InternalAllocator<Config>::~InternalAllocator()
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    if (mem_stats_->GetFootprint(SpaceType::SPACE_TYPE_INTERNAL) != 0) {
        // A non-zero internal footprint at destruction means internal allocations were leaked.
        LOG(ERROR, RUNTIME) << "Memory leaks detected.";
        tracker_->DumpMemLeaks(std::cerr);
    }
    tracker_->Dump();
    delete tracker_;
#endif // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Destroying InternalAllocator";
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        delete runslots_allocator_;
        delete freelist_allocator_;
        delete humongous_allocator_;
    } else { // NOLINT(readability-misleading-indentation)
        delete malloc_allocator_;
    }
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Destroying InternalAllocator finished";
}

template <class AllocatorT>
void *AllocInRunSlots(AllocatorT *runslots_allocator, size_t size, Alignment align, size_t pool_size)
{
    void *res = runslots_allocator->Alloc(size, align);
    if (res == nullptr) {
        // Serialize pool expansion so that concurrent threads don't add redundant pools to the allocator
        static os::memory::Mutex pool_lock;
        os::memory::LockHolder lock(pool_lock);
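        // Another thread may have added a pool while we were waiting for the lock, so retry the
        // allocation first and request a new pool only if it still fails.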
        while (true) {
            res = runslots_allocator->Alloc(size, align);
            if (res != nullptr) {
                break;
            }
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "RunSlotsAllocator allocation failed, trying to add a new pool";
            auto pool = PoolManager::GetMmapMemPool()->AllocPool(pool_size, SpaceType::SPACE_TYPE_INTERNAL,
                                                                 AllocatorType::RUNSLOTS_ALLOCATOR, runslots_allocator);
            if (UNLIKELY(pool.GetMem() == nullptr)) {
                return nullptr;
            }
            runslots_allocator->AddMemoryPool(pool.GetMem(), pool.GetSize());
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "RunSlotsAllocator will retry the allocation after adding the pool";
        }
    }
    return res;
}

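// Routes an allocation to one of the Panda allocators by size class: requests that fit the
// RunSlots limit go to a RunSlotsAllocator (the global one, or the thread-local one for
// AllocScope::LOCAL), mid-sized requests go to the FreeListAllocator, and everything larger is
// served by the HumongousObjAllocator. Each backend grows by the same retry-under-lock pattern
// as AllocInRunSlots above: retry the allocation, and add a new pool only if it still fails.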
template <InternalAllocatorConfig Config>
template <AllocScope AllocScopeT>
void *InternalAllocator<Config>::AllocViaPandaAllocators(size_t size, Alignment align)
{
    void *res = nullptr;
    size_t aligned_size = AlignUp(size, GetAlignmentInBytes(align));
    static_assert(RunSlotsAllocatorT::GetMaxSize() == LocalSmallObjectAllocator::GetMaxSize());
    if (LIKELY(aligned_size <= RunSlotsAllocatorT::GetMaxSize())) {
        // NOLINTNEXTLINE(readability-braces-around-statements)
        if constexpr (AllocScopeT == AllocScope::GLOBAL) {
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use RunSlotsAllocator";
            res = AllocInRunSlots(runslots_allocator_, size, align, RunSlotsAllocatorT::GetMinPoolSize());
            if (res == nullptr) {
                return nullptr;
            }
        } else { // NOLINT(readability-misleading-indentation)
            static_assert(AllocScopeT == AllocScope::LOCAL);
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use thread-local RunSlotsAllocator";
            ASSERT(panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator() != nullptr);
            res = AllocInRunSlots(panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator(), size, align,
                                  LocalSmallObjectAllocator::GetMinPoolSize());
            if (res == nullptr) {
                return nullptr;
            }
        }
    } else if (aligned_size <= FreeListAllocatorT::GetMaxSize()) {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use FreeListAllocator";
        res = freelist_allocator_->Alloc(size, align);
        if (res == nullptr) {
            // Serialize pool expansion so that concurrent threads don't add redundant pools
            static os::memory::Mutex pool_lock;
            os::memory::LockHolder lock(pool_lock);
            while (true) {
                res = freelist_allocator_->Alloc(size, align);
                if (res != nullptr) {
                    break;
                }
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "FreeListAllocator allocation failed, trying to add a new pool";
                size_t pool_size = FreeListAllocatorT::GetMinPoolSize();
                auto pool = PoolManager::GetMmapMemPool()->AllocPool(
                    pool_size, SpaceType::SPACE_TYPE_INTERNAL, AllocatorType::FREELIST_ALLOCATOR, freelist_allocator_);
                if (UNLIKELY(pool.GetMem() == nullptr)) {
                    return nullptr;
                }
                freelist_allocator_->AddMemoryPool(pool.GetMem(), pool.GetSize());
            }
        }
    } else {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use HumongousObjAllocator";
        res = humongous_allocator_->Alloc(size, align);
        if (res == nullptr) {
            // Serialize pool expansion so that concurrent threads don't add redundant pools
            static os::memory::Mutex pool_lock;
            os::memory::LockHolder lock(pool_lock);
            while (true) {
                res = humongous_allocator_->Alloc(size, align);
                if (res != nullptr) {
                    break;
                }
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "HumongousObjAllocator allocation failed, trying to add a new pool";
                size_t pool_size = HumongousObjAllocatorT::GetMinPoolSize(size);
                auto pool =
                    PoolManager::GetMmapMemPool()->AllocPool(pool_size, SpaceType::SPACE_TYPE_INTERNAL,
                                                             AllocatorType::HUMONGOUS_ALLOCATOR, humongous_allocator_);
                if (UNLIKELY(pool.GetMem() == nullptr)) {
                    return nullptr;
                }
                humongous_allocator_->AddMemoryPool(pool.GetMem(), pool.GetSize());
            }
        }
    }
    return res;
}

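// The owning allocator is recovered from pool metadata: the mmap pool manager records, for every
// pool, which allocator instance it was handed to, so a pointer can be routed back to its
// allocator by address lookup alone.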
template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::FreeViaPandaAllocators(void *ptr)
{
    AllocatorType alloc_type = PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetType();
    switch (alloc_type) {
        case AllocatorType::RUNSLOTS_ALLOCATOR:
            if (PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                runslots_allocator_) {
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via RunSlotsAllocator";
                runslots_allocator_->Free(ptr);
            } else {
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via thread-local RunSlotsAllocator";
                // It is a thread-local internal allocator instance
                LocalSmallObjectAllocator *local_allocator =
                    panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator();
                ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                       local_allocator);
                local_allocator->Free(ptr);
            }
            break;
        case AllocatorType::FREELIST_ALLOCATOR:
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via FreeListAllocator";
            ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                   freelist_allocator_);
            freelist_allocator_->Free(ptr);
            break;
        case AllocatorType::HUMONGOUS_ALLOCATOR:
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via HumongousObjAllocator";
            ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                   humongous_allocator_);
            humongous_allocator_->Free(ptr);
            break;
        default:
            UNREACHABLE();
            break;
    }
}

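// Per-thread small-object allocators: SetUpLocalInternalAllocator is presumably called when a
// managed thread is attached and FinalizeLocalInternalAllocator when it is destroyed; the
// finalizer returns all of the allocator's pools to the PoolManager before deleting it.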
/* static */
template <InternalAllocatorConfig Config>
typename InternalAllocator<Config>::LocalSmallObjectAllocator *InternalAllocator<Config>::SetUpLocalInternalAllocator(
    Allocator *allocator)
{
    (void)allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        auto local_allocator =
            allocator->New<LocalSmallObjectAllocator>(allocator->GetMemStats(), SpaceType::SPACE_TYPE_INTERNAL);
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Set up local internal allocator at addr " << local_allocator
                                      << " for the thread " << panda::Thread::GetCurrent();
        return local_allocator;
    }
    return nullptr;
}

/* static */
template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::FinalizeLocalInternalAllocator(
    InternalAllocator::LocalSmallObjectAllocator *local_allocator, Allocator *allocator)
{
    (void)local_allocator;
    (void)allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        local_allocator->VisitAndRemoveAllPools(
            [](void *mem, [[maybe_unused]] size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
        allocator->Delete(local_allocator);
    }
}

/* static */
template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::RemoveFreePoolsForLocalInternalAllocator(LocalSmallObjectAllocator *local_allocator)
{
    (void)local_allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (Config == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        local_allocator->VisitAndRemoveFreePools(
            [](void *mem, [[maybe_unused]] size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    }
}

template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::InitInternalAllocatorFromRuntime(Allocator *allocator)
{
    ASSERT(allocator_from_runtime == nullptr);
    allocator_from_runtime = allocator;
}

template <InternalAllocatorConfig Config>
Allocator *InternalAllocator<Config>::GetInternalAllocatorFromRuntime()
{
    return allocator_from_runtime;
}

template <InternalAllocatorConfig Config>
void InternalAllocator<Config>::ClearInternalAllocatorFromRuntime()
{
    allocator_from_runtime = nullptr;
}

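// Explicit instantiations: the template definitions live in this translation unit, so every
// configuration and allocation scope used elsewhere in the runtime must be instantiated here.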
template class InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>;
template class InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>;
template void *InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>::Alloc<AllocScope::GLOBAL>(size_t, Alignment);
template void *InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>::Alloc<AllocScope::LOCAL>(size_t, Alignment);
template void *InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>::Alloc<AllocScope::GLOBAL>(size_t, Alignment);
template void *InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>::Alloc<AllocScope::LOCAL>(size_t, Alignment);

#undef LOG_INTERNAL_ALLOCATOR

} // namespace panda::mem