/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "include/runtime.h"
#include "runtime/mem/internal_allocator-inl.h"
#include "runtime/include/thread.h"

namespace ark::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_INTERNAL_ALLOCATOR(level) LOG(level, ALLOC) << "InternalAllocator: "

#if defined(TRACK_INTERNAL_ALLOCATIONS)
static AllocTracker *CreateAllocTracker()
{
    static constexpr int SIMPLE_ALLOC_TRACKER = 1;
    static constexpr int DETAIL_ALLOC_TRACKER = 2;

    if constexpr (TRACK_INTERNAL_ALLOCATIONS == SIMPLE_ALLOC_TRACKER) {
        return new SimpleAllocTracker();
    } else if (TRACK_INTERNAL_ALLOCATIONS == DETAIL_ALLOC_TRACKER) {
        return new DetailAllocTracker();
    } else {
        UNREACHABLE();
    }
}
#endif  // TRACK_INTERNAL_ALLOCATIONS

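// Process-wide allocator handle published by the runtime; it is set, queried
// and reset via InitInternalAllocatorFromRuntime / GetInternalAllocatorFromRuntime /
// ClearInternalAllocatorFromRuntime at the bottom of this file.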
template <InternalAllocatorConfig CONFIG>
Allocator *InternalAllocator<CONFIG>::allocatorFromRuntime_ = nullptr;

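// Depending on CONFIG, the instance is backed either by the three Panda
// sub-allocators (RunSlots, FreeList, HumongousObj) or by a single proxy
// over malloc. All of them account into the internal space of memStats.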
template <InternalAllocatorConfig CONFIG>
InternalAllocator<CONFIG>::InternalAllocator(MemStatsType *memStats)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        runslotsAllocator_ = new RunSlotsAllocatorT(memStats, SpaceType::SPACE_TYPE_INTERNAL);
        freelistAllocator_ = new FreeListAllocatorT(memStats, SpaceType::SPACE_TYPE_INTERNAL);
        humongousAllocator_ = new HumongousObjAllocatorT(memStats, SpaceType::SPACE_TYPE_INTERNAL);
    } else {  // NOLINT(readability-misleading-indentation)
        mallocAllocator_ = new MallocProxyAllocatorT(memStats, SpaceType::SPACE_TYPE_INTERNAL);
    }

#if defined(TRACK_INTERNAL_ALLOCATIONS)
    memStats_ = memStats;
    tracker_ = CreateAllocTracker();
#endif  // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Initializing InternalAllocator finished";
}

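// Common entry point for internal allocations: zero-sized requests fail with
// nullptr, everything else is routed to the configured backend. With
// TRACK_INTERNAL_ALLOCATIONS the whole operation runs under lock_ so that
// the tracker observes allocations and frees consistently across threads.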
template <InternalAllocatorConfig CONFIG>
template <AllocScope ALLOC_SCOPE_T>
[[nodiscard]] void *InternalAllocator<CONFIG>::Alloc(size_t size, Alignment align)
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    os::memory::LockHolder lock(lock_);
#endif  // TRACK_INTERNAL_ALLOCATIONS
    void *res = nullptr;
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to allocate " << size << " bytes";
    if (UNLIKELY(size == 0)) {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Failed to allocate - size of object is zero";
        return nullptr;
    }
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        res = AllocViaPandaAllocators<ALLOC_SCOPE_T>(size, align);
    } else {  // NOLINT(readability-misleading-indentation)
        res = mallocAllocator_->Alloc(size, align);
    }
    if (res == nullptr) {
        return nullptr;
    }
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Allocate " << size << " bytes at address " << std::hex << res;
#ifdef TRACK_INTERNAL_ALLOCATIONS
    tracker_->TrackAlloc(res, AlignUp(size, align), SpaceType::SPACE_TYPE_INTERNAL);
#endif  // TRACK_INTERNAL_ALLOCATIONS
    return res;
}

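// Releases memory previously obtained from Alloc; nullptr is ignored.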
template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::Free(void *ptr)
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    os::memory::LockHolder lock(lock_);
#endif  // TRACK_INTERNAL_ALLOCATIONS
    if (ptr == nullptr) {
        return;
    }
#ifdef TRACK_INTERNAL_ALLOCATIONS
    // Track the free before actually releasing the memory, even though we do
    // not touch it here: clang-tidy sees that the memory behind ptr becomes
    // unavailable after the free and would report an error otherwise.
    tracker_->TrackFree(ptr);
#endif  // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to free via InternalAllocator at address " << std::hex << ptr;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        FreeViaPandaAllocators(ptr);
    } else {  // NOLINT(readability-misleading-indentation)
        mallocAllocator_->Free(ptr);
    }
}

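// With TRACK_INTERNAL_ALLOCATIONS enabled, a non-zero internal-space
// footprint at this point is reported as a leak before the backend
// allocators are destroyed.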
template <InternalAllocatorConfig CONFIG>
InternalAllocator<CONFIG>::~InternalAllocator()
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    if (memStats_->GetFootprint(SpaceType::SPACE_TYPE_INTERNAL) != 0) {
        // A non-zero footprint here means internal allocations were leaked.
        LOG(ERROR, RUNTIME) << "Memory leaks detected.";
        tracker_->DumpMemLeaks(std::cerr);
    }
    tracker_->Dump();
    delete tracker_;
#endif  // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Destroying InternalAllocator";
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        delete runslotsAllocator_;
        delete freelistAllocator_;
        delete humongousAllocator_;
    } else {  // NOLINT(readability-misleading-indentation)
        delete mallocAllocator_;
    }
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Destroying InternalAllocator finished";
}

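// Slow-path helper shared by the global and thread-local RunSlots allocators:
// if the first attempt fails, retry under a lock, feeding the allocator new
// pools from the mmap pool manager until the allocation succeeds or the pool
// manager itself runs out of memory.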
template <class AllocatorT>
void *AllocInRunSlots(AllocatorT *runslotsAllocator, size_t size, Alignment align, size_t poolSize)
{
    void *res = runslotsAllocator->Alloc(size, align);
    if (res == nullptr) {
        // Avoid several threads each adding an extra pool to the allocator
        static os::memory::Mutex poolLock;
        os::memory::LockHolder lock(poolLock);
        while (true) {
            res = runslotsAllocator->Alloc(size, align);
            if (res != nullptr) {
                break;
            }
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "RunSlotsAllocator didn't allocate memory, try to add new pool";
            auto pool = PoolManager::GetMmapMemPool()->AllocPool(poolSize, SpaceType::SPACE_TYPE_INTERNAL,
                                                                 AllocatorType::RUNSLOTS_ALLOCATOR, runslotsAllocator);
            if (UNLIKELY(pool.GetMem() == nullptr)) {
                return nullptr;
            }
            runslotsAllocator->AddMemoryPool(pool.GetMem(), pool.GetSize());
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "RunSlotsAllocator try to allocate memory again after pool adding";
        }
    }
    return res;
}

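// Small-object path: AllocScope::GLOBAL goes through the shared
// RunSlotsAllocator, AllocScope::LOCAL through the current managed thread's
// own LocalSmallObjectAllocator.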
template <InternalAllocatorConfig CONFIG>
template <AllocScope ALLOC_SCOPE_T>
void *InternalAllocator<CONFIG>::AllocViaRunSlotsAllocator(size_t size, Alignment align)
{
    void *res = nullptr;
    if constexpr (ALLOC_SCOPE_T == AllocScope::GLOBAL) {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use RunSlotsAllocator";
        res = AllocInRunSlots(runslotsAllocator_, size, align, RunSlotsAllocatorT::GetMinPoolSize());
    } else {
        static_assert(ALLOC_SCOPE_T == AllocScope::LOCAL);
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use thread-local RunSlotsAllocator";
        ASSERT(ark::ManagedThread::GetCurrent()->GetLocalInternalAllocator() != nullptr);
        res = AllocInRunSlots(ark::ManagedThread::GetCurrent()->GetLocalInternalAllocator(), size, align,
                              LocalSmallObjectAllocator::GetMinPoolSize());
    }
    return res;
}

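// Medium-object path: same retry-and-add-pool pattern as AllocInRunSlots,
// applied to the FreeListAllocator.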
template <InternalAllocatorConfig CONFIG>
void *InternalAllocator<CONFIG>::AllocViaFreeListAllocator(size_t size, Alignment align)
{
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use FreeListAllocator";
    void *res = freelistAllocator_->Alloc(size, align);
    if (res != nullptr) {
        return res;
    }
    // Avoid several threads each adding an extra pool to the allocator
    static os::memory::Mutex poolLock;
    os::memory::LockHolder lock(poolLock);
    while (true) {
        res = freelistAllocator_->Alloc(size, align);
        if (res != nullptr) {
            break;
        }
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "FreeListAllocator didn't allocate memory, try to add new pool";
        size_t poolSize = FreeListAllocatorT::GetMinPoolSize();
        auto pool = PoolManager::GetMmapMemPool()->AllocPool(poolSize, SpaceType::SPACE_TYPE_INTERNAL,
                                                             AllocatorType::FREELIST_ALLOCATOR, freelistAllocator_);
        if (UNLIKELY(pool.GetMem() == nullptr)) {
            return nullptr;
        }
        freelistAllocator_->AddMemoryPool(pool.GetMem(), pool.GetSize());
    }
    return res;
}

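// Large-object path: as above, but the pool size is derived from the
// requested size via HumongousObjAllocatorT::GetMinPoolSize(size).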
template <InternalAllocatorConfig CONFIG>
void *InternalAllocator<CONFIG>::AllocViaHumongousAllocator(size_t size, Alignment align)
{
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use HumongousObjAllocator";
    void *res = humongousAllocator_->Alloc(size, align);
    if (res != nullptr) {
        return res;
    }
    // Avoid several threads each adding an extra pool to the allocator
    static os::memory::Mutex poolLock;
    os::memory::LockHolder lock(poolLock);
    while (true) {
        res = humongousAllocator_->Alloc(size, align);
        if (res != nullptr) {
            break;
        }
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "HumongousObjAllocator didn't allocate memory, try to add new pool";
        size_t poolSize = HumongousObjAllocatorT::GetMinPoolSize(size);
        auto pool = PoolManager::GetMmapMemPool()->AllocPool(poolSize, SpaceType::SPACE_TYPE_INTERNAL,
                                                             AllocatorType::HUMONGOUS_ALLOCATOR, humongousAllocator_);
        if (UNLIKELY(pool.GetMem() == nullptr)) {
            return nullptr;
        }
        humongousAllocator_->AddMemoryPool(pool.GetMem(), pool.GetSize());
    }
    return res;
}

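// Dispatches by the aligned request size: RunSlots for small objects,
// FreeList for medium ones, HumongousObj for anything larger. The
// static_assert keeps the small-object threshold identical for the global
// and thread-local cases.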
template <InternalAllocatorConfig CONFIG>
template <AllocScope ALLOC_SCOPE_T>
void *InternalAllocator<CONFIG>::AllocViaPandaAllocators(size_t size, Alignment align)
{
    void *res = nullptr;
    size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
    static_assert(RunSlotsAllocatorT::GetMaxSize() == LocalSmallObjectAllocator::GetMaxSize());
    if (LIKELY(alignedSize <= RunSlotsAllocatorT::GetMaxSize())) {
        res = this->AllocViaRunSlotsAllocator<ALLOC_SCOPE_T>(size, align);
    } else if (alignedSize <= FreeListAllocatorT::GetMaxSize()) {
        res = this->AllocViaFreeListAllocator(size, align);
    } else {
        res = this->AllocViaHumongousAllocator(size, align);
    }
    return res;
}

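// The owning allocator is recovered from the pool manager by the pointer's
// address. A RunSlots pool may belong either to the global allocator or to a
// thread-local one; the two are told apart by the allocator header address.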
template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::FreeViaPandaAllocators(void *ptr)
{
    AllocatorType allocType = PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetType();
    switch (allocType) {
        case AllocatorType::RUNSLOTS_ALLOCATOR:
            if (PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                runslotsAllocator_) {
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via RunSlotsAllocator";
                runslotsAllocator_->Free(ptr);
            } else {
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via thread-local RunSlotsAllocator";
                // It is a thread-local internal allocator instance
                LocalSmallObjectAllocator *localAllocator =
                    ark::ManagedThread::GetCurrent()->GetLocalInternalAllocator();
                ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                       localAllocator);
                localAllocator->Free(ptr);
            }
            break;
        case AllocatorType::FREELIST_ALLOCATOR:
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via FreeListAllocator";
            ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                   freelistAllocator_);
            freelistAllocator_->Free(ptr);
            break;
        case AllocatorType::HUMONGOUS_ALLOCATOR:
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via HumongousObjAllocator";
            ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                   humongousAllocator_);
            humongousAllocator_->Free(ptr);
            break;
        default:
            UNREACHABLE();
            break;
    }
}

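// Creates a thread-local small-object allocator from the given allocator's
// memory; under the malloc-proxy configuration there is nothing to set up
// and nullptr is returned.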
/* static */
template <InternalAllocatorConfig CONFIG>
typename InternalAllocator<CONFIG>::LocalSmallObjectAllocator *InternalAllocator<CONFIG>::SetUpLocalInternalAllocator(
    Allocator *allocator)
{
    (void)allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        auto localAllocator =
            allocator->New<LocalSmallObjectAllocator>(allocator->GetMemStats(), SpaceType::SPACE_TYPE_INTERNAL);
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Set up local internal allocator at addr " << localAllocator
                                      << " for the thread " << ark::Thread::GetCurrent();
        return localAllocator;
    }
    return nullptr;
}

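// Returns all pools of the thread-local allocator to the pool manager and
// destroys the allocator itself; a no-op under the malloc-proxy
// configuration.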
/* static */
template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::FinalizeLocalInternalAllocator(
    InternalAllocator::LocalSmallObjectAllocator *localAllocator, Allocator *allocator)
{
    (void)localAllocator;
    (void)allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        localAllocator->VisitAndRemoveAllPools(
            [](void *mem, [[maybe_unused]] size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
        allocator->Delete(localAllocator);
    }
}

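// Returns only the completely free pools to the pool manager; the
// thread-local allocator itself stays alive and usable.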
/* static */
template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::RemoveFreePoolsForLocalInternalAllocator(LocalSmallObjectAllocator *localAllocator)
{
    (void)localAllocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        localAllocator->VisitAndRemoveFreePools(
            [](void *mem, [[maybe_unused]] size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    }
}

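// Accessors for the process-wide allocator handle defined above; the ASSERT
// in Init guards against double initialization.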
template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::InitInternalAllocatorFromRuntime(Allocator *allocator)
{
    ASSERT(allocatorFromRuntime_ == nullptr);
    allocatorFromRuntime_ = allocator;
}

template <InternalAllocatorConfig CONFIG>
Allocator *InternalAllocator<CONFIG>::GetInternalAllocatorFromRuntime()
{
    return allocatorFromRuntime_;
}

template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::ClearInternalAllocatorFromRuntime()
{
    allocatorFromRuntime_ = nullptr;
}

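// Explicit instantiations for both configurations and both allocation scopes
// keep the template definitions above local to this translation unit.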
template class InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>;
template class InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>;
template void *InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>::Alloc<AllocScope::GLOBAL>(size_t,
                                                                                                       Alignment);
template void *InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>::Alloc<AllocScope::LOCAL>(size_t,
                                                                                                      Alignment);
template void *InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>::Alloc<AllocScope::GLOBAL>(size_t,
                                                                                                       Alignment);
template void *InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>::Alloc<AllocScope::LOCAL>(size_t,
                                                                                                      Alignment);

#undef LOG_INTERNAL_ALLOCATOR

}  // namespace ark::mem