/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "include/runtime.h"
#include "runtime/mem/internal_allocator-inl.h"
#include "runtime/include/thread.h"

namespace panda::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_INTERNAL_ALLOCATOR(level) LOG(level, ALLOC) << "InternalAllocator: "

#if defined(TRACK_INTERNAL_ALLOCATIONS)
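// Chooses the tracker implementation from the TRACK_INTERNAL_ALLOCATIONS level:
// 1 selects SimpleAllocTracker, 2 selects DetailAllocTracker; the tracker's records
// are used by the destructor to dump leaked allocations.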
static AllocTracker *CreateAllocTracker()
{
    static constexpr int SIMPLE_ALLOC_TRACKER = 1;
    static constexpr int DETAIL_ALLOC_TRACKER = 2;

    if constexpr (TRACK_INTERNAL_ALLOCATIONS == SIMPLE_ALLOC_TRACKER) {
        return new SimpleAllocTracker();
    } else if constexpr (TRACK_INTERNAL_ALLOCATIONS == DETAIL_ALLOC_TRACKER) {
        return new DetailAllocTracker();
    } else {
        UNREACHABLE();
    }
}
#endif // TRACK_INTERNAL_ALLOCATIONS

template <InternalAllocatorConfig CONFIG>
Allocator *InternalAllocator<CONFIG>::allocatorFromRuntime_ = nullptr;

template <InternalAllocatorConfig CONFIG>
InternalAllocator<CONFIG>::InternalAllocator(MemStatsType *memStats)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        runslotsAllocator_ = new RunSlotsAllocatorT(memStats, SpaceType::SPACE_TYPE_INTERNAL);
        freelistAllocator_ = new FreeListAllocatorT(memStats, SpaceType::SPACE_TYPE_INTERNAL);
        humongousAllocator_ = new HumongousObjAllocatorT(memStats, SpaceType::SPACE_TYPE_INTERNAL);
    } else { // NOLINT(readability-misleading-indentation)
        mallocAllocator_ = new MallocProxyAllocatorT(memStats, SpaceType::SPACE_TYPE_INTERNAL);
    }

#if defined(TRACK_INTERNAL_ALLOCATIONS)
    memStats_ = memStats;
    tracker_ = CreateAllocTracker();
#endif // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Initializing InternalAllocator finished";
}

template <InternalAllocatorConfig CONFIG>
template <AllocScope ALLOC_SCOPE_T>
[[nodiscard]] void *InternalAllocator<CONFIG>::Alloc(size_t size, Alignment align)
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    os::memory::LockHolder lock(lock_);
#endif // TRACK_INTERNAL_ALLOCATIONS
    void *res = nullptr;
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to allocate " << size << " bytes";
    if (UNLIKELY(size == 0)) {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Failed to allocate - size of object is zero";
        return nullptr;
    }
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        res = AllocViaPandaAllocators<ALLOC_SCOPE_T>(size, align);
    } else { // NOLINT(readability-misleading-indentation)
        res = mallocAllocator_->Alloc(size, align);
    }
    if (res == nullptr) {
        return nullptr;
    }
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Allocated " << size << " bytes at address " << std::hex << res;
#ifdef TRACK_INTERNAL_ALLOCATIONS
    tracker_->TrackAlloc(res, AlignUp(size, GetAlignmentInBytes(align)), SpaceType::SPACE_TYPE_INTERNAL);
#endif // TRACK_INTERNAL_ALLOCATIONS
    return res;
}

template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::Free(void *ptr)
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    os::memory::LockHolder lock(lock_);
#endif // TRACK_INTERNAL_ALLOCATIONS
    if (ptr == nullptr) {
        return;
    }
#ifdef TRACK_INTERNAL_ALLOCATIONS
    // Track the free before the actual deallocation, even though we do not touch the memory
    // at ptr: clang-tidy would otherwise detect that the memory at ptr becomes unavailable
    // after the free and report errors.
    tracker_->TrackFree(ptr);
#endif // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to free via InternalAllocator at address " << std::hex << ptr;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        FreeViaPandaAllocators(ptr);
    } else { // NOLINT(readability-misleading-indentation)
        mallocAllocator_->Free(ptr);
    }
}

template <InternalAllocatorConfig CONFIG>
InternalAllocator<CONFIG>::~InternalAllocator()
{
#ifdef TRACK_INTERNAL_ALLOCATIONS
    if (memStats_->GetFootprint(SpaceType::SPACE_TYPE_INTERNAL) != 0) {
        // A non-zero internal footprint at destruction means leaked internal allocations.
        LOG(ERROR, RUNTIME) << "Memory leaks detected.";
        tracker_->DumpMemLeaks(std::cerr);
    }
    tracker_->Dump();
    delete tracker_;
#endif // TRACK_INTERNAL_ALLOCATIONS
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Destroying InternalAllocator";
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        delete runslotsAllocator_;
        delete freelistAllocator_;
        delete humongousAllocator_;
    } else { // NOLINT(readability-misleading-indentation)
        delete mallocAllocator_;
    }
    LOG_INTERNAL_ALLOCATOR(DEBUG) << "Destroying InternalAllocator finished";
}

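// Allocates from the given RunSlots allocator, expanding it with pools from the mmap
// pool manager when it runs out of memory. The static mutex serializes expansion so
// concurrent failing threads retry the allocation before adding yet another pool.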
template <class AllocatorT>
void *AllocInRunSlots(AllocatorT *runslotsAllocator, size_t size, Alignment align, size_t poolSize)
{
    void *res = runslotsAllocator->Alloc(size, align);
    if (res == nullptr) {
        // Serialize pool expansion so we don't add redundant pools to the allocator
        static os::memory::Mutex poolLock;
        os::memory::LockHolder lock(poolLock);
        while (true) {
            res = runslotsAllocator->Alloc(size, align);
            if (res != nullptr) {
                break;
            }
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "RunSlotsAllocator failed to allocate memory, trying to add a new pool";
            auto pool = PoolManager::GetMmapMemPool()->AllocPool(poolSize, SpaceType::SPACE_TYPE_INTERNAL,
                                                                 AllocatorType::RUNSLOTS_ALLOCATOR, runslotsAllocator);
            if (UNLIKELY(pool.GetMem() == nullptr)) {
                return nullptr;
            }
            runslotsAllocator->AddMemoryPool(pool.GetMem(), pool.GetSize());
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "RunSlotsAllocator retrying allocation after adding a new pool";
        }
    }
    return res;
}

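// Dispatches an allocation to one of the Panda allocators by aligned size: requests up
// to RunSlotsAllocatorT::GetMaxSize() go to a RunSlots allocator (the global instance,
// or the thread-local one for AllocScope::LOCAL), mid-sized requests go to the free-list
// allocator, and anything larger to the humongous-object allocator.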
template <InternalAllocatorConfig CONFIG>
template <AllocScope ALLOC_SCOPE_T>
void *InternalAllocator<CONFIG>::AllocViaPandaAllocators(size_t size, Alignment align)
{
    void *res = nullptr;
    size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
    static_assert(RunSlotsAllocatorT::GetMaxSize() == LocalSmallObjectAllocator::GetMaxSize());
    if (LIKELY(alignedSize <= RunSlotsAllocatorT::GetMaxSize())) {
        // NOLINTNEXTLINE(readability-braces-around-statements)
        if constexpr (ALLOC_SCOPE_T == AllocScope::GLOBAL) {
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use RunSlotsAllocator";
            res = AllocInRunSlots(runslotsAllocator_, size, align, RunSlotsAllocatorT::GetMinPoolSize());
            if (res == nullptr) {
                return nullptr;
            }
        } else { // NOLINT(readability-misleading-indentation)
            static_assert(ALLOC_SCOPE_T == AllocScope::LOCAL);
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use thread-local RunSlotsAllocator";
            ASSERT(panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator() != nullptr);
            res = AllocInRunSlots(panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator(), size, align,
                                  LocalSmallObjectAllocator::GetMinPoolSize());
            if (res == nullptr) {
                return nullptr;
            }
        }
    } else if (alignedSize <= FreeListAllocatorT::GetMaxSize()) {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use FreeListAllocator";
        res = freelistAllocator_->Alloc(size, align);
        if (res == nullptr) {
            // Serialize pool expansion so we don't add redundant pools to the allocator
            static os::memory::Mutex poolLock;
            os::memory::LockHolder lock(poolLock);
            while (true) {
                res = freelistAllocator_->Alloc(size, align);
                if (res != nullptr) {
                    break;
                }
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "FreeListAllocator failed to allocate memory, trying to add a new pool";
                size_t poolSize = FreeListAllocatorT::GetMinPoolSize();
                auto pool = PoolManager::GetMmapMemPool()->AllocPool(
                    poolSize, SpaceType::SPACE_TYPE_INTERNAL, AllocatorType::FREELIST_ALLOCATOR, freelistAllocator_);
                if (UNLIKELY(pool.GetMem() == nullptr)) {
                    return nullptr;
                }
                freelistAllocator_->AddMemoryPool(pool.GetMem(), pool.GetSize());
            }
        }
    } else {
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Try to use HumongousObjAllocator";
        res = humongousAllocator_->Alloc(size, align);
        if (res == nullptr) {
            // Serialize pool expansion so we don't add redundant pools to the allocator
            static os::memory::Mutex poolLock;
            os::memory::LockHolder lock(poolLock);
            while (true) {
                res = humongousAllocator_->Alloc(size, align);
                if (res != nullptr) {
                    break;
                }
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "HumongousObjAllocator failed to allocate memory, trying to add a new pool";
                size_t poolSize = HumongousObjAllocatorT::GetMinPoolSize(size);
                auto pool = PoolManager::GetMmapMemPool()->AllocPool(
                    poolSize, SpaceType::SPACE_TYPE_INTERNAL, AllocatorType::HUMONGOUS_ALLOCATOR, humongousAllocator_);
                if (UNLIKELY(pool.GetMem() == nullptr)) {
                    return nullptr;
                }
                humongousAllocator_->AddMemoryPool(pool.GetMem(), pool.GetSize());
            }
        }
    }
    return res;
}

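// Finds the allocator owning ptr via the pool metadata recorded in the mmap pool manager
// and dispatches the free accordingly. For RUNSLOTS_ALLOCATOR pools, the allocator header
// address distinguishes the global instance from a thread-local one.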
template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::FreeViaPandaAllocators(void *ptr)
{
    AllocatorType allocType = PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetType();
    switch (allocType) {
        case AllocatorType::RUNSLOTS_ALLOCATOR:
            if (PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                runslotsAllocator_) {
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via RunSlotsAllocator";
                runslotsAllocator_->Free(ptr);
            } else {
                LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via thread-local RunSlotsAllocator";
                // It is a thread-local internal allocator instance
                LocalSmallObjectAllocator *localAllocator =
                    panda::ManagedThread::GetCurrent()->GetLocalInternalAllocator();
                ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                       localAllocator);
                localAllocator->Free(ptr);
            }
            break;
        case AllocatorType::FREELIST_ALLOCATOR:
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via FreeListAllocator";
            ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                   freelistAllocator_);
            freelistAllocator_->Free(ptr);
            break;
        case AllocatorType::HUMONGOUS_ALLOCATOR:
            LOG_INTERNAL_ALLOCATOR(DEBUG) << "free via HumongousObjAllocator";
            ASSERT(PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ptr).GetAllocatorHeaderAddr() ==
                   humongousAllocator_);
            humongousAllocator_->Free(ptr);
            break;
        default:
            UNREACHABLE();
            break;
    }
}

/* static */
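// Creates the thread-local small-object allocator for the current thread under the
// PANDA_ALLOCATORS configuration; the malloc-proxy configuration needs no per-thread
// state and returns nullptr.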
template <InternalAllocatorConfig CONFIG>
typename InternalAllocator<CONFIG>::LocalSmallObjectAllocator *InternalAllocator<CONFIG>::SetUpLocalInternalAllocator(
    Allocator *allocator)
{
    (void)allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        auto localAllocator =
            allocator->New<LocalSmallObjectAllocator>(allocator->GetMemStats(), SpaceType::SPACE_TYPE_INTERNAL);
        LOG_INTERNAL_ALLOCATOR(DEBUG) << "Set up local internal allocator at addr " << localAllocator
                                      << " for the thread " << panda::Thread::GetCurrent();
        return localAllocator;
    }
    return nullptr;
}

/* static */
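// Returns every pool owned by the thread-local allocator to the mmap pool manager and
// destroys the allocator instance itself.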
template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::FinalizeLocalInternalAllocator(
    InternalAllocator::LocalSmallObjectAllocator *localAllocator, Allocator *allocator)
{
    (void)localAllocator;
    (void)allocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        localAllocator->VisitAndRemoveAllPools(
            [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
        allocator->Delete(localAllocator);
    }
}

/* static */
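// Releases only the completely free pools of the thread-local allocator back to the
// mmap pool manager, trimming its footprint without destroying it.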
template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::RemoveFreePoolsForLocalInternalAllocator(LocalSmallObjectAllocator *localAllocator)
{
    (void)localAllocator;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONFIG == InternalAllocatorConfig::PANDA_ALLOCATORS) {
        localAllocator->VisitAndRemoveFreePools(
            [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    }
}

template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::InitInternalAllocatorFromRuntime(Allocator *allocator)
{
    ASSERT(allocatorFromRuntime_ == nullptr);
    allocatorFromRuntime_ = allocator;
}

template <InternalAllocatorConfig CONFIG>
Allocator *InternalAllocator<CONFIG>::GetInternalAllocatorFromRuntime()
{
    return allocatorFromRuntime_;
}

template <InternalAllocatorConfig CONFIG>
void InternalAllocator<CONFIG>::ClearInternalAllocatorFromRuntime()
{
    allocatorFromRuntime_ = nullptr;
}

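// Explicit instantiations: both configurations and both allocation scopes are compiled
// here, so other translation units can use InternalAllocator without its definitions.
//
// Illustrative usage sketch (the real wiring lives in the runtime initialization code;
// DEFAULT_ALIGNMENT is assumed to be the project-wide default Alignment value):
//   auto *allocator = new InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>(memStats);
//   void *buf = allocator->Alloc<AllocScope::GLOBAL>(sizeof(uint64_t), DEFAULT_ALIGNMENT);
//   allocator->Free(buf);
//   delete allocator;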
template class InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>;
template class InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>;
template void *InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>::Alloc<AllocScope::GLOBAL>(size_t,
                                                                                                        Alignment);
template void *InternalAllocator<InternalAllocatorConfig::PANDA_ALLOCATORS>::Alloc<AllocScope::LOCAL>(size_t,
                                                                                                       Alignment);
template void *InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>::Alloc<AllocScope::GLOBAL>(size_t,
                                                                                                        Alignment);
template void *InternalAllocator<InternalAllocatorConfig::MALLOC_ALLOCATOR>::Alloc<AllocScope::LOCAL>(size_t,
                                                                                                       Alignment);

#undef LOG_INTERNAL_ALLOCATOR

}  // namespace panda::mem