1 /**
2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15 #ifndef PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_INL_H
16 #define PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_INL_H
17
18 #include "runtime/mem/alloc_config.h"
19 #include "runtime/mem/humongous_obj_allocator.h"
20 #include "runtime/mem/object_helpers.h"
21
22 namespace ark::mem {
23
24 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
25 #define LOG_HUMONGOUS_OBJ_ALLOCATOR(level) LOG(level, ALLOC) << "HumongousObjAllocator: "
26
template <typename AllocConfigT, typename LockConfigT>
/// Constructs the allocator; pools are supplied later via AddMemoryPool().
/// @param memStats        sink for allocation statistics (stored, not owned)
/// @param typeAllocation  space type reported to AllocConfigT on alloc/free
HumongousObjAllocator<AllocConfigT, LockConfigT>::HumongousObjAllocator(MemStatsType *memStats,
                                                                        SpaceType typeAllocation)
    : typeAllocation_(typeAllocation), memStats_(memStats)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Initializing HumongousObjAllocator";
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Initializing HumongousObjAllocator finished";
}
35
template <typename AllocConfigT, typename LockConfigT>
/// Destructor only logs; pool memory is returned to the system via
/// VisitAndRemoveAllPools() before destruction, not here.
HumongousObjAllocator<AllocConfigT, LockConfigT>::~HumongousObjAllocator()
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Destroying HumongousObjAllocator";
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Destroying HumongousObjAllocator finished";
}
42
43 template <typename AllocConfigT, typename LockConfigT>
44 template <bool NEED_LOCK>
Alloc(const size_t size,const Alignment align)45 void *HumongousObjAllocator<AllocConfigT, LockConfigT>::Alloc(const size_t size, const Alignment align)
46 {
47 os::memory::WriteLockHolder<LockConfigT, NEED_LOCK> wlock(allocFreeLock_);
48 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to allocate memory with size " << size;
49
50 // Check that we can get a memory header for the memory pointer by using PAGE_SIZE_MASK mask
51 if (UNLIKELY(PAGE_SIZE <= sizeof(MemoryPoolHeader) + GetAlignmentInBytes(align))) {
52 ASSERT(PAGE_SIZE > sizeof(MemoryPoolHeader) + GetAlignmentInBytes(align));
53 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "The align is too big for this allocator. Return nullptr.";
54 return nullptr;
55 }
56
57 // NOTE(aemelenko): this is quite raw approximation.
58 // We can save about sizeof(MemoryPoolHeader) / 2 bytes here
59 // (BTW, it is not so much for MB allocations)
60 size_t alignedSize = size + sizeof(MemoryPoolHeader) + GetAlignmentInBytes(align);
61
62 void *mem = nullptr;
63
64 if (UNLIKELY(alignedSize > HUMONGOUS_OBJ_ALLOCATOR_MAX_SIZE)) {
65 // the size is too big
66 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "The size is too big for this allocator. Return nullptr.";
67 return nullptr;
68 }
69
70 // First try to find suitable block in Reserved pools
71 MemoryPoolHeader *memHeader = reservedPoolsList_.FindSuitablePool(alignedSize);
72 if (memHeader != nullptr) {
73 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Find reserved memory block with size " << memHeader->GetPoolSize();
74 reservedPoolsList_.Pop(memHeader);
75 memHeader->Alloc(size, align);
76 mem = memHeader->GetMemory();
77 } else {
78 memHeader = freePoolsList_.FindSuitablePool(alignedSize);
79 if (memHeader != nullptr) {
80 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Find free memory block with size " << memHeader->GetPoolSize();
81 freePoolsList_.Pop(memHeader);
82 memHeader->Alloc(size, align);
83 mem = memHeader->GetMemory();
84 } else {
85 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Can't find memory for this size";
86 return nullptr;
87 }
88 }
89 occupiedPoolsList_.Insert(memHeader);
90 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Allocated memory at addr " << std::hex << mem;
91 AllocConfigT::OnAlloc(memHeader->GetPoolSize(), typeAllocation_, memStats_);
92 ASAN_UNPOISON_MEMORY_REGION(mem, size);
93 AllocConfigT::MemoryInit(mem);
94 ReleaseUnusedPagesOnAlloc(memHeader, size);
95 return mem;
96 }
97
template <typename AllocConfigT, typename LockConfigT>
/// Thread-safe free: takes the allocator write lock and delegates to FreeUnsafe().
void HumongousObjAllocator<AllocConfigT, LockConfigT>::Free(void *mem)
{
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    FreeUnsafe(mem);
}
104
template <typename AllocConfigT, typename LockConfigT>
/// Returns the pool that hosts "mem" back to the reserved/free lists.
/// Precondition: caller holds allocFreeLock_ for writing (see Free()).
/// nullptr is ignored; in debug builds, pointers not owned by this allocator
/// are ignored as well.
void HumongousObjAllocator<AllocConfigT, LockConfigT>::FreeUnsafe(void *mem)
{
    if (UNLIKELY(mem == nullptr)) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to free memory at invalid addr 0";
        return;
    }
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to free memory at addr " << std::hex << mem;
#ifndef NDEBUG
    // Debug-only ownership check: silently ignore foreign pointers instead of corrupting lists.
    if (!AllocatedByHumongousObjAllocatorUnsafe(mem)) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to free memory not from this allocator";
        return;
    }
#endif  // !NDEBUG

    // Each memory pool is PAGE_SIZE aligned, so to get a header we need just to align a pointer
    auto memHeader = static_cast<MemoryPoolHeader *>(ToVoidPtr(ToUintPtr(mem) & PAGE_SIZE_MASK));
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "It is a MemoryPoolHeader with addr " << std::hex << memHeader << " and size "
                                       << std::dec << memHeader->GetPoolSize();
    occupiedPoolsList_.Pop(memHeader);
    AllocConfigT::OnFree(memHeader->GetPoolSize(), typeAllocation_, memStats_);
    // Poison the whole pool; InsertPool only touches the header, which is
    // unpoisoned/repoisoned internally by MemoryPoolHeader's ASAN-aware methods.
    ASAN_POISON_MEMORY_REGION(memHeader, memHeader->GetPoolSize());
    InsertPool(memHeader);
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Freed memory at addr " << std::hex << mem;
}
130
131 template <typename AllocConfigT, typename LockConfigT>
Collect(const GCObjectVisitor & deathCheckerFn)132 void HumongousObjAllocator<AllocConfigT, LockConfigT>::Collect(const GCObjectVisitor &deathCheckerFn)
133 {
134 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Collecting started";
135 IterateOverObjects([this, &deathCheckerFn](ObjectHeader *objectHeader) {
136 if (deathCheckerFn(objectHeader) == ObjectStatus::DEAD_OBJECT) {
137 LOG(DEBUG, GC) << "DELETE OBJECT " << GetDebugInfoAboutObject(objectHeader);
138 FreeUnsafe(objectHeader);
139 }
140 });
141 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Collecting finished";
142 }
143
template <typename AllocConfigT, typename LockConfigT>
template <typename ObjectVisitor>
/// Visits every object in the occupied-pools list (one object per pool).
/// The write lock is taken per pool, not for the whole walk, so the visitor may
/// free the current pool (Collect relies on this). The next pointer is read
/// before invoking the visitor for exactly that reason.
void HumongousObjAllocator<AllocConfigT, LockConfigT>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Iterating over objects started";
    MemoryPoolHeader *currentPool = nullptr;
    {
        // Read lock is enough just to fetch the list head.
        os::memory::ReadLockHolder rlock(allocFreeLock_);
        currentPool = occupiedPoolsList_.GetListHead();
    }
    while (currentPool != nullptr) {
        os::memory::WriteLockHolder wlock(allocFreeLock_);
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << " check pool at addr " << std::hex << currentPool;
        MemoryPoolHeader *next = currentPool->GetNext();
        objectVisitor(static_cast<ObjectHeader *>(currentPool->GetMemory()));
        currentPool = next;
    }
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Iterating over objects finished";
}
163
164 template <typename AllocConfigT, typename LockConfigT>
AddMemoryPool(void * mem,size_t size)165 bool HumongousObjAllocator<AllocConfigT, LockConfigT>::AddMemoryPool(void *mem, size_t size)
166 {
167 os::memory::WriteLockHolder wlock(allocFreeLock_);
168 ASSERT(mem != nullptr);
169 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Add memory pool to HumongousObjAllocator from " << std::hex << mem
170 << " with size " << std::dec << size;
171 if (AlignUp(ToUintPtr(mem), PAGE_SIZE) != ToUintPtr(mem)) {
172 return false;
173 }
174 auto mempoolHeader = static_cast<MemoryPoolHeader *>(mem);
175 mempoolHeader->Initialize(size, nullptr, nullptr);
176 InsertPool(mempoolHeader);
177 ASAN_POISON_MEMORY_REGION(mem, size);
178 return true;
179 }
180
181 template <typename AllocConfigT, typename LockConfigT>
ReleaseUnusedPagesOnAlloc(MemoryPoolHeader * memoryPool,size_t allocSize)182 void HumongousObjAllocator<AllocConfigT, LockConfigT>::ReleaseUnusedPagesOnAlloc(MemoryPoolHeader *memoryPool,
183 size_t allocSize)
184 {
185 ASSERT(memoryPool != nullptr);
186 uintptr_t allocAddr = ToUintPtr(memoryPool->GetMemory());
187 uintptr_t poolAddr = ToUintPtr(memoryPool);
188 size_t poolSize = memoryPool->GetPoolSize();
189 uintptr_t firstFreePage = AlignUp(allocAddr + allocSize, os::mem::GetPageSize());
190 uintptr_t endOfLastFreePage = os::mem::AlignDownToPageSize(poolAddr + poolSize);
191 if (firstFreePage < endOfLastFreePage) {
192 os::mem::ReleasePages(firstFreePage, endOfLastFreePage);
193 }
194 }
195
196 template <typename AllocConfigT, typename LockConfigT>
InsertPool(MemoryPoolHeader * header)197 void HumongousObjAllocator<AllocConfigT, LockConfigT>::InsertPool(MemoryPoolHeader *header)
198 {
199 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to insert pool with size " << header->GetPoolSize()
200 << " in Reserved memory";
201 // Try to insert it into ReservedMemoryPools
202 MemoryPoolHeader *memHeader = reservedPoolsList_.TryToInsert(header);
203 if (memHeader == nullptr) {
204 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Successfully inserted in Reserved memory";
205 // We successfully insert header into ReservedMemoryPools
206 return;
207 }
208 // We have a crowded out pool or the "header" argument in mem_header
209 // Insert it into free_pools
210 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Couldn't insert into Reserved memory. Insert in free pools";
211 freePoolsList_.Insert(memHeader);
212 }
213
template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
/// Pops every pool (occupied, reserved, free) and passes (pool, size) to the
/// visitor, which typically returns the memory to the system.
void HumongousObjAllocator<AllocConfigT, LockConfigT>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    // We call this method and return pools to the system.
    // Therefore, delete all objects to clear all external dependences
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Clear all objects inside the allocator";
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    occupiedPoolsList_.IterateAndPopOverPools(memVisitor);
    reservedPoolsList_.IterateAndPopOverPools(memVisitor);
    freePoolsList_.IterateAndPopOverPools(memVisitor);
}
226
template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
/// Pops only the pools that hold no objects (free list) and hands each
/// (pool, size) to the visitor; occupied and reserved pools are untouched.
void HumongousObjAllocator<AllocConfigT, LockConfigT>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    freePoolsList_.IterateAndPopOverPools(memVisitor);
}
234
template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
/// Visits the single object (if any) whose pool fully covers the range
/// [leftBorder, rightBorder]. The range must be exactly one crossing-map
/// granule (enforced by the asserts below).
void HumongousObjAllocator<AllocConfigT, LockConfigT>::IterateOverObjectsInRange(const MemVisitor &memVisitor,
                                                                                 void *leftBorder, void *rightBorder)
{
    // NOTE: Current implementation doesn't look at PANDA_CROSSING_MAP_MANAGE_CROSSED_BORDER flag
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "HumongousObjAllocator::IterateOverObjectsInRange for range [" << std::hex
                                       << leftBorder << ", " << rightBorder << "]";
    ASSERT(ToUintPtr(rightBorder) >= ToUintPtr(leftBorder));
    // NOTE(aemelenko): These are temporary asserts because we can't do anything
    // if the range crosses different allocators memory pools
    ASSERT(ToUintPtr(rightBorder) - ToUintPtr(leftBorder) == (CrossingMapSingleton::GetCrossingMapGranularity() - 1U));
    ASSERT((ToUintPtr(rightBorder) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))) ==
           (ToUintPtr(leftBorder) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))));

    // Try to find a pool with this range
    MemoryPoolHeader *discoveredPool = nullptr;
    MemoryPoolHeader *currentPool = nullptr;
    {
        // Lock only around list accesses; the walk itself runs unlocked,
        // so a concurrent Alloc/Free between iterations is the caller's concern.
        os::memory::ReadLockHolder rlock(allocFreeLock_);
        currentPool = occupiedPoolsList_.GetListHead();
    }
    while (currentPool != nullptr) {
        // Use current pool here because it is page aligned
        uintptr_t currentPoolStart = ToUintPtr(currentPool);
        uintptr_t currentPoolEnd = ToUintPtr(currentPool->GetMemory()) + currentPool->GetPoolSize();
        if (currentPoolStart <= ToUintPtr(leftBorder)) {
            // Check that this range is located in the same pool
            if (currentPoolEnd >= ToUintPtr(rightBorder)) {
                discoveredPool = currentPool;
                break;
            }
        }
        {
            os::memory::ReadLockHolder rlock(allocFreeLock_);
            currentPool = currentPool->GetNext();
        }
    }

    if (discoveredPool != nullptr) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG)
            << "HumongousObjAllocator: It is a MemoryPoolHeader with addr " << std::hex << discoveredPool
            << " and size " << std::dec << discoveredPool->GetPoolSize();
        memVisitor(static_cast<ObjectHeader *>(discoveredPool->GetMemory()));
    } else {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG)
            << "HumongousObjAllocator This memory range is not covered by this allocator";
    }
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "HumongousObjAllocator::IterateOverObjectsInRange finished";
}
285
template <typename AllocConfigT, typename LockConfigT>
/// Thread-safe ownership check: true if "mem" is an object allocated by this
/// allocator (exact payload address, not any interior pointer).
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::AllocatedByHumongousObjAllocator(void *mem)
{
    os::memory::ReadLockHolder rlock(allocFreeLock_);
    return AllocatedByHumongousObjAllocatorUnsafe(mem);
}
292
293 template <typename AllocConfigT, typename LockConfigT>
AllocatedByHumongousObjAllocatorUnsafe(void * mem)294 bool HumongousObjAllocator<AllocConfigT, LockConfigT>::AllocatedByHumongousObjAllocatorUnsafe(void *mem)
295 {
296 MemoryPoolHeader *currentPool = occupiedPoolsList_.GetListHead();
297 while (currentPool != nullptr) {
298 if (currentPool->GetMemory() == mem) {
299 return true;
300 }
301 currentPool = currentPool->GetNext();
302 }
303 return false;
304 }
305
template <typename AllocConfigT, typename LockConfigT>
/// Sets up a fresh pool header: records the pool size and list links, with no
/// payload yet (memAddr_ is set by Alloc()). The header is temporarily
/// unpoisoned for the writes and re-poisoned afterwards.
ATTRIBUTE_NO_SANITIZE_ADDRESS void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader::Initialize(
    size_t size, MemoryPoolHeader *prev, MemoryPoolHeader *next)
{
    ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
    poolSize_ = size;
    prev_ = prev;
    next_ = next;
    memAddr_ = nullptr;
    ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
}
317
template <typename AllocConfigT, typename LockConfigT>
/// Computes the aligned payload address right after this header and records it
/// in memAddr_. "size" is used only by the debug assert that the payload fits
/// inside the pool.
ATTRIBUTE_NO_SANITIZE_ADDRESS void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader::Alloc(
    size_t size, Alignment align)
{
    (void)size;  // referenced only inside ASSERT in release builds
    ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
    memAddr_ = ToVoidPtr(AlignUp(ToUintPtr(this) + sizeof(MemoryPoolHeader), GetAlignmentInBytes(align)));
    ASSERT(ToUintPtr(memAddr_) + size <= ToUintPtr(this) + poolSize_);
    ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
}
328
template <typename AllocConfigT, typename LockConfigT>
/// Unlinks this header from its doubly linked list, relinking neighbors and
/// clearing this node's links. Each touched header is unpoisoned only for the
/// duration of its own pointer update. Does NOT fix up any list head pointer —
/// that is MemoryPoolList::Pop's job.
ATTRIBUTE_NO_SANITIZE_ADDRESS void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader::PopHeader()
{
    ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
    if (prev_ != nullptr) {
        ASAN_UNPOISON_MEMORY_REGION(prev_, sizeof(MemoryPoolHeader));
        prev_->SetNext(next_);
        ASAN_POISON_MEMORY_REGION(prev_, sizeof(MemoryPoolHeader));
    }
    if (next_ != nullptr) {
        ASAN_UNPOISON_MEMORY_REGION(next_, sizeof(MemoryPoolHeader));
        next_->SetPrev(prev_);
        ASAN_POISON_MEMORY_REGION(next_, sizeof(MemoryPoolHeader));
    }
    next_ = nullptr;
    prev_ = nullptr;
    ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
}
347
348 template <typename AllocConfigT, typename LockConfigT>
Pop(MemoryPoolHeader * pool)349 void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::Pop(MemoryPoolHeader *pool)
350 {
351 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Pop a pool with addr " << std::hex << pool << " from the pool list";
352 ASSERT(IsInThisList(pool));
353 if (head_ == pool) {
354 head_ = pool->GetNext();
355 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "It was a pointer to list head. Change head to " << std::hex << head_;
356 }
357 pool->PopHeader();
358 }
359
360 template <typename AllocConfigT, typename LockConfigT>
Insert(MemoryPoolHeader * pool)361 void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::Insert(MemoryPoolHeader *pool)
362 {
363 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Insert a pool with addr " << std::hex << pool << " into the pool list";
364 if (head_ != nullptr) {
365 head_->SetPrev(pool);
366 } else {
367 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "The head was not initialized. Set it up.";
368 }
369 pool->SetNext(head_);
370 pool->SetPrev(nullptr);
371 head_ = pool;
372 }
373
374 template <typename AllocConfigT, typename LockConfigT>
375 // CC-OFFNXT(G.FMT.07) project code style
376 typename HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader *
FindSuitablePool(size_t size)377 HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::FindSuitablePool(size_t size)
378 {
379 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to find suitable pool for memory with size " << size;
380 MemoryPoolHeader *curPool = head_;
381 while (curPool != nullptr) {
382 if (curPool->GetPoolSize() >= size) {
383 break;
384 }
385 curPool = curPool->GetNext();
386 }
387 LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Found a pool with addr " << std::hex << curPool;
388 return curPool;
389 }
390
391 template <typename AllocConfigT, typename LockConfigT>
IsInThisList(MemoryPoolHeader * pool)392 bool HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::IsInThisList(MemoryPoolHeader *pool)
393 {
394 // NOTE(aemelenko): Do it only in debug build
395 MemoryPoolHeader *curPool = head_;
396 while (curPool != nullptr) {
397 if (curPool == pool) {
398 break;
399 }
400 curPool = curPool->GetNext();
401 }
402 return curPool != nullptr;
403 }
404
405 template <typename AllocConfigT, typename LockConfigT>
406 template <typename MemVisitor>
IterateAndPopOverPools(const MemVisitor & memVisitor)407 void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::IterateAndPopOverPools(
408 const MemVisitor &memVisitor)
409 {
410 MemoryPoolHeader *currentPool = head_;
411 while (currentPool != nullptr) {
412 MemoryPoolHeader *tmp = currentPool->GetNext();
413 this->Pop(currentPool);
414 memVisitor(currentPool, currentPool->GetPoolSize());
415 currentPool = tmp;
416 }
417 }
418
template <typename AllocConfigT, typename LockConfigT>
// CC-OFFNXT(G.FMT.07) project code style
/// Tries to cache "pool" in the reserved list (bounded by MAX_POOLS_AMOUNT,
/// pools up to MAX_POOL_SIZE, kept sorted by size ascending).
/// @return nullptr on success; otherwise the pool that must go to the free
///         list instead — either "pool" itself (rejected) or the smallest
///         reserved pool it evicted.
typename HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader *
HumongousObjAllocator<AllocConfigT, LockConfigT>::ReservedMemoryPools::TryToInsert(MemoryPoolHeader *pool)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to insert a pool in Reserved memory with addr " << std::hex << pool;
    if (pool->GetPoolSize() > MAX_POOL_SIZE) {
        // This pool is too big for inserting in Reserved
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "It is too big for Reserved memory";
        return pool;
    }
    if (elementsCount_ < MAX_POOLS_AMOUNT) {
        // We can insert the memory pool to Reserved
        SortedInsert(pool);
        elementsCount_++;
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "We don't have max amount of elements in Reserved list. Just insert.";
        return nullptr;
    }
    // We have the max amount of elements in the Reserved pools list
    // Try to swap the smallest pool (which is the first because it is ordered list)
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "We have max amount of elements in Reserved list.";
    MemoryPoolHeader *smallestPool = this->GetListHead();
    if (smallestPool == nullptr) {
        // It is the only variant when smallest_pool can be equal to nullptr.
        ASSERT(MAX_POOLS_AMOUNT == 0);
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "MAX_POOLS_AMOUNT for Reserved list is equal to zero. Do nothing";
        return pool;
    }
    ASSERT(smallestPool != nullptr);
    if (smallestPool->GetPoolSize() >= pool->GetPoolSize()) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "The pool is too small. Do not insert it";
        return pool;
    }
    // Just pop this element from the list. Do not update elements_count_ value
    // (one pool leaves, one pool enters: the count stays at MAX_POOLS_AMOUNT).
    MemoryPoolList::Pop(smallestPool);
    SortedInsert(pool);
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Swap the smallest element in Reserved list with addr " << std::hex
                                       << smallestPool;
    return smallestPool;
}
459
template <typename AllocConfigT, typename LockConfigT>
/// Inserts "pool" into the reserved list keeping it sorted by pool size,
/// ascending — TryToInsert relies on the head being the smallest pool.
void HumongousObjAllocator<AllocConfigT, LockConfigT>::ReservedMemoryPools::SortedInsert(MemoryPoolHeader *pool)
{
    size_t poolSize = pool->GetPoolSize();
    MemoryPoolHeader *listHead = this->GetListHead();
    if (listHead == nullptr) {
        this->Insert(pool);
        return;
    }
    if (listHead->GetPoolSize() >= poolSize) {
        // Do this comparison to not update head_ in this method
        // (Insert() is the only place that moves the head pointer).
        this->Insert(pool);
        return;
    }
    MemoryPoolHeader *cur = listHead;
    while (cur != nullptr) {
        if (cur->GetPoolSize() >= poolSize) {
            // Insert before "cur". cur->GetPrev() is non-null here: the head
            // case was already handled above, so cur is never the first node
            // when this branch is taken.
            pool->SetNext(cur);
            pool->SetPrev(cur->GetPrev());
            cur->GetPrev()->SetNext(pool);
            cur->SetPrev(pool);
            return;
        }
        MemoryPoolHeader *next = cur->GetNext();
        if (next == nullptr) {
            // "pool" is the largest: append at the tail.
            cur->SetNext(pool);
            pool->SetNext(nullptr);
            pool->SetPrev(cur);
            return;
        }
        cur = next;
    }
}
493
template <typename AllocConfigT, typename LockConfigT>
/// True if "obj" was allocated by this allocator.
/// NOTE(review): delegates to the Unsafe variant without taking allocFreeLock_ —
/// presumably callers guarantee no concurrent alloc/free; verify at call sites.
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::ContainObject(const ObjectHeader *obj)
{
    return AllocatedByHumongousObjAllocatorUnsafe(const_cast<ObjectHeader *>(obj));
}
499
template <typename AllocConfigT, typename LockConfigT>
/// True if "obj" is the live object of its pool: its address equals the pool's
/// recorded payload address. A freed pool has no occupant, so the comparison
/// fails. Precondition: the object belongs to this allocator.
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::IsLive(const ObjectHeader *obj)
{
    ASSERT(ContainObject(obj));
    // Pools are PAGE_SIZE aligned, so masking the object address yields its pool header.
    auto *memHeader = static_cast<MemoryPoolHeader *>(ToVoidPtr(ToUintPtr(obj) & PAGE_SIZE_MASK));
    ASSERT(PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(
               // CC-OFFNXT(G.FMT.06,G.FMT.06-CPP) project code style
               static_cast<void *>(const_cast<ObjectHeader *>(obj))) == static_cast<void *>(memHeader));
    return memHeader->GetMemory() == static_cast<void *>(const_cast<ObjectHeader *>(obj));
}
510
511 #undef LOG_HUMONGOUS_OBJ_ALLOCATOR
512
513 } // namespace ark::mem
514
515 #endif // PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_INL_H
516