1 /**
2  * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #ifndef PANDA_MEM_FREELIST_ALLOCATOR_INL_H
16 #define PANDA_MEM_FREELIST_ALLOCATOR_INL_H
17 
18 #include "libpandabase/utils/logger.h"
19 #include "runtime/mem/alloc_config.h"
20 #include "runtime/mem/freelist_allocator.h"
21 #include "runtime/mem/object_helpers.h"
22 
23 namespace ark::mem {
24 
25 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
26 #define LOG_FREELIST_ALLOCATOR(level) LOG(level, ALLOC) << "FreeListAllocator: "
27 
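// A minimal usage sketch of this allocator (illustrative only: the concrete
// MemStatsType instance, the pool source, and the SpaceType value depend on
// the embedding runtime, and Alloc is assumed to have a default alignment
// argument in its declaration in freelist_allocator.h):
//
//   FreeListAllocator<AllocConfigT, LockConfigT> allocator(memStats, spaceType);
//   allocator.AddMemoryPool(poolMem, poolSize);  // header-aligned pool memory
//   void *mem = allocator.Alloc(objectSize);     // nullptr if nothing fits
//   // ... use mem ...
//   allocator.Free(mem);
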
template <typename AllocConfigT, typename LockConfigT>
FreeListAllocator<AllocConfigT, LockConfigT>::FreeListAllocator(MemStatsType *memStats, SpaceType typeAllocation)
    : typeAllocation_(typeAllocation), memStats_(memStats)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Initializing FreeListAllocator";
    ASAN_POISON_MEMORY_REGION(&segregatedList_, sizeof(segregatedList_));
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Initializing FreeListAllocator finished";
}

template <typename AllocConfigT, typename LockConfigT>
FreeListAllocator<AllocConfigT, LockConfigT>::~FreeListAllocator()
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Destroying FreeListAllocator";
    ASAN_UNPOISON_MEMORY_REGION(&segregatedList_, sizeof(segregatedList_));
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Destroying FreeListAllocator finished";
}

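// Summary of the allocation algorithm implemented below, for orientation:
//  1. Round the request up to FREELIST_ALLOCATOR_MIN_SIZE and to the default alignment.
//  2. Take a best-fit block from the segregated list (nullptr if the request
//     exceeds FREELIST_MAX_ALLOC_SIZE or nothing fits).
//  3. If the block's payload is misaligned for the requested alignment, build
//     a padding header just before the aligned address.
//  4. Split the remainder off into a new free block when it is big enough,
//     and try to coalesce that remainder with its free neighbors.
//  5. Record the allocation in mem stats and in the crossing map.
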
template <typename AllocConfigT, typename LockConfigT>
template <bool NEED_LOCK>
void *FreeListAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align)
{
    os::memory::WriteLockHolder<LockConfigT, NEED_LOCK> wlock(allocFreeLock_);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to allocate object with size " << std::dec << size;
    size_t allocSize = size;
    if (allocSize < FREELIST_ALLOCATOR_MIN_SIZE) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Requested size is less than the minimum for this allocator";
        allocSize = FREELIST_ALLOCATOR_MIN_SIZE;
    }
    size_t defAlignedSize = AlignUp(allocSize, GetAlignmentInBytes(FREELIST_DEFAULT_ALIGNMENT));
    if (defAlignedSize > allocSize) {
        allocSize = defAlignedSize;
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Align size to default alignment. New size = " << allocSize;
    }
    if (allocSize > FREELIST_MAX_ALLOC_SIZE) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Requested size is too big for the free list allocator. Return nullptr";
        return nullptr;
    }
    // Get a best-fit memory piece from the segregated list.
    MemoryBlockHeader *memoryBlock = GetFromSegregatedList(allocSize, align);
    if (memoryBlock == nullptr) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Couldn't allocate memory";
        return nullptr;
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Found memory block at addr = " << std::hex << memoryBlock << " with size "
                                  << std::dec << memoryBlock->GetSize();
    ASSERT(memoryBlock->GetSize() >= allocSize);
    uintptr_t memoryPointer = ToUintPtr(memoryBlock->GetMemory());
    bool requiredPadding = false;
    if ((memoryPointer & (GetAlignmentInBytes(align) - 1)) != 0U) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Raw memory is not aligned as we need. Create a special header for padding";
        // The raw memory pointer is not aligned as expected,
        // so we need to create an extra header inside the block
        uintptr_t alignedMemoryPointer = AlignUp(memoryPointer + sizeof(MemoryBlockHeader), GetAlignmentInBytes(align));
        size_t sizeWithPadding = allocSize + (alignedMemoryPointer - memoryPointer);
        ASSERT(memoryBlock->GetSize() >= sizeWithPadding);
        auto paddingHeader =
            static_cast<MemoryBlockHeader *>(ToVoidPtr(alignedMemoryPointer - sizeof(MemoryBlockHeader)));
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Created padding header at addr " << std::hex << paddingHeader;
        paddingHeader->Initialize(allocSize, memoryBlock);
        paddingHeader->SetAsPaddingHeader();
        // Update values
        memoryPointer = alignedMemoryPointer;
        allocSize = sizeWithPadding;
        requiredPadding = true;
    }
    if (CanCreateNewBlockFromRemainder(memoryBlock, allocSize)) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Created new memory block from the remainder part:";
        MemoryBlockHeader *newFreeBlock = SplitMemoryBlocks(memoryBlock, allocSize);
        LOG_FREELIST_ALLOCATOR(DEBUG) << "New block started at addr " << std::hex << newFreeBlock << " with size "
                                      << std::dec << newFreeBlock->GetSize();
        memoryBlock->SetUsed();
        FreeListHeader *newFreeListElement = TryToCoalescing(newFreeBlock);
        ASSERT(!newFreeListElement->IsUsed());
        AddToSegregatedList(newFreeListElement);
    } else {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Can't create a new block from the remainder. Use the full block.";
        memoryBlock->SetUsed();
    }
    if (requiredPadding) {
        // We must update some values in the current memory_block
        uintptr_t paddingSize = memoryPointer - ToUintPtr(memoryBlock->GetMemory());
        if (paddingSize == sizeof(MemoryBlockHeader)) {
            LOG_FREELIST_ALLOCATOR(DEBUG) << "SetHasPaddingHeaderAfter";
            memoryBlock->SetPaddingHeaderStoredAfterHeader();
        } else {
            LOG_FREELIST_ALLOCATOR(DEBUG) << "SetHasPaddingSizeAfter, size = " << paddingSize;
            memoryBlock->SetPaddingSizeStoredAfterHeader();
            memoryBlock->SetPaddingSize(paddingSize);
        }
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Allocated memory at addr " << std::hex << ToVoidPtr(memoryPointer);
    {
        AllocConfigT::OnAlloc(memoryBlock->GetSize(), typeAllocation_, memStats_);
        // This is not the object size itself, because we can't compute it from the MemoryBlockHeader
        // structure at the Free call. It is an approximation.
        size_t currentSize =
            ToUintPtr(memoryBlock) + memoryBlock->GetSize() + sizeof(MemoryBlockHeader) - memoryPointer;
        AllocConfigT::AddToCrossingMap(ToVoidPtr(memoryPointer), currentSize);
    }
    ASAN_UNPOISON_MEMORY_REGION(ToVoidPtr(memoryPointer), size);
    AllocConfigT::MemoryInit(ToVoidPtr(memoryPointer));
    return ToVoidPtr(memoryPointer);
}

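// Block layout produced by the padding branch of Alloc above (a sketch;
// proportions are not to scale):
//
//   |MemoryBlockHeader|...padding...|padding header|<---- aligned payload ---->|
//   ^memoryBlock                                   ^memoryPointer (aligned)
//
// The padding header sits exactly sizeof(MemoryBlockHeader) bytes before the
// aligned payload and points back to memoryBlock, so GetFreeListMemoryHeader()
// can recover the real block header from the payload address on Free.
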
template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::Free(void *mem)
{
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    FreeUnsafe(mem);
}

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::FreeUnsafe(void *mem)
{
    if (UNLIKELY(mem == nullptr)) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to free memory at invalid addr 0";
        return;
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to free memory at addr " << std::hex << mem;
#ifndef NDEBUG
    if (!AllocatedByFreeListAllocatorUnsafe(mem)) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to free memory not allocated by this allocator";
        return;
    }
#endif  // !NDEBUG

    MemoryBlockHeader *memoryHeader = GetFreeListMemoryHeader(mem);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "It is a memory with header " << std::hex << memoryHeader << " with size "
                                  << std::dec << memoryHeader->GetSize() << " (probably with padding)";
    {
        AllocConfigT::OnFree(memoryHeader->GetSize(), typeAllocation_, memStats_);
        // This is not the object size itself, because we can't compute it from the MemoryBlockHeader structure.
        // It is an approximation.
        size_t currentSize =
            ToUintPtr(memoryHeader) + memoryHeader->GetSize() + sizeof(MemoryBlockHeader) - ToUintPtr(mem);
        MemoryBlockHeader *prevUsedHeader = memoryHeader->GetPrevUsedHeader();
        void *prevObject = nullptr;
        size_t prevSize = 0;
        if (prevUsedHeader != nullptr) {
            prevObject = prevUsedHeader->GetMemory();
            prevSize = ToUintPtr(prevUsedHeader) + prevUsedHeader->GetSize() + sizeof(MemoryBlockHeader) -
                       ToUintPtr(prevUsedHeader->GetMemory());
        }
        MemoryBlockHeader *nextUsedHeader = memoryHeader->GetNextUsedHeader();
        void *nextObject = nullptr;
        if (nextUsedHeader != nullptr) {
            nextObject = nextUsedHeader->GetMemory();
        }
        AllocConfigT::RemoveFromCrossingMap(mem, currentSize, nextObject, prevObject, prevSize);
    }
    memoryHeader->SetUnused();
    memoryHeader->SetCommonHeader();
    FreeListHeader *newFreeListElement = TryToCoalescing(memoryHeader);
    ASAN_POISON_MEMORY_REGION(newFreeListElement, newFreeListElement->GetSize() + sizeof(MemoryBlockHeader));
    AddToSegregatedList(newFreeListElement);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Freed memory at addr " << std::hex << mem;
}

// NOTE(aemelenko): We can create a mutex for each pool to increase performance,
// but it requires a pool alignment restriction
// (we must compute the memory pool header addr from a memory block addr stored inside it)

// During a Collect method call we iterate over memory blocks in each pool.
// This iteration can cause race conditions in multithreaded mode.
// E.g. in this scenario:
// |-------|---------|-------------------|------------------------------------------------------------------|
// | time: | Thread: |    Description:   |                         Memory footprint:                        |
// |-------|---------|-------------------|------------------------------------------------------------------|
// |       |         | Thread0 starts    |  |..............Free  Block.............|...Allocated block...|  |
// |       |         | iterating         |  |                                                               |
// |   0   |    0    | over mem blocks   |  current block pointer                                           |
// |       |         | and current block |                                                                  |
// |       |         | is a free block   |                                                                  |
// |-------|---------|-------------------|------------------------------------------------------------------|
// |       |         | Thread1           |  |...Allocated block...|................|...Allocated block...|  |
// |   1   |    1    | allocates memory  |                        |                                         |
// |       |         | at this block     |               Unused memory piece                                |
// |       |         |                   |                                                                  |
// |-------|---------|-------------------|------------------------------------------------------------------|
// |       |         | Thread1 sets up   |  |...Allocated block...|................|...Allocated block...|  |
// |   2   |    1    | values in this    |  |                                                               |
// |       |         | block header      |  change size of this block                                       |
// |       |         | (sets up size)    |                                                                  |
// |-------|---------|-------------------|------------------------------------------------------------------|
// |       |         | Thread0 reads     |  |...Allocated block...|................|...Allocated block...|  |
// |       |         | some garbage or   |                                                                  |
// |   3   |    0    | wrong value to    |  current block pointer - points to wrong memory                  |
// |       |         | calculate next    |                                                                  |
// |       |         | block pointer     |                                                                  |
// |-------|---------|-------------------|------------------------------------------------------------------|
//
// Therefore, we must unlock the allocator's alloc/free methods only
// when we hold a pointer to an allocated block (i.e. IsUsed())

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::Collect(const GCObjectVisitor &deathCheckerFn)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Collecting started";
    // NOTE(aemelenko): Looks like we can unlock alloc_free_lock_ for objects that are not dead during collection
    IterateOverObjects([this, &deathCheckerFn](ObjectHeader *mem) {
        if (deathCheckerFn(mem) == ObjectStatus::DEAD_OBJECT) {
            LOG(DEBUG, GC) << "DELETE OBJECT " << GetDebugInfoAboutObject(mem);
            FreeUnsafe(mem);
        }
    });
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Collecting finished";
}

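// A hedged example of driving a collection pass with the visitor above.
// DEAD_OBJECT comes from the check in Collect; the ALIVE_OBJECT member name and
// the IsMarked() liveness predicate are assumptions supplied by the caller's GC:
//
//   allocator.Collect([](ObjectHeader *obj) {
//       return IsMarked(obj) ? ObjectStatus::ALIVE_OBJECT : ObjectStatus::DEAD_OBJECT;
//   });
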
template <typename AllocConfigT, typename LockConfigT>
template <typename ObjectVisitor>
void FreeListAllocator<AllocConfigT, LockConfigT>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Iterating over objects started";
    MemoryPoolHeader *currentPool = nullptr;
    {
        // Do this under lock because the mempool_tail_ pointer can be changed by other threads
        // in AddMemoryPool method calls.
        // NOTE: We add each new pool at mempool_tail_. Therefore, we can read it once and iterate to the head.
        os::memory::ReadLockHolder rlock(allocFreeLock_);
        currentPool = mempoolTail_;
    }
    while (currentPool != nullptr) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "  iterate over " << std::hex << currentPool;
        MemoryBlockHeader *currentMemHeader = currentPool->GetFirstMemoryHeader();
        while (currentMemHeader != nullptr) {
            // Lock against any possible memory header changes in the allocator.
            os::memory::WriteLockHolder wlock(allocFreeLock_);
            if (currentMemHeader->IsUsed()) {
                // We can call Free inside the visitor; that's why we should lock everything
                objectVisitor(static_cast<ObjectHeader *>(currentMemHeader->GetMemory()));
                // At this point, current_mem_header can point to a nonexistent memory block
                // (e.g., if we coalesced it with the previous free block).
                // However, we still have a valid MemoryBlockHeader class field here.
                // NOTE: We first coalesce the current_mem_header block with the next one (if it is free)
                // and update the current block size. Therefore, we have a valid pointer to the next memory block.

                // After calling the visitor, current_mem_header may point to a free block of memory,
                // which can be modified by an Alloc call in another thread.
                // Therefore, do not unlock until we have a pointer to a Used block.
                currentMemHeader = currentMemHeader->GetNextUsedHeader();
            } else {
                // The current header is marked Unused, so it can be modified by an allocation in another thread.
                // So, read the next header while still holding the lock.
                currentMemHeader = currentMemHeader->GetNextUsedHeader();
            }
            // We now have a pointer to a Used memory block, or nullptr.
            // Therefore, we can unlock.
        }
        currentPool = currentPool->GetPrev();
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Iterating over objects finished";
}

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::AddMemoryPool(void *mem, size_t size)
{
    // Lock alloc/free because we add a new block to the segregated list here.
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    ASSERT(mem != nullptr);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Add memory pool to FreeListAllocator from " << std::hex << mem << " with size "
                                  << std::dec << size;
    ASSERT((ToUintPtr(mem) & (sizeof(MemoryBlockHeader) - 1)) == 0U);
    auto mempoolHeader = static_cast<MemoryPoolHeader *>(mem);
    if (mempoolHead_ == nullptr) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Initialize mempool_head_";
        mempoolHeader->Initialize(size, nullptr, nullptr);
        mempoolHead_ = mempoolHeader;
        mempoolTail_ = mempoolHeader;
    } else {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Add this memory pool at the tail after block " << std::hex << mempoolTail_;
        mempoolHeader->Initialize(size, mempoolTail_, nullptr);
        mempoolTail_->SetNext(mempoolHeader);
        mempoolTail_ = mempoolHeader;
    }
    MemoryBlockHeader *firstMemHeader = mempoolHeader->GetFirstMemoryHeader();
    firstMemHeader->Initialize(size - sizeof(MemoryBlockHeader) - sizeof(MemoryPoolHeader), nullptr);
    firstMemHeader->SetLastBlockInPool();
    AddToSegregatedList(static_cast<FreeListHeader *>(firstMemHeader));
    ASAN_POISON_MEMORY_REGION(mem, size);
    AllocConfigT::InitializeCrossingMapForMemory(mem, size);
    return true;
}

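// Pool layout right after AddMemoryPool above (a sketch): the pool header is
// followed by a single free block spanning the rest of the pool:
//
//   |MemoryPoolHeader|MemoryBlockHeader|............ free memory ............|
//   ^mem             ^firstMemHeader   ^firstMemHeader->GetMemory()
//
// The first block's size is therefore
// size - sizeof(MemoryPoolHeader) - sizeof(MemoryBlockHeader), as initialized above.
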
template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void FreeListAllocator<AllocConfigT, LockConfigT>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    // We call this method to return pools to the system.
    // Therefore, delete all objects to clear all external dependencies
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Clear all objects inside the allocator";
    // Lock everything to avoid race conditions.
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    MemoryPoolHeader *currentPool = mempoolHead_;
    while (currentPool != nullptr) {
        // Use tmp in case the visitor has side effects
        MemoryPoolHeader *tmp = currentPool->GetNext();
        AllocConfigT::RemoveCrossingMapForMemory(currentPool, currentPool->GetSize());
        memVisitor(currentPool, currentPool->GetSize());
        currentPool = tmp;
    }
}

template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void FreeListAllocator<AllocConfigT, LockConfigT>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    // Lock everything to avoid race conditions.
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    MemoryPoolHeader *currentPool = mempoolHead_;
    while (currentPool != nullptr) {
        // Use tmp in case the visitor has side effects
        MemoryPoolHeader *tmp = currentPool->GetNext();
        MemoryBlockHeader *firstBlock = currentPool->GetFirstMemoryHeader();
        if (firstBlock->IsLastBlockInPool() && !firstBlock->IsUsed()) {
            // We have only one big memory block in this pool,
            // and this block is not used
            LOG_FREELIST_ALLOCATOR(DEBUG)
                << "VisitAndRemoveFreePools: Remove free memory pool from allocator with start addr " << std::hex
                << currentPool << " and size " << std::dec << currentPool->GetSize()
                << " bytes with the first block at addr " << std::hex << firstBlock << " and size " << std::dec
                << firstBlock->GetSize();
            auto freeHeader = static_cast<FreeListHeader *>(firstBlock);
            freeHeader->PopFromFreeList();
            MemoryPoolHeader *next = currentPool->GetNext();
            MemoryPoolHeader *prev = currentPool->GetPrev();
            if (next != nullptr) {
                ASSERT(next->GetPrev() == currentPool);
                next->SetPrev(prev);
            } else {
                // This means that the current pool is the last one
                ASSERT(mempoolTail_ == currentPool);
                LOG_FREELIST_ALLOCATOR(DEBUG) << "VisitAndRemoveFreePools: Change pools tail pointer";
                mempoolTail_ = prev;
            }
            if (prev != nullptr) {
                ASSERT(prev->GetNext() == currentPool);
                prev->SetNext(next);
            } else {
                // This means that the current pool is the first one
                ASSERT(mempoolHead_ == currentPool);
                LOG_FREELIST_ALLOCATOR(DEBUG) << "VisitAndRemoveFreePools: Change pools head pointer";
                mempoolHead_ = next;
            }
            AllocConfigT::RemoveCrossingMapForMemory(currentPool, currentPool->GetSize());
            memVisitor(currentPool, currentPool->GetSize());
        }
        currentPool = tmp;
    }
    segregatedList_.ReleaseFreeMemoryBlocks();
}

template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void FreeListAllocator<AllocConfigT, LockConfigT>::IterateOverObjectsInRange(const MemVisitor &memVisitor,
                                                                             void *leftBorder, void *rightBorder)
{
    // NOTE: The current implementation doesn't look at the PANDA_CROSSING_MAP_MANAGE_CROSSED_BORDER flag
    LOG_FREELIST_ALLOCATOR(DEBUG) << "FreeListAllocator::IterateOverObjectsInRange for range [" << std::hex
                                  << leftBorder << ", " << rightBorder << "]";
    ASSERT(ToUintPtr(rightBorder) >= ToUintPtr(leftBorder));
    // NOTE(aemelenko): These are temporary asserts because we can't do anything
    // if the range crosses different allocators' memory pools
    ASSERT(ToUintPtr(rightBorder) - ToUintPtr(leftBorder) == (CrossingMapSingleton::GetCrossingMapGranularity() - 1U));
    ASSERT((ToUintPtr(rightBorder) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))) ==
           (ToUintPtr(leftBorder) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))));
    MemoryBlockHeader *firstMemoryHeader = nullptr;
    {
        // Do this under lock because the pointer to the first object in the CrossingMap can be changed during
        // a CrossingMap call.
        os::memory::ReadLockHolder rlock(allocFreeLock_);
        if (!AllocatedByFreeListAllocatorUnsafe(leftBorder) && !AllocatedByFreeListAllocatorUnsafe(rightBorder)) {
            LOG_FREELIST_ALLOCATOR(DEBUG) << "This memory range is not covered by this allocator";
            return;
        }
        void *objAddr = AllocConfigT::FindFirstObjInCrossingMap(leftBorder, rightBorder);
        if (objAddr == nullptr) {
            return;
        }
        ASSERT(AllocatedByFreeListAllocatorUnsafe(objAddr));
        MemoryBlockHeader *memoryHeader = GetFreeListMemoryHeader(objAddr);
        // The memory header points to an object which starts in this range or in the previous one.
        // In the second case, this object may not cross the border of the current range
        // (but there is an object stored after it which crosses the current range).
        ASSERT(ToUintPtr(memoryHeader->GetMemory()) <= ToUintPtr(rightBorder));
        firstMemoryHeader = memoryHeader;
    }
    ASSERT(firstMemoryHeader != nullptr);
    // Let's start the iteration:
    MemoryBlockHeader *currentMemHeader = firstMemoryHeader;
    LOG_FREELIST_ALLOCATOR(DEBUG) << "FreeListAllocator::IterateOverObjectsInRange start iterating from obj with addr "
                                  << std::hex << firstMemoryHeader->GetMemory();
    while (currentMemHeader != nullptr) {
        // We don't lock the allocator because we point to a used block which can't be changed
        // during the iteration in range.
        void *objAddr = currentMemHeader->GetMemory();
        if (ToUintPtr(objAddr) > ToUintPtr(rightBorder)) {
            // Iteration is over
            break;
        }
        LOG_FREELIST_ALLOCATOR(DEBUG)
            << "FreeListAllocator::IterateOverObjectsInRange found obj in this range with addr " << std::hex << objAddr;
        memVisitor(static_cast<ObjectHeader *>(objAddr));
        {
            os::memory::ReadLockHolder rlock(allocFreeLock_);
            currentMemHeader = currentMemHeader->GetNextUsedHeader();
        }
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "FreeListAllocator::IterateOverObjectsInRange finished";
}

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::CoalesceMemoryBlocks(MemoryBlockHeader *firstBlock,
                                                                        MemoryBlockHeader *secondBlock)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "CoalesceMemoryBlock: "
                                  << "first block = " << std::hex << firstBlock << " with size " << std::dec
                                  << firstBlock->GetSize() << " ; second block = " << std::hex << secondBlock
                                  << " with size " << std::dec << secondBlock->GetSize();
    ASSERT(firstBlock != nullptr);
    ASSERT(firstBlock->GetNextHeader() == secondBlock);
    ASSERT(secondBlock != nullptr);
    ASSERT(firstBlock->CanBeCoalescedWithNext() || secondBlock->CanBeCoalescedWithPrev());
    firstBlock->Initialize(firstBlock->GetSize() + secondBlock->GetSize() + sizeof(MemoryBlockHeader),
                           firstBlock->GetPrevHeader());
    if (secondBlock->IsLastBlockInPool()) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "CoalesceMemoryBlock: second_block was the last in a pool";
        firstBlock->SetLastBlockInPool();
    } else {
        ASSERT(firstBlock->GetNextHeader() != nullptr);
        firstBlock->GetNextHeader()->SetPrevHeader(firstBlock);
    }
}

template <typename AllocConfigT, typename LockConfigT>
freelist::MemoryBlockHeader *FreeListAllocator<AllocConfigT, LockConfigT>::SplitMemoryBlocks(
    MemoryBlockHeader *memoryBlock, size_t firstBlockSize)
{
    ASSERT(memoryBlock->GetSize() > (firstBlockSize + sizeof(MemoryBlockHeader)));
    ASSERT(!memoryBlock->IsUsed());
    auto secondBlock =
        static_cast<MemoryBlockHeader *>(ToVoidPtr(ToUintPtr(memoryBlock->GetMemory()) + firstBlockSize));
    size_t secondBlockSize = memoryBlock->GetSize() - firstBlockSize - sizeof(MemoryBlockHeader);
    secondBlock->Initialize(secondBlockSize, memoryBlock);
    if (memoryBlock->IsLastBlockInPool()) {
        secondBlock->SetLastBlockInPool();
    } else {
        ASSERT(secondBlock->GetNextHeader() != nullptr);
        secondBlock->GetNextHeader()->SetPrevHeader(secondBlock);
    }
    memoryBlock->Initialize(firstBlockSize, memoryBlock->GetPrevHeader());
    return secondBlock;
}

template <typename AllocConfigT, typename LockConfigT>
freelist::MemoryBlockHeader *FreeListAllocator<AllocConfigT, LockConfigT>::GetFreeListMemoryHeader(void *mem)
{
    ASSERT(mem != nullptr);
    auto memoryHeader = static_cast<MemoryBlockHeader *>(ToVoidPtr(ToUintPtr(mem) - sizeof(MemoryBlockHeader)));
    if (!memoryHeader->IsPaddingHeader()) {
        // We got the correct header of this memory, just return it
        return memoryHeader;
    }
    // This is aligned memory with some free space before the memory pointer.
    // The previous header must point to the correct header of this memory block
    LOG_FREELIST_ALLOCATOR(DEBUG) << "It is a memory with padding at head";
    return memoryHeader->GetPrevHeader();
}

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::AllocatedByFreeListAllocator(void *mem)
{
    os::memory::ReadLockHolder rlock(allocFreeLock_);
    return AllocatedByFreeListAllocatorUnsafe(mem);
}

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::AllocatedByFreeListAllocatorUnsafe(void *mem)
{
    // NOTE(aemelenko): Create a more complex solution
    MemoryPoolHeader *currentPool = mempoolHead_;
    while (currentPool != nullptr) {
        // This assert means that we asked about memory inside a MemoryPoolHeader
        ASSERT(ToUintPtr(currentPool->GetFirstMemoryHeader()) > ToUintPtr(mem) ||
               ToUintPtr(currentPool) < ToUintPtr(mem));
        if ((ToUintPtr(currentPool->GetFirstMemoryHeader()) < ToUintPtr(mem)) &&
            ((ToUintPtr(currentPool) + currentPool->GetSize()) > ToUintPtr(mem))) {
            return true;
        }
        currentPool = currentPool->GetNext();
    }
    return false;
}

template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *FreeListAllocator<AllocConfigT, LockConfigT>::TryToCoalescing(MemoryBlockHeader *memoryHeader)
{
    ASSERT(memoryHeader != nullptr);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "TryToCoalescing memory block";
    if (memoryHeader->CanBeCoalescedWithNext()) {
        ASSERT(!memoryHeader->GetNextHeader()->IsUsed());
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Coalesce with next block";
        auto nextFreeList = static_cast<FreeListHeader *>(memoryHeader->GetNextHeader());
        ASSERT(nextFreeList != nullptr);
        // Pop this free list element from the list
        nextFreeList->PopFromFreeList();
        // Combine these two blocks together
        CoalesceMemoryBlocks(memoryHeader, static_cast<MemoryBlockHeader *>(nextFreeList));
    }
    if (memoryHeader->CanBeCoalescedWithPrev()) {
        ASSERT(!memoryHeader->GetPrevHeader()->IsUsed());
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Coalesce with prev block";
        auto prevFreeList = static_cast<FreeListHeader *>(memoryHeader->GetPrevHeader());
        // Pop this free list element from the list
        prevFreeList->PopFromFreeList();
        // Combine these two blocks together
        CoalesceMemoryBlocks(static_cast<MemoryBlockHeader *>(prevFreeList), memoryHeader);
        memoryHeader = static_cast<MemoryBlockHeader *>(prevFreeList);
    }
    return static_cast<FreeListHeader *>(memoryHeader);
}

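// Coalescing sketch for TryToCoalescing above: a free block merges first with
// its free successor, then with its free predecessor, so at most three
// adjacent blocks collapse into one:
//
//   |free prev|hdr|current|hdr|free next|   ->   |.......one free block.......|
//
// Each merged neighbor is popped from its segregated list first; the caller is
// responsible for re-inserting the resulting block (see Alloc and FreeUnsafe).
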
template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::AddToSegregatedList(FreeListHeader *freeListElement)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "AddToSegregatedList: Add new block into segregated-list with size "
                                  << freeListElement->GetSize();
    segregatedList_.AddMemoryBlock(freeListElement);
}

template <typename AllocConfigT, typename LockConfigT>
freelist::MemoryBlockHeader *FreeListAllocator<AllocConfigT, LockConfigT>::GetFromSegregatedList(size_t size,
                                                                                                 Alignment align)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "GetFromSegregatedList: Try to find memory for size " << size << " with alignment "
                                  << align;
    size_t alignedSize = size;
    if (align != FREELIST_DEFAULT_ALIGNMENT) {
        // NOTE(aemelenko): This is a crude but fast solution with bigger fragmentation.
        // It would be better to add this value instead, but I'm not 100% sure about all corner cases:
        // (GetAlignmentInBytes(align) + sizeof(MemoryBlockHeader) - GetAlignmentInBytes(FREELIST_DEFAULT_ALIGNMENT))
        alignedSize += (GetAlignmentInBytes(align) + sizeof(MemoryBlockHeader));
    }
    FreeListHeader *memBlock = segregatedList_.FindMemoryBlock(alignedSize);
    if (memBlock != nullptr) {
        memBlock->PopFromFreeList();
        ASSERT((AlignUp(ToUintPtr(memBlock->GetMemory()), GetAlignmentInBytes(align)) -
                ToUintPtr(memBlock->GetMemory()) + size) <= memBlock->GetSize());
    }
    return static_cast<MemoryBlockHeader *>(memBlock);
}

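// Worked example for the over-reservation above (illustrative numbers,
// assuming GetAlignmentInBytes(align) == 64 and a 16-byte MemoryBlockHeader):
// a 40-byte request with 64-byte alignment searches the segregated list for
// 40 + 64 + 16 = 120 bytes, which guarantees that an aligned payload plus a
// possible padding header fit somewhere inside whatever block is returned,
// as the ASSERT above double-checks.
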
template <typename AllocConfigT, typename LockConfigT>
ATTRIBUTE_NO_SANITIZE_ADDRESS void FreeListAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader::Initialize(
    size_t size, MemoryPoolHeader *prev, MemoryPoolHeader *next)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Init a new memory pool with size " << size << " with prev link = " << std::hex
                                  << prev << " and next link = " << next;
    ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
    size_ = size;
    prev_ = prev;
    next_ = next;
    ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
}

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::AddMemoryBlock(FreeListHeader *freelistHeader)
{
    size_t size = freelistHeader->GetSize();
    size_t index = GetIndex(size);
    if (SEGREGATED_LIST_FAST_INSERT) {
        freeMemoryBlocks_[index].InsertNext(freelistHeader);
    } else {
        FreeListHeader *mostSuitable = FindTheMostSuitableBlockInOrderedList(index, size);
        // The most suitable block must have exactly this size,
        // or be the last block with a bigger size in the ordered list,
        // or be nullptr
        if (mostSuitable == nullptr) {
            freeMemoryBlocks_[index].InsertNext(freelistHeader);
        } else {
            mostSuitable->InsertNext(freelistHeader);
        }
    }
}

template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::FindMemoryBlock(size_t size)
{
    size_t index = GetIndex(size);
    FreeListHeader *head = GetFirstBlock(index);
    FreeListHeader *suitableBlock = nullptr;
    if (head != nullptr) {
        // We have some memory in this range. Try to find a suitable block.
        if (SEGREGATED_LIST_FAST_INSERT) {
            // We don't have any order in inserting blocks,
            // and we need to iterate over the whole list
            suitableBlock = FindSuitableBlockInList(head, size);
        } else {
            // All blocks in this list are in descending order.
            // We can check the first one to determine if we have a block
            // with this size or not.
            suitableBlock = TryGetSuitableBlockBySize(head, size, index);
        }
    }

    if (suitableBlock == nullptr) {
        // We didn't find a block in the head list. Try to find one in the other lists.
        index++;
        for (; index < SEGREGATED_LIST_SIZE; index++) {
            if (GetFirstBlock(index) == nullptr) {
                continue;
            }
            if constexpr (SEGREGATED_LIST_FAST_INSERT || SEGREGATED_LIST_FAST_EXTRACT) {
                // Just get the first one
                suitableBlock = GetFirstBlock(index);
            } else {
                suitableBlock = FindTheMostSuitableBlockInOrderedList(index, size);
            }
            break;
        }
    }

    if (suitableBlock != nullptr) {
        ASSERT(suitableBlock->GetSize() >= size);
    }

    return suitableBlock;
}

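// Search-order note for FindMemoryBlock above: the size class matching the
// request is scanned first; if it yields nothing, the first non-empty larger
// size class is used. Since each larger class's minimum block size exceeds the
// requested size, any block taken from it should fit, which the final ASSERT
// above double-checks.
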
template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::FindSuitableBlockInList(
    freelist::FreeListHeader *head, size_t size)
{
    FreeListHeader *suitableBlock = nullptr;
    for (FreeListHeader *current = head; current != nullptr; current = current->GetNextFree()) {
        if (current->GetSize() < size) {
            continue;
        }
        if constexpr (SEGREGATED_LIST_FAST_EXTRACT) {
            suitableBlock = current;
            break;
        }
        // Best-fit scan: take the first fitting block, then keep the smallest fitting one.
        if (suitableBlock == nullptr) {
            suitableBlock = current;
        } else {
            if (suitableBlock->GetSize() > current->GetSize()) {
                suitableBlock = current;
            }
        }
        if (suitableBlock->GetSize() == size) {
            break;
        }
    }
    return suitableBlock;
}

template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::TryGetSuitableBlockBySize(
    freelist::FreeListHeader *head, size_t size, size_t index)
{
    if (head->GetSize() >= size) {
        if constexpr (SEGREGATED_LIST_FAST_EXTRACT) {
            // Just get the first element
            return head;
        }
        // Try to find the most suitable memory for this size.
        return FindTheMostSuitableBlockInOrderedList(index, size);
    }
    return nullptr;
}

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::ReleaseFreeMemoryBlocks()
{
    for (size_t index = 0; index < SEGREGATED_LIST_SIZE; index++) {
        FreeListHeader *current = GetFirstBlock(index);
        while (current != nullptr) {
            size_t blockSize = current->GetSize();
            // Start address from which we can release pages
            uintptr_t startAddr = AlignUp(ToUintPtr(current) + sizeof(FreeListHeader), os::mem::GetPageSize());
            // End address before which we can release pages
            uintptr_t endAddr =
                os::mem::AlignDownToPageSize(ToUintPtr(current) + sizeof(MemoryBlockHeader) + blockSize);
            if (startAddr < endAddr) {
                os::mem::ReleasePages(startAddr, endAddr);
            }
            current = current->GetNextFree();
        }
    }
}

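// A note on the page-release bounds above (assuming 4 KiB pages for
// illustration): startAddr is rounded up to the first page boundary past the
// FreeListHeader, and endAddr is rounded down from the block's end, so only
// whole pages lying fully inside the free block's payload are returned to the
// OS; the block headers and free-list links always stay mapped.
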
template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *
FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::FindTheMostSuitableBlockInOrderedList(size_t index,
                                                                                                    size_t size)
{
    static_assert(!SEGREGATED_LIST_FAST_INSERT);
    FreeListHeader *current = GetFirstBlock(index);
    if (current == nullptr) {
        return nullptr;
    }
    size_t currentSize = current->GetSize();
    if (currentSize < size) {
        return nullptr;
    }
    while (currentSize != size) {
        FreeListHeader *next = current->GetNextFree();
        if (next == nullptr) {
            // The current free list header is the last in the list.
            break;
        }
        size_t nextSize = next->GetSize();
        if (nextSize < size) {
            // The next free block is smaller than size,
            // so we don't need to iterate anymore.
            break;
        }
        current = next;
        currentSize = nextSize;
    }
    return current;
}

template <typename AllocConfigT, typename LockConfigT>
size_t FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::GetIndex(size_t size)
{
    ASSERT(size >= FREELIST_ALLOCATOR_MIN_SIZE);
    size_t index = (size - FREELIST_ALLOCATOR_MIN_SIZE) / SEGREGATED_LIST_FREE_BLOCK_RANGE;
    return index < SEGREGATED_LIST_SIZE ? index : SEGREGATED_LIST_SIZE - 1;
}

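// Worked example for GetIndex above (illustrative constants: if
// FREELIST_ALLOCATOR_MIN_SIZE were 64 and SEGREGATED_LIST_FREE_BLOCK_RANGE
// were 64): a 200-byte block maps to index (200 - 64) / 64 = 2, and any size
// past the last range is clamped into the final catch-all list.
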
template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::ContainObject(const ObjectHeader *obj)
{
    return AllocatedByFreeListAllocatorUnsafe(const_cast<ObjectHeader *>(obj));
}

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::IsLive(const ObjectHeader *obj)
{
    ASSERT(ContainObject(obj));
    void *objMem = static_cast<void *>(const_cast<ObjectHeader *>(obj));
    // Get the start address of the pool via PoolManager for the input object
    // to avoid iterating over all pools in the allocator
    auto memPoolHeader =
        static_cast<MemoryPoolHeader *>(PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(objMem));
    [[maybe_unused]] auto allocInfo =
        PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(static_cast<void *>(memPoolHeader));
    ASSERT(allocInfo.GetAllocatorHeaderAddr() == static_cast<const void *>(this));
    MemoryBlockHeader *currentMemHeader = memPoolHeader->GetFirstMemoryHeader();
    while (currentMemHeader != nullptr) {
        if (currentMemHeader->IsUsed()) {
            if (currentMemHeader->GetMemory() == objMem) {
                return true;
            }
        }
        currentMemHeader = currentMemHeader->GetNextUsedHeader();
    }
    return false;
}

#undef LOG_FREELIST_ALLOCATOR

}  // namespace ark::mem

#endif  // PANDA_MEM_FREELIST_ALLOCATOR_INL_H