/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_MEM_FREELIST_ALLOCATOR_INL_H
#define PANDA_MEM_FREELIST_ALLOCATOR_INL_H

#include "libpandabase/utils/logger.h"
#include "runtime/mem/alloc_config.h"
#include "runtime/mem/freelist_allocator.h"
#include "runtime/mem/object_helpers.h"

namespace ark::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_FREELIST_ALLOCATOR(level) LOG(level, ALLOC) << "FreeListAllocator: "

template <typename AllocConfigT, typename LockConfigT>
FreeListAllocator<AllocConfigT, LockConfigT>::FreeListAllocator(MemStatsType *memStats, SpaceType typeAllocation)
    : typeAllocation_(typeAllocation), memStats_(memStats)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Initializing FreeListAllocator";
    ASAN_POISON_MEMORY_REGION(&segregatedList_, sizeof(segregatedList_));
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Initializing FreeListAllocator finished";
}

template <typename AllocConfigT, typename LockConfigT>
FreeListAllocator<AllocConfigT, LockConfigT>::~FreeListAllocator()
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Destroying FreeListAllocator";
    ASAN_UNPOISON_MEMORY_REGION(&segregatedList_, sizeof(segregatedList_));
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Destroying FreeListAllocator finished";
}

template <typename AllocConfigT, typename LockConfigT>
template <bool NEED_LOCK>
// CC-OFFNXT(G.FUN.01-CPP, G.FUD.06) Allocations are perf critical, the change will create additional conditions and
// method calls that will degrade performance
inline void *FreeListAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align)
{
    os::memory::WriteLockHolder<LockConfigT, NEED_LOCK> wlock(allocFreeLock_);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to allocate object with size " << std::dec << size;
    size_t allocSize = size;
    if (allocSize < FREELIST_ALLOCATOR_MIN_SIZE) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to allocate an object with size less than min for this allocator";
        allocSize = FREELIST_ALLOCATOR_MIN_SIZE;
    }
    size_t defAlignedSize = AlignUp(allocSize, GetAlignmentInBytes(FREELIST_DEFAULT_ALIGNMENT));
    if (defAlignedSize > allocSize) {
        allocSize = defAlignedSize;
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Align size to default alignment. New size = " << allocSize;
    }
    if (allocSize > FREELIST_MAX_ALLOC_SIZE) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to allocate too big memory for free list allocator. Return nullptr";
        return nullptr;
    }
    // Get best-fit memory piece from segregated list.
    MemoryBlockHeader *memoryBlock = GetFromSegregatedList(allocSize, align);
    if (memoryBlock == nullptr) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Couldn't allocate memory";
        return nullptr;
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Found memory block at addr = " << std::hex << memoryBlock << " with size "
                                  << std::dec << memoryBlock->GetSize();
    ASSERT(memoryBlock->GetSize() >= allocSize);
    uintptr_t memoryPointer = ToUintPtr(memoryBlock->GetMemory());
    bool requiredPadding = false;
    if ((memoryPointer & (GetAlignmentInBytes(align) - 1)) != 0U) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Raw memory is not aligned as we need. Create special header for padding";
        // The raw memory pointer is not aligned as we expected,
        // so we need to create an extra header inside.
        uintptr_t alignedMemoryPointer = AlignUp(memoryPointer + sizeof(MemoryBlockHeader), GetAlignmentInBytes(align));
        size_t sizeWithPadding = allocSize + (alignedMemoryPointer - memoryPointer);
        ASSERT(memoryBlock->GetSize() >= sizeWithPadding);
        auto paddingHeader =
            static_cast<MemoryBlockHeader *>(ToVoidPtr(alignedMemoryPointer - sizeof(MemoryBlockHeader)));
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Created padding header at addr " << std::hex << paddingHeader;
        paddingHeader->Initialize(allocSize, memoryBlock);
        paddingHeader->SetAsPaddingHeader();
        // Update values
        memoryPointer = alignedMemoryPointer;
        allocSize = sizeWithPadding;
        requiredPadding = true;
    }
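    // Illustrative layout sketch (not from the original source) for the padding case
    // handled above, assuming the raw payload start was misaligned:
    //
    //   |MemoryBlockHeader|...padding...|padding MemoryBlockHeader|aligned payload...|
    //   ^memoryBlock                                              ^memoryPointer
    //
    // GetFreeListMemoryHeader() later detects the padding header via IsPaddingHeader()
    // and follows GetPrevHeader() back to the real block header.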
    if (CanCreateNewBlockFromRemainder(memoryBlock, allocSize)) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Created new memory block from the remainder part:";
        MemoryBlockHeader *newFreeBlock = SplitMemoryBlocks(memoryBlock, allocSize);
        LOG_FREELIST_ALLOCATOR(DEBUG) << "New block started at addr " << std::hex << newFreeBlock << " with size "
                                      << std::dec << newFreeBlock->GetSize();
        memoryBlock->SetUsed();
        FreeListHeader *newFreeListElement = TryToCoalescing(newFreeBlock);
        ASSERT(!newFreeListElement->IsUsed());
        AddToSegregatedList(newFreeListElement);
    } else {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Can't create new block from the remainder. Use full block.";
        memoryBlock->SetUsed();
    }
    if (requiredPadding) {
        // We must update some values in the current memoryBlock
        uintptr_t paddingSize = memoryPointer - ToUintPtr(memoryBlock->GetMemory());
        if (paddingSize == sizeof(MemoryBlockHeader)) {
            LOG_FREELIST_ALLOCATOR(DEBUG) << "SetHasPaddingHeaderAfter";
            memoryBlock->SetPaddingHeaderStoredAfterHeader();
        } else {
            LOG_FREELIST_ALLOCATOR(DEBUG) << "SetHasPaddingSizeAfter, size = " << paddingSize;
            memoryBlock->SetPaddingSizeStoredAfterHeader();
            memoryBlock->SetPaddingSize(paddingSize);
        }
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Allocated memory at addr " << std::hex << ToVoidPtr(memoryPointer);
    {
        AllocConfigT::OnAlloc(memoryBlock->GetSize(), typeAllocation_, memStats_);
        // It is not the object size itself, because we can't compute it from the MemoryBlockHeader structure at the
        // Free call. It is an approximation.
        size_t currentSize =
            ToUintPtr(memoryBlock) + memoryBlock->GetSize() + sizeof(MemoryBlockHeader) - memoryPointer;
        AllocConfigT::AddToCrossingMap(ToVoidPtr(memoryPointer), currentSize);
    }
    ASAN_UNPOISON_MEMORY_REGION(ToVoidPtr(memoryPointer), size);
    AllocConfigT::MemoryInit(ToVoidPtr(memoryPointer));
#ifdef PANDA_MEASURE_FRAGMENTATION
    // Atomic with relaxed order reason: order is not required
    allocatedBytes_.fetch_add(memoryBlock->GetSize(), std::memory_order_relaxed);
#endif
    return ToVoidPtr(memoryPointer);
}

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::Free(void *mem)
{
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    FreeUnsafe(mem);
}
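
// Illustrative usage sketch (not part of the original source; the config/lock
// template arguments, memStats/poolMem variables, and pool size are hypothetical):
//
//   FreeListAllocator<MyAllocConfig, MyLockConfig> allocator(memStats, SpaceType::SPACE_TYPE_OBJECT);
//   allocator.AddMemoryPool(poolMem, poolSize);          // back the allocator with one pool
//   void *obj = allocator.Alloc(64, FREELIST_DEFAULT_ALIGNMENT);
//   if (obj != nullptr) {
//       allocator.Free(obj);
//   }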

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::FreeUnsafe(void *mem)
{
    if (UNLIKELY(mem == nullptr)) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to free memory at invalid addr 0";
        return;
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to free memory at addr " << std::hex << mem;
#ifndef NDEBUG
    if (!AllocatedByFreeListAllocatorUnsafe(mem)) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Try to free memory not from this allocator";
        return;
    }
#endif  // !NDEBUG

    MemoryBlockHeader *memoryHeader = GetFreeListMemoryHeader(mem);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "It is a memory with header " << std::hex << memoryHeader << " with size "
                                  << std::dec << memoryHeader->GetSize() << " (probably with padding)";
    {
        AllocConfigT::OnFree(memoryHeader->GetSize(), typeAllocation_, memStats_);
        // It is not the object size itself, because we can't compute it from the MemoryBlockHeader structure.
        // It is an approximation.
        size_t currentSize =
            ToUintPtr(memoryHeader) + memoryHeader->GetSize() + sizeof(MemoryBlockHeader) - ToUintPtr(mem);
        MemoryBlockHeader *prevUsedHeader = memoryHeader->GetPrevUsedHeader();
        void *prevObject = nullptr;
        size_t prevSize = 0;
        if (prevUsedHeader != nullptr) {
            prevObject = prevUsedHeader->GetMemory();
            prevSize = ToUintPtr(prevUsedHeader) + prevUsedHeader->GetSize() + sizeof(MemoryBlockHeader) -
                       ToUintPtr(prevUsedHeader->GetMemory());
        }
        MemoryBlockHeader *nextUsedHeader = memoryHeader->GetNextUsedHeader();
        void *nextObject = nullptr;
        if (nextUsedHeader != nullptr) {
            nextObject = nextUsedHeader->GetMemory();
        }
        AllocConfigT::RemoveFromCrossingMap(mem, currentSize, nextObject, prevObject, prevSize);
    }
    memoryHeader->SetUnused();
    memoryHeader->SetCommonHeader();
    FreeListHeader *newFreeListElement = TryToCoalescing(memoryHeader);
    ASAN_POISON_MEMORY_REGION(newFreeListElement, newFreeListElement->GetSize() + sizeof(MemoryBlockHeader));
    AddToSegregatedList(newFreeListElement);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Freed memory at addr " << std::hex << mem;
#ifdef PANDA_MEASURE_FRAGMENTATION
    // Atomic with relaxed order reason: order is not required
    allocatedBytes_.fetch_sub(memoryHeader->GetSize(), std::memory_order_relaxed);
#endif
}

// NOTE(aemelenko): We can create a mutex for each pool to increase performance,
// but it requires a pool alignment restriction
// (we must compute the memory pool header addr from a memory block addr stored inside it)

// During a Collect method call we iterate over memory blocks in each pool.
// This iteration can cause race conditions in multithreaded mode.
// E.g. in this scenario:
// |-------|---------|-------------------|------------------------------------------------------------------|
// | time: | Thread: |    Description:   |                         Memory footprint:                        |
// |-------|---------|-------------------|------------------------------------------------------------------|
// |       |         | Thread0 starts    |  |..............Free  Block.............|...Allocated block...|  |
// |       |         | iterating         |  |                                                               |
// |   0   |    0    | over mem blocks   |  current block pointer                                           |
// |       |         | and current block |                                                                  |
// |       |         | is free block     |                                                                  |
// |-------|---------|-------------------|------------------------------------------------------------------|
// |       |         | Thread1           |  |...Allocated block...|................|...Allocated block...|  |
// |   1   |    1    | allocates memory  |                        |                                         |
// |       |         | at this block     |               Unused memory piece                                |
// |       |         |                   |                                                                  |
// |-------|---------|-------------------|------------------------------------------------------------------|
// |       |         | Thread1           |  |...Allocated block...|................|...Allocated block...|  |
// |   2   |    1    | set up values in  |  |                                                               |
// |       |         | this block header |  change size of this block                                       |
// |       |         | (set up size)     |                                                                  |
// |-------|---------|-------------------|------------------------------------------------------------------|
// |       |         | Thread0 reads     |  |...Allocated block...|................|...Allocated block...|  |
// |       |         | some garbage or   |                                                                  |
// |   3   |    0    | wrong value to    |  current block pointer - points to wrong memory                  |
// |       |         | calculate next    |                                                                  |
// |       |         | block pointer     |                                                                  |
// |-------|---------|-------------------|------------------------------------------------------------------|
//
// Therefore, we must unlock the allocator's alloc/free methods only
// when we have a pointer to an allocated block (i.e. IsUsed())

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::Collect(const GCObjectVisitor &deathCheckerFn)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Collecting started";
    // NOTE(aemelenko): Looks like we can unlock alloc_free_lock_ for not dead objects during collection
    IterateOverObjects([this, &deathCheckerFn](ObjectHeader *mem) {
        if (deathCheckerFn(mem) == ObjectStatus::DEAD_OBJECT) {
            LOG(DEBUG, GC) << "DELETE OBJECT " << GetDebugInfoAboutObject(mem);
            FreeUnsafe(mem);
        }
    });
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Collecting finished";
}

template <typename AllocConfigT, typename LockConfigT>
template <typename ObjectVisitor>
void FreeListAllocator<AllocConfigT, LockConfigT>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Iterating over objects started";
    MemoryPoolHeader *currentPool = nullptr;
    {
        // Do this under lock because the pointer to mempoolTail_ can be changed by other threads
        // in an AddMemoryPool method call.
        // NOTE: We add each new pool at the mempoolTail_. Therefore, we can read it once and iterate to head.
        os::memory::ReadLockHolder rlock(allocFreeLock_);
        currentPool = mempoolTail_;
    }
    while (currentPool != nullptr) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "  iterate over " << std::hex << currentPool;
        MemoryBlockHeader *currentMemHeader = currentPool->GetFirstMemoryHeader();
        while (currentMemHeader != nullptr) {
            // Lock any possible memory headers changes in the allocator.
            os::memory::WriteLockHolder wlock(allocFreeLock_);
            if (currentMemHeader->IsUsed()) {
                // We can call Free inside objectVisitor, that's why we should lock everything
                objectVisitor(static_cast<ObjectHeader *>(currentMemHeader->GetMemory()));
                // At this point, currentMemHeader can point to a nonexistent memory block
                // (e.g., if we coalesce it with the previous free block).
                // However, we still have a valid MemoryBlockHeader class field here.
                // NOTE: First, we coalesce the currentMemHeader block with the next one (if it is free)
                // and update the current block size. Therefore, we have a valid pointer to the next memory block.

                // After calling objectVisitor() the currentMemHeader may point to a free block of memory,
                // which can be modified by an alloc call in another thread.
                // Therefore, do not unlock until we have a pointer to a Used block.
                currentMemHeader = currentMemHeader->GetNextUsedHeader();
            } else {
                // The current header is marked as Unused, so it can be modified by an allocation in another thread.
                // So, read the next header while in the locked state.
                currentMemHeader = currentMemHeader->GetNextUsedHeader();
            }
            // We have a pointer to a Used memory block, or to nullptr.
            // Therefore, we can unlock.
        }
        currentPool = currentPool->GetPrev();
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Iterating over objects finished";
}

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::AddMemoryPool(void *mem, size_t size)
{
    // Lock alloc/free because we add a new block to the segregated list here.
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    ASSERT(mem != nullptr);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Add memory pool to FreeListAllocator from " << std::hex << mem << " with size "
                                  << std::dec << size;
    ASSERT((ToUintPtr(mem) & (sizeof(MemoryBlockHeader) - 1)) == 0U);
    auto mempoolHeader = static_cast<MemoryPoolHeader *>(mem);
    if (mempoolHead_ == nullptr) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Initialize mempool_head_";
        mempoolHeader->Initialize(size, nullptr, nullptr);
        mempoolHead_ = mempoolHeader;
        mempoolTail_ = mempoolHeader;
    } else {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Add this memory pool at the tail after block " << std::hex << mempoolTail_;
        mempoolHeader->Initialize(size, mempoolTail_, nullptr);
        mempoolTail_->SetNext(mempoolHeader);
        mempoolTail_ = mempoolHeader;
    }
    MemoryBlockHeader *firstMemHeader = mempoolHeader->GetFirstMemoryHeader();
    firstMemHeader->Initialize(size - sizeof(MemoryBlockHeader) - sizeof(MemoryPoolHeader), nullptr);
    firstMemHeader->SetLastBlockInPool();
    AddToSegregatedList(static_cast<FreeListHeader *>(firstMemHeader));
    ASAN_POISON_MEMORY_REGION(mem, size);
    AllocConfigT::InitializeCrossingMapForMemory(mem, size);
    return true;
}
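
// Illustrative sketch (not from the original source) of the pool layout AddMemoryPool creates:
//
//   |MemoryPoolHeader|MemoryBlockHeader|.......... usable memory ..........|
//   ^mem             ^GetFirstMemoryHeader()   (size minus the two headers)
//
// The single free block spans the whole pool, is marked as the last block in the
// pool, and is pushed into the segregated list.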

template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void FreeListAllocator<AllocConfigT, LockConfigT>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    // We call this method and return pools to the system.
    // Therefore, delete all objects to clear all external dependencies.
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Clear all objects inside the allocator";
    // Lock everything to avoid race conditions.
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    MemoryPoolHeader *currentPool = mempoolHead_;
    while (currentPool != nullptr) {
        // Use tmp in case the visitor has side effects.
        MemoryPoolHeader *tmp = currentPool->GetNext();
        AllocConfigT::RemoveCrossingMapForMemory(currentPool, currentPool->GetSize());
        memVisitor(currentPool, currentPool->GetSize());
        currentPool = tmp;
    }
}

template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void FreeListAllocator<AllocConfigT, LockConfigT>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    // Lock everything to avoid race conditions.
    os::memory::WriteLockHolder wlock(allocFreeLock_);
    MemoryPoolHeader *currentPool = mempoolHead_;
    while (currentPool != nullptr) {
        // Use tmp in case the visitor has side effects.
        MemoryPoolHeader *tmp = currentPool->GetNext();
        MemoryBlockHeader *firstBlock = currentPool->GetFirstMemoryHeader();
        if (firstBlock->IsLastBlockInPool() && !firstBlock->IsUsed()) {
            // We have only one big memory block in this pool,
            // and this block is not used.
            LOG_FREELIST_ALLOCATOR(DEBUG)
                << "VisitAndRemoveFreePools: Remove free memory pool from allocator with start addr " << std::hex
                << currentPool << " and size " << std::dec << currentPool->GetSize()
                << " bytes with the first block at addr " << std::hex << firstBlock << " and size " << std::dec
                << firstBlock->GetSize();
            auto freeHeader = static_cast<FreeListHeader *>(firstBlock);
            freeHeader->PopFromFreeList();
            MemoryPoolHeader *next = currentPool->GetNext();
            MemoryPoolHeader *prev = currentPool->GetPrev();
            if (next != nullptr) {
                ASSERT(next->GetPrev() == currentPool);
                next->SetPrev(prev);
            } else {
                // This means that the current pool is the last one.
                ASSERT(mempoolTail_ == currentPool);
                LOG_FREELIST_ALLOCATOR(DEBUG) << "VisitAndRemoveFreePools: Change pools tail pointer";
                mempoolTail_ = prev;
            }
            if (prev != nullptr) {
                ASSERT(prev->GetNext() == currentPool);
                prev->SetNext(next);
            } else {
                // This means that the current pool is the first one.
                ASSERT(mempoolHead_ == currentPool);
                LOG_FREELIST_ALLOCATOR(DEBUG) << "VisitAndRemoveFreePools: Change pools head pointer";
                mempoolHead_ = next;
            }
            AllocConfigT::RemoveCrossingMapForMemory(currentPool, currentPool->GetSize());
            memVisitor(currentPool, currentPool->GetSize());
        }
        currentPool = tmp;
    }
    segregatedList_.ReleaseFreeMemoryBlocks();
}

template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void FreeListAllocator<AllocConfigT, LockConfigT>::IterateOverObjectsInRange(const MemVisitor &memVisitor,
                                                                             void *leftBorder, void *rightBorder)
{
    // NOTE: The current implementation doesn't look at the PANDA_CROSSING_MAP_MANAGE_CROSSED_BORDER flag
    LOG_FREELIST_ALLOCATOR(DEBUG) << "FreeListAllocator::IterateOverObjectsInRange for range [" << std::hex
                                  << leftBorder << ", " << rightBorder << "]";
    ASSERT(ToUintPtr(rightBorder) >= ToUintPtr(leftBorder));
    // NOTE(aemelenko): These are temporary asserts because we can't do anything
    // if the range crosses different allocators' memory pools
    ASSERT(ToUintPtr(rightBorder) - ToUintPtr(leftBorder) == (CrossingMapSingleton::GetCrossingMapGranularity() - 1U));
    ASSERT((ToUintPtr(rightBorder) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))) ==
           (ToUintPtr(leftBorder) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))));
    MemoryBlockHeader *firstMemoryHeader = nullptr;
    {
        // Do this under lock because the pointer to the first object in the CrossingMap can be changed during
        // a CrossingMap call.
        os::memory::ReadLockHolder rlock(allocFreeLock_);
        if (!AllocatedByFreeListAllocatorUnsafe(leftBorder) && !AllocatedByFreeListAllocatorUnsafe(rightBorder)) {
            LOG_FREELIST_ALLOCATOR(DEBUG) << "This memory range is not covered by this allocator";
            return;
        }
        void *objAddr = AllocConfigT::FindFirstObjInCrossingMap(leftBorder, rightBorder);
        if (objAddr == nullptr) {
            return;
        }
        ASSERT(AllocatedByFreeListAllocatorUnsafe(objAddr));
        MemoryBlockHeader *memoryHeader = GetFreeListMemoryHeader(objAddr);
        // The memory header points to an object which starts in this range or in the previous one.
        // In the second case, this object may not cross the border of the current range
        // (but there is an object stored after it which crosses the current range).
        ASSERT(ToUintPtr(memoryHeader->GetMemory()) <= ToUintPtr(rightBorder));
        firstMemoryHeader = memoryHeader;
    }
    ASSERT(firstMemoryHeader != nullptr);
    // Let's start the iteration:
    MemoryBlockHeader *currentMemHeader = firstMemoryHeader;
    LOG_FREELIST_ALLOCATOR(DEBUG) << "FreeListAllocator::IterateOverObjectsInRange start iterating from obj with addr "
                                  << std::hex << firstMemoryHeader->GetMemory();
    while (currentMemHeader != nullptr) {
        // We don't lock the allocator because we point to a used block which can't be changed
        // during the iteration in range.
        void *objAddr = currentMemHeader->GetMemory();
        if (ToUintPtr(objAddr) > ToUintPtr(rightBorder)) {
            // Iteration is over.
            break;
        }
        LOG_FREELIST_ALLOCATOR(DEBUG)
            << "FreeListAllocator::IterateOverObjectsInRange found obj in this range with addr " << std::hex << objAddr;
        memVisitor(static_cast<ObjectHeader *>(objAddr));
        {
            os::memory::ReadLockHolder rlock(allocFreeLock_);
            currentMemHeader = currentMemHeader->GetNextUsedHeader();
        }
    }
    LOG_FREELIST_ALLOCATOR(DEBUG) << "FreeListAllocator::IterateOverObjectsInRange finished";
}

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::CoalesceMemoryBlocks(MemoryBlockHeader *firstBlock,
                                                                        MemoryBlockHeader *secondBlock)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "CoalesceMemoryBlock: "
                                  << "first block = " << std::hex << firstBlock << " with size " << std::dec
                                  << firstBlock->GetSize() << " ; second block = " << std::hex << secondBlock
                                  << " with size " << std::dec << secondBlock->GetSize();
    ASSERT(firstBlock != nullptr);
    ASSERT(firstBlock->GetNextHeader() == secondBlock);
    ASSERT(secondBlock != nullptr);
    ASSERT(firstBlock->CanBeCoalescedWithNext() || secondBlock->CanBeCoalescedWithPrev());
    firstBlock->Initialize(firstBlock->GetSize() + secondBlock->GetSize() + sizeof(MemoryBlockHeader),
                           firstBlock->GetPrevHeader());
    if (secondBlock->IsLastBlockInPool()) {
        LOG_FREELIST_ALLOCATOR(DEBUG) << "CoalesceMemoryBlock: secondBlock was the last in a pool";
        firstBlock->SetLastBlockInPool();
    } else {
        ASSERT(firstBlock->GetNextHeader() != nullptr);
        firstBlock->GetNextHeader()->SetPrevHeader(firstBlock);
    }
}

template <typename AllocConfigT, typename LockConfigT>
freelist::MemoryBlockHeader *FreeListAllocator<AllocConfigT, LockConfigT>::SplitMemoryBlocks(
    MemoryBlockHeader *memoryBlock, size_t firstBlockSize)
{
    ASSERT(memoryBlock->GetSize() > (firstBlockSize + sizeof(MemoryBlockHeader)));
    ASSERT(!memoryBlock->IsUsed());
    auto secondBlock =
        static_cast<MemoryBlockHeader *>(ToVoidPtr(ToUintPtr(memoryBlock->GetMemory()) + firstBlockSize));
    size_t secondBlockSize = memoryBlock->GetSize() - firstBlockSize - sizeof(MemoryBlockHeader);
    secondBlock->Initialize(secondBlockSize, memoryBlock);
    if (memoryBlock->IsLastBlockInPool()) {
        secondBlock->SetLastBlockInPool();
    } else {
        ASSERT(secondBlock->GetNextHeader() != nullptr);
        secondBlock->GetNextHeader()->SetPrevHeader(secondBlock);
    }
    memoryBlock->Initialize(firstBlockSize, memoryBlock->GetPrevHeader());
    return secondBlock;
}
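
// Illustrative sketch (not from the original source) of SplitMemoryBlocks:
//
//   before: |hdr|................... memoryBlock->GetSize() ..................|
//   after:  |hdr|.. firstBlockSize ..|hdr|.......... secondBlockSize .........|
//                                    ^returned secondBlock
//
// secondBlockSize = original size - firstBlockSize - sizeof(MemoryBlockHeader),
// matching the arithmetic above.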

template <typename AllocConfigT, typename LockConfigT>
freelist::MemoryBlockHeader *FreeListAllocator<AllocConfigT, LockConfigT>::GetFreeListMemoryHeader(void *mem)
{
    ASSERT(mem != nullptr);
    auto memoryHeader = static_cast<MemoryBlockHeader *>(ToVoidPtr(ToUintPtr(mem) - sizeof(MemoryBlockHeader)));
    if (!memoryHeader->IsPaddingHeader()) {
        // We got the correct header of this memory, just return it.
        return memoryHeader;
    }
    // This is aligned memory with some free space before the memory pointer.
    // The previous header must be a pointer to the correct header of this memory block.
    LOG_FREELIST_ALLOCATOR(DEBUG) << "It is a memory with padding at head";
    return memoryHeader->GetPrevHeader();
}

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::AllocatedByFreeListAllocator(void *mem)
{
    os::memory::ReadLockHolder rlock(allocFreeLock_);
    return AllocatedByFreeListAllocatorUnsafe(mem);
}

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::AllocatedByFreeListAllocatorUnsafe(void *mem)
{
    // NOTE(aemelenko): Create a more complex solution
    MemoryPoolHeader *currentPool = mempoolHead_;
    while (currentPool != nullptr) {
        // This assert means that we were asked about memory inside a MemoryPoolHeader
        ASSERT(ToUintPtr(currentPool->GetFirstMemoryHeader()) > ToUintPtr(mem) ||
               ToUintPtr(currentPool) < ToUintPtr(mem));
        if ((ToUintPtr(currentPool->GetFirstMemoryHeader()) < ToUintPtr(mem)) &&
            ((ToUintPtr(currentPool) + currentPool->GetSize()) > ToUintPtr(mem))) {
            return true;
        }
        currentPool = currentPool->GetNext();
    }
    return false;
}

template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *FreeListAllocator<AllocConfigT, LockConfigT>::TryToCoalescing(MemoryBlockHeader *memoryHeader)
{
    ASSERT(memoryHeader != nullptr);
    LOG_FREELIST_ALLOCATOR(DEBUG) << "TryToCoalescing memory block";
    if (memoryHeader->CanBeCoalescedWithNext()) {
        ASSERT(!memoryHeader->GetNextHeader()->IsUsed());
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Coalesce with next block";
        auto nextFreeList = static_cast<FreeListHeader *>(memoryHeader->GetNextHeader());
        ASSERT(nextFreeList != nullptr);
        // Pop this free list element from the list
        nextFreeList->PopFromFreeList();
        // Combine these two blocks together
        CoalesceMemoryBlocks(memoryHeader, static_cast<MemoryBlockHeader *>(nextFreeList));
    }
    if (memoryHeader->CanBeCoalescedWithPrev()) {
        ASSERT(!memoryHeader->GetPrevHeader()->IsUsed());
        LOG_FREELIST_ALLOCATOR(DEBUG) << "Coalesce with prev block";
        auto prevFreeList = static_cast<FreeListHeader *>(memoryHeader->GetPrevHeader());
        // Pop this free list element from the list
        prevFreeList->PopFromFreeList();
        // Combine these two blocks together
        CoalesceMemoryBlocks(static_cast<MemoryBlockHeader *>(prevFreeList), memoryHeader);
        memoryHeader = static_cast<MemoryBlockHeader *>(prevFreeList);
    }
    return static_cast<FreeListHeader *>(memoryHeader);
}
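
// Illustrative sketch (not from the original source) of TryToCoalescing when both
// neighbors are free:
//
//   before: |hdr|.. free prev ..|hdr|.. freed block ..|hdr|.. free next ..|
//   after:  |hdr|................. one merged free block .................|
//
// The free neighbors are popped from the segregated list first; the caller is
// expected to re-insert the merged block via AddToSegregatedList.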

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::AddToSegregatedList(FreeListHeader *freeListElement)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "AddToSegregatedList: Add new block into segregated-list with size "
                                  << freeListElement->GetSize();
    segregatedList_.AddMemoryBlock(freeListElement);
}

template <typename AllocConfigT, typename LockConfigT>
freelist::MemoryBlockHeader *FreeListAllocator<AllocConfigT, LockConfigT>::GetFromSegregatedList(size_t size,
                                                                                                 Alignment align)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "GetFromSegregatedList: Try to find memory for size " << size << " with alignment "
                                  << align;
    size_t alignedSize = size;
    if (align != FREELIST_DEFAULT_ALIGNMENT) {
        // NOTE(aemelenko): This is a crude but fast solution with bigger fragmentation.
        // It would be better to add this value instead, but I'm not 100% sure about all corner cases:
        // (GetAlignmentInBytes(align) + sizeof(MemoryBlockHeader) - GetAlignmentInBytes(FREELIST_DEFAULT_ALIGNMENT))
        alignedSize += (GetAlignmentInBytes(align) + sizeof(MemoryBlockHeader));
    }
    FreeListHeader *memBlock = segregatedList_.FindMemoryBlock(alignedSize);
    if (memBlock != nullptr) {
        memBlock->PopFromFreeList();
        ASSERT((AlignUp(ToUintPtr(memBlock->GetMemory()), GetAlignmentInBytes(align)) -
                ToUintPtr(memBlock->GetMemory()) + size) <= memBlock->GetSize());
    }
    return static_cast<MemoryBlockHeader *>(memBlock);
}
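
// Worked example (illustrative; the 16-byte header size is an assumption): for a
// request of size = 40 with 64-byte alignment, the code above asks the segregated
// list for 40 + 64 + 16 = 120 bytes. Over-reserving by the full alignment plus one
// header guarantees that an aligned payload (plus an optional padding header)
// always fits, at the cost of extra fragmentation.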

template <typename AllocConfigT, typename LockConfigT>
ATTRIBUTE_NO_SANITIZE_ADDRESS void FreeListAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader::Initialize(
    size_t size, MemoryPoolHeader *prev, MemoryPoolHeader *next)
{
    LOG_FREELIST_ALLOCATOR(DEBUG) << "Init a new memory pool with size " << size << " with prev link = " << std::hex
                                  << prev << " and next link = " << next;
    ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
    size_ = size;
    prev_ = prev;
    next_ = next;
    ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
}

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::AddMemoryBlock(FreeListHeader *freelistHeader)
{
    size_t size = freelistHeader->GetSize();
    size_t index = GetIndex(size);
    if (SEGREGATED_LIST_FAST_INSERT) {
        freeMemoryBlocks_[index].InsertNext(freelistHeader);
    } else {
        FreeListHeader *mostSuitable = FindTheMostSuitableBlockInOrderedList(index, size);
        // The most suitable block must have exactly this size,
        // or be the last block with a bigger size in the ordered list,
        // or be nullptr.
        if (mostSuitable == nullptr) {
            freeMemoryBlocks_[index].InsertNext(freelistHeader);
        } else {
            mostSuitable->InsertNext(freelistHeader);
        }
    }
}

template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::FindMemoryBlock(size_t size)
{
    size_t index = GetIndex(size);
    FreeListHeader *head = GetFirstBlock(index);
    FreeListHeader *suitableBlock = nullptr;
    if (head != nullptr) {
        // We have some memory in this range. Try to find a suitable block.
        if (SEGREGATED_LIST_FAST_INSERT) {
            // We don't have any order in inserting blocks,
            // so we need to iterate over the whole list.
            suitableBlock = FindSuitableBlockInList(head, size);
        } else {
            // All blocks in this list are in descending order.
            // We can check the first one to determine if we have a block
            // with this size or not.
            suitableBlock = TryGetSuitableBlockBySize(head, size, index);
        }
    }

    if (suitableBlock == nullptr) {
        // We didn't find a block in the head list. Try to find one in the other lists.
        index++;
        for (; index < SEGREGATED_LIST_SIZE; index++) {
            if (GetFirstBlock(index) == nullptr) {
                continue;
            }
            if constexpr (SEGREGATED_LIST_FAST_INSERT || SEGREGATED_LIST_FAST_EXTRACT) {
                // Just get the first one
                suitableBlock = GetFirstBlock(index);
            } else {
                suitableBlock = FindTheMostSuitableBlockInOrderedList(index, size);
            }
            break;
        }
    }

    if (suitableBlock != nullptr) {
        ASSERT(suitableBlock->GetSize() >= size);
    }

    return suitableBlock;
}

template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::FindSuitableBlockInList(
    freelist::FreeListHeader *head, size_t size)
{
    FreeListHeader *suitableBlock = nullptr;
    for (FreeListHeader *current = head; current != nullptr; current = current->GetNextFree()) {
        if (current->GetSize() < size) {
            continue;
        }
        if constexpr (SEGREGATED_LIST_FAST_EXTRACT) {
            suitableBlock = current;
            break;
        }
        if (suitableBlock == nullptr) {
            suitableBlock = current;
        } else {
            // Prefer the tighter fit to reduce fragmentation.
            if (suitableBlock->GetSize() > current->GetSize()) {
                suitableBlock = current;
            }
        }
        if (suitableBlock->GetSize() == size) {
            break;
        }
    }
    return suitableBlock;
}

template <typename AllocConfigT, typename LockConfigT>
freelist::FreeListHeader *FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::TryGetSuitableBlockBySize(
    freelist::FreeListHeader *head, size_t size, size_t index)
{
    if (head->GetSize() >= size) {
        if constexpr (SEGREGATED_LIST_FAST_EXTRACT) {
            // Just get the first element
            return head;
        }
        // Try to find the most suitable memory for this size.
        return FindTheMostSuitableBlockInOrderedList(index, size);
    }
    return nullptr;
}

template <typename AllocConfigT, typename LockConfigT>
void FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::ReleaseFreeMemoryBlocks()
{
    for (size_t index = 0; index < SEGREGATED_LIST_SIZE; index++) {
        FreeListHeader *current = GetFirstBlock(index);
        while (current != nullptr) {
            size_t blockSize = current->GetSize();
            // Start address from which we can release pages
            uintptr_t startAddr = AlignUp(ToUintPtr(current) + sizeof(FreeListHeader), os::mem::GetPageSize());
            // End address before which we can release pages
            uintptr_t endAddr =
                os::mem::AlignDownToPageSize(ToUintPtr(current) + sizeof(MemoryBlockHeader) + blockSize);
            if (startAddr < endAddr) {
                os::mem::ReleasePages(startAddr, endAddr);
            }
            current = current->GetNextFree();
        }
    }
}
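
// Worked example (illustrative, assuming 4 KiB pages): for a free block whose
// FreeListHeader ends at 0x10010 and whose memory ends at 0x15FF0, we get
// startAddr = AlignUp(0x10010) = 0x11000 and endAddr = AlignDown(0x15FF0) = 0x15000,
// so four whole pages are returned to the OS while the headers and partial pages
// stay mapped.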

template <typename AllocConfigT, typename LockConfigT>
// CC-OFFNXT(G.FMT.07) project code style
// CC-OFFNXT(G.FUD.06) perf critical
inline freelist::FreeListHeader *
FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::FindTheMostSuitableBlockInOrderedList(size_t index,
                                                                                                    size_t size)
{
    static_assert(!SEGREGATED_LIST_FAST_INSERT);
    FreeListHeader *current = GetFirstBlock(index);
    if (current == nullptr) {
        return nullptr;
    }
    size_t currentSize = current->GetSize();
    if (currentSize < size) {
        return nullptr;
    }
    while (currentSize != size) {
        FreeListHeader *next = current->GetNextFree();
        if (next == nullptr) {
            // The current free list header is the last in the list.
            break;
        }
        size_t nextSize = next->GetSize();
        if (nextSize < size) {
            // The next free list header is less than size,
            // so we don't need to iterate anymore.
            break;
        }
        current = next;
        currentSize = nextSize;
    }
    return current;
}

template <typename AllocConfigT, typename LockConfigT>
size_t FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::GetIndex(size_t size)
{
    ASSERT(size >= FREELIST_ALLOCATOR_MIN_SIZE);
    size_t index = (size - FREELIST_ALLOCATOR_MIN_SIZE) / SEGREGATED_LIST_FREE_BLOCK_RANGE;
    return index < SEGREGATED_LIST_SIZE ? index : SEGREGATED_LIST_SIZE - 1;
}
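
// Worked example (illustrative; the constants are assumed values): with
// FREELIST_ALLOCATOR_MIN_SIZE = 64, SEGREGATED_LIST_FREE_BLOCK_RANGE = 64 and
// SEGREGATED_LIST_SIZE = 16, a 200-byte block maps to index (200 - 64) / 64 = 2,
// and any block of 64 + 64 * 16 = 1088 bytes or more is clamped into the last
// bucket, index 15.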

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::ContainObject(const ObjectHeader *obj)
{
    return AllocatedByFreeListAllocatorUnsafe(const_cast<ObjectHeader *>(obj));
}

template <typename AllocConfigT, typename LockConfigT>
bool FreeListAllocator<AllocConfigT, LockConfigT>::IsLive(const ObjectHeader *obj)
{
    ASSERT(ContainObject(obj));
    void *objMem = static_cast<void *>(const_cast<ObjectHeader *>(obj));
    // Get the start address of the pool via PoolManager for the input object
    // to avoid iterating over all pools in the allocator.
    auto memPoolHeader =
        static_cast<MemoryPoolHeader *>(PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(objMem));
    [[maybe_unused]] auto allocInfo =
        PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(static_cast<void *>(memPoolHeader));
    ASSERT(allocInfo.GetAllocatorHeaderAddr() == static_cast<const void *>(this));
    MemoryBlockHeader *currentMemHeader = memPoolHeader->GetFirstMemoryHeader();
    while (currentMemHeader != nullptr) {
        if (currentMemHeader->IsUsed()) {
            if (currentMemHeader->GetMemory() == objMem) {
                return true;
            }
        }
        currentMemHeader = currentMemHeader->GetNextUsedHeader();
    }
    return false;
}

template <typename AllocConfigT, typename LockConfigT>
double FreeListAllocator<AllocConfigT, LockConfigT>::CalculateExternalFragmentation()
{
    return segregatedList_.CalculateExternalFragmentation();
}

template <typename AllocConfigT, typename LockConfigT>
double FreeListAllocator<AllocConfigT, LockConfigT>::SegregatedList::CalculateExternalFragmentation()
{
    size_t totalFreeSize = 0;
    size_t maxFreeBlockSize = 0;
    for (size_t index = 0; index < SEGREGATED_LIST_SIZE; index++) {
        auto *current = GetFirstBlock(index);
        while (current != nullptr) {
            auto blockSize = current->GetSize();
            totalFreeSize += blockSize;
            if (blockSize > maxFreeBlockSize) {
                maxFreeBlockSize = blockSize;
            }
            current = current->GetNextFree();
        }
    }

    if (totalFreeSize == 0) {
        return 0;
    }

    return 1.0 - static_cast<double>(maxFreeBlockSize) / totalFreeSize;
}
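
// Worked example (illustrative): with free blocks of 40 KiB, 32 KiB and 8 KiB,
// totalFreeSize = 80 KiB and maxFreeBlockSize = 40 KiB, so the external
// fragmentation is 1.0 - 40 / 80 = 0.5. A single free block always yields 0;
// many small scattered blocks push the metric toward 1.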

#undef LOG_FREELIST_ALLOCATOR

}  // namespace ark::mem

#endif  // PANDA_MEM_FREELIST_ALLOCATOR_INL_H