/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_MEM_FREELIST_ALLOCATOR_H
#define PANDA_MEM_FREELIST_ALLOCATOR_H
17 
#include <array>
#include <atomic>
#include <utility>

#include "libpandabase/mem/mem.h"
#include "libpandabase/mem/space.h"
#include "libpandabase/os/mutex.h"
#include "runtime/mem/freelist.h"
#include "runtime/mem/lock_config_helper.h"
#include "runtime/mem/runslots.h"
26 
27 namespace ark::mem {
28 
29 // NOTE(aemelenko): Move this constants to compile options
30 // Minimal size of this allocator is a max size of RunSlots allocator.
31 static constexpr size_t PANDA_FREELIST_ALLOCATOR_MIN_SIZE = RunSlots<>::MaxSlotSize();
32 static constexpr size_t PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_SIZE = 16;
33 static constexpr bool PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_FAST_INSERT = false;
34 static constexpr bool PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_FAST_EXTRACT = false;
35 
36 static constexpr Alignment FREELIST_DEFAULT_ALIGNMENT = DEFAULT_ALIGNMENT;
37 
38 static constexpr size_t FREELIST_ALLOCATOR_MIN_SIZE = PANDA_FREELIST_ALLOCATOR_MIN_SIZE;
39 static_assert(FREELIST_ALLOCATOR_MIN_SIZE >= (sizeof(freelist::FreeListHeader) - sizeof(freelist::MemoryBlockHeader)));
40 
41 class FreeListAllocatorLockConfig {
42 public:
43     using CommonLock = os::memory::RWLock;
44     using DummyLock = os::memory::DummyLock;
45 
46     template <MTModeT MT_MODE>
47     using ParameterizedLock = typename LockConfigHelper<FreeListAllocatorLockConfig, MT_MODE>::Value;
48 };
49 
50 template <typename T, typename AllocConfigT, typename LockConfigT>
51 class FreeListAllocatorAdapter;
52 enum class InternalAllocatorConfig;
53 template <InternalAllocatorConfig CONFIG>
54 class InternalAllocator;
55 
//                                             FreeList Allocator layout:
//
//     |..........|xxxxxxxxxx|..........|........|00000000|..........|xxxxxxxxxx|..........|........|0000000000000000|
//     |..........|xxxxxxxxxx|..........|..Links.|00000000|..........|xxxxxxxxxx|..........|..Links.|0000000000000000|
//     |..Memory..|xOCCUPIEDx|..Memory..|...on...|00FREE00|..Memory..|xOCCUPIEDx|..Memory..|...on...|000000FREE000000|
//     |..Header..|xxMEMORYxx|..Header..|..next/.|0MEMORY0|..Header..|xxMEMORYxx|..Header..|..next/.|00000MEMORY00000|
//     |..........|xxxxxxxxxx|..........|..prev..|00000000|..........|xxxxxxxxxx|..........|..prev..|0000000000000000|
//     |..........|xxxxxxxxxx|..........|..free..|00000000|..........|xxxxxxxxxx|..........|..free..|0000000000000000|
//
//                        Blocks with alignments:
// 1) Padding header stored just after the main block header:
//     |..........||..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx|
//     |..........||..........||xxxxxxxxxxALIGNEDxxxxxxxxxxxxxx|
//     |..Memory..||.Padding..||xxxxxxxxxxOCCUPIEDxxxxxxxxxxxxx|
//     |..Header..||..Header..||xxxxxxxxxxxMEMORYxxxxxxxxxxxxxx|
//     |..........||..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx|
//     |..........||..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx|
//
// 2) We have padding size after the main block header:
//     |..........|........|--------|..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx|
//     |..........|........|--------|..........||xxxxxxxxxxALIGNEDxxxxxxxxxxxxxx|
//     |..Memory..|.Padding|--------|.Padding..||xxxxxxxxxxOCCUPIEDxxxxxxxxxxxxx|
//     |..Header..|..Size..|--------|..Header..||xxxxxxxxxxxMEMORYxxxxxxxxxxxxxx|
//     |..........|........|--------|..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx|
//     |..........|........|--------|..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx|
82 template <typename AllocConfigT, typename LockConfigT = FreeListAllocatorLockConfig::CommonLock>
83 class FreeListAllocator {
84 public:
85     explicit FreeListAllocator(MemStatsType *memStats, SpaceType typeAllocation = SpaceType::SPACE_TYPE_OBJECT);
86     ~FreeListAllocator();
87     NO_COPY_SEMANTIC(FreeListAllocator);
88     NO_MOVE_SEMANTIC(FreeListAllocator);
89 
90     template <typename T, typename... Args>
New(Args &&...args)91     [[nodiscard]] T *New(Args &&...args)
92     {
93         auto p = reinterpret_cast<void *>(Alloc(sizeof(T)));
94         new (p) T(std::forward<Args>(args)...);
95         return reinterpret_cast<T *>(p);
96     }
97 
98     template <typename T>
99     [[nodiscard]] T *AllocArray(size_t arrLength);
100 
101     template <bool NEED_LOCK = true>
102     [[nodiscard]] void *Alloc(size_t size, Alignment align = FREELIST_DEFAULT_ALIGNMENT);
103 
104     void Free(void *mem);
105 
106     void Collect(const GCObjectVisitor &deathCheckerFn);
107 
108     bool AddMemoryPool(void *mem, size_t size);
109 
110     /**
111      * @brief Iterates over all objects allocated by this allocator.
112      * @tparam MemVisitor
113      * @param object_visitor - function pointer or functor
114      */
115     template <typename ObjectVisitor>
116     void IterateOverObjects(const ObjectVisitor &objectVisitor);
117 
118     /**
119      * @brief Iterates over all memory pools used by this allocator
120      * and remove them from the allocator structure.
121      * NOTE: This method can't be used to clear all internal allocator
122      * information and reuse the allocator somewhere else.
123      * @tparam MemVisitor
124      * @param mem_visitor - function pointer or functor
125      */
126     template <typename MemVisitor>
127     void VisitAndRemoveAllPools(const MemVisitor &memVisitor);
128 
129     /**
130      * @brief Visit memory pools that can be returned to the system in this allocator
131      * and remove them from the allocator structure.
132      * @tparam MemVisitor
133      * @param mem_visitor - function pointer or functor
134      */
135     template <typename MemVisitor>
136     void VisitAndRemoveFreePools(const MemVisitor &memVisitor);
137 
138     /**
139      * @brief Iterates over objects in the range inclusively.
140      * @tparam MemVisitor
141      * @param mem_visitor - function pointer or functor
142      * @param left_border - a pointer to the first byte of the range
143      * @param right_border - a pointer to the last byte of the range
144      */
145     template <typename MemVisitor>
146     void IterateOverObjectsInRange(const MemVisitor &memVisitor, void *leftBorder, void *rightBorder);
147 
148     FreeListAllocatorAdapter<void, AllocConfigT, LockConfigT> Adapter();
149 
150     /**
151      * @brief returns maximum size which can be allocated by this allocator
152      * @return
153      */
GetMaxSize()154     static constexpr size_t GetMaxSize()
155     {
156         return FREELIST_MAX_ALLOC_SIZE;
157     }
158 
159     /**
160      * @brief returns minimum pool size which can be added to this allocator
161      * @return
162      */
GetMinPoolSize()163     static constexpr size_t GetMinPoolSize()
164     {
165         return FREELIST_DEFAULT_MEMORY_POOL_SIZE;
166     }
167 
PoolAlign()168     static constexpr size_t PoolAlign()
169     {
170         return sizeof(MemoryBlockHeader);
171     }
172 
173     bool ContainObject(const ObjectHeader *obj);
174 
175     bool IsLive(const ObjectHeader *obj);
176 
GetAllocatorType()177     static constexpr AllocatorType GetAllocatorType()
178     {
179         return AllocatorType::FREELIST_ALLOCATOR;
180     }
181 
182     double CalculateExternalFragmentation();
183 
184 #ifdef PANDA_MEASURE_FRAGMENTATION
GetAllocatedBytes()185     size_t GetAllocatedBytes() const
186     {
187         // Atomic with relaxed order reason: order is not required
188         return allocatedBytes_.load(std::memory_order_relaxed);
189     }
190 #endif
191 
192 private:
193     using MemoryBlockHeader = ark::mem::freelist::MemoryBlockHeader;
194     using FreeListHeader = ark::mem::freelist::FreeListHeader;
195 
196     class alignas(sizeof(MemoryBlockHeader)) MemoryPoolHeader {
197     public:
198         void Initialize(size_t size, MemoryPoolHeader *prev, MemoryPoolHeader *next);
199 
200         ATTRIBUTE_NO_SANITIZE_ADDRESS
GetPrev()201         MemoryPoolHeader *GetPrev()
202         {
203             ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
204             MemoryPoolHeader *prev = prev_;
205             ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
206             return prev;
207         }
208 
209         ATTRIBUTE_NO_SANITIZE_ADDRESS
GetNext()210         MemoryPoolHeader *GetNext()
211         {
212             ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
213             MemoryPoolHeader *next = next_;
214             ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
215             return next;
216         }
217 
218         ATTRIBUTE_NO_SANITIZE_ADDRESS
SetPrev(MemoryPoolHeader * prev)219         void SetPrev(MemoryPoolHeader *prev)
220         {
221             ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
222             prev_ = prev;
223             ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
224         }
225 
226         ATTRIBUTE_NO_SANITIZE_ADDRESS
SetNext(MemoryPoolHeader * next)227         void SetNext(MemoryPoolHeader *next)
228         {
229             ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
230             next_ = next;
231             ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
232         }
233 
234         ATTRIBUTE_NO_SANITIZE_ADDRESS
GetFirstMemoryHeader()235         MemoryBlockHeader *GetFirstMemoryHeader()
236         {
237             return static_cast<MemoryBlockHeader *>(ToVoidPtr(ToUintPtr(this) + sizeof(MemoryPoolHeader)));
238         }
239 
240         ATTRIBUTE_NO_SANITIZE_ADDRESS
GetSize()241         size_t GetSize()
242         {
243             ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
244             size_t size = size_;
245             ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
246             return size;
247         }
248 
249     private:
250         MemoryPoolHeader *prev_ {nullptr};
251         MemoryPoolHeader *next_ {nullptr};
252         size_t size_ {0};
253     };
254 
255     static_assert((sizeof(MemoryPoolHeader) % sizeof(MemoryBlockHeader)) == 0);
256     static constexpr size_t FREELIST_DEFAULT_MEMORY_POOL_SIZE = PANDA_DEFAULT_ALLOCATOR_POOL_SIZE;
257     static constexpr size_t FREELIST_MAX_ALLOC_SIZE =
258         ((FREELIST_DEFAULT_MEMORY_POOL_SIZE - sizeof(MemoryPoolHeader)) / 2) - sizeof(MemoryBlockHeader);
259 
260     class SegregatedList {
261     public:
262         void AddMemoryBlock(FreeListHeader *freelistHeader);
263         FreeListHeader *FindMemoryBlock(size_t size);
264         void ReleaseFreeMemoryBlocks();
265         double CalculateExternalFragmentation();
266 
267     private:
268         static constexpr size_t SEGREGATED_LIST_SIZE = PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_SIZE;
269         static constexpr size_t SEGREGATED_LIST_FREE_BLOCK_RANGE =
270             (FREELIST_MAX_ALLOC_SIZE - FREELIST_ALLOCATOR_MIN_SIZE) / SEGREGATED_LIST_SIZE;
271         // If it is off, we insert memory in the list in the descending order.
272         static constexpr bool SEGREGATED_LIST_FAST_INSERT = PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_FAST_INSERT;
273         // If it is off, we try to find the most suitable block in the list.
274         static constexpr bool SEGREGATED_LIST_FAST_EXTRACT = PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_FAST_EXTRACT;
275         static_assert(((FREELIST_MAX_ALLOC_SIZE - FREELIST_ALLOCATOR_MIN_SIZE) % SEGREGATED_LIST_SIZE) == 0);
276 
277         size_t GetIndex(size_t size);
278 
GetFirstBlock(size_t index)279         FreeListHeader *GetFirstBlock(size_t index)
280         {
281             ASSERT(index < SEGREGATED_LIST_SIZE);
282             return freeMemoryBlocks_[index].GetNextFree();
283         }
284 
SetFirstBlock(size_t index,FreeListHeader * newHead)285         void SetFirstBlock(size_t index, FreeListHeader *newHead)
286         {
287             ASSERT(index < SEGREGATED_LIST_SIZE);
288             freeMemoryBlocks_[index].SetNextFree(newHead);
289         }
290 
291         FreeListHeader *FindTheMostSuitableBlockInOrderedList(size_t index, size_t size);
292 
293         FreeListHeader *FindSuitableBlockInList(FreeListHeader *head, size_t size);
294 
295         inline FreeListHeader *TryGetSuitableBlockBySize(FreeListHeader *head, size_t size, size_t index);
296 
297         // Each element of this array consists of memory blocks with size
298         // from (FREELIST_ALLOCATOR_MIN_SIZE + SEGREGATED_LIST_FREE_BLOCK_RANGE * (N))
299         // to   (FREELIST_ALLOCATOR_MIN_SIZE + SEGREGATED_LIST_FREE_BLOCK_RANGE * (N + 1)) (not inclusive)
300         // where N is the element number in this array.
301         std::array<FreeListHeader, SEGREGATED_LIST_SIZE> freeMemoryBlocks_;
302     };
303 
304     MemoryBlockHeader *GetFreeListMemoryHeader(void *mem);
305 
306     bool AllocatedByFreeListAllocator(void *mem);
307 
308     bool AllocatedByFreeListAllocatorUnsafe(void *mem);
309 
310     // Try to coalesce a memory block with the next and previous blocks.
311     FreeListHeader *TryToCoalescing(MemoryBlockHeader *memoryHeader);
312 
313     // Coalesce two neighboring memory blocks into one.
314     void CoalesceMemoryBlocks(MemoryBlockHeader *firstBlock, MemoryBlockHeader *secondBlock);
315 
316     /**
317      * @brief Divide memory_block into two - the first with first_block_size.
318      * @param memory_block - a pointer to the divided block, first_block_size - size of the first part
319      * @return the second memory block header
320      */
321     MemoryBlockHeader *SplitMemoryBlocks(MemoryBlockHeader *memoryBlock, size_t firstBlockSize);
322 
323     void AddToSegregatedList(FreeListHeader *freeListElement);
324 
325     MemoryBlockHeader *GetFromSegregatedList(size_t size, Alignment align);
326 
CanCreateNewBlockFromRemainder(MemoryBlockHeader * memory,size_t allocSize)327     bool CanCreateNewBlockFromRemainder(MemoryBlockHeader *memory, size_t allocSize)
328     {
329         // NOTE(aemelenko): Maybe add some more complex solution for this check
330         return (memory->GetSize() - allocSize) >= (FREELIST_ALLOCATOR_MIN_SIZE + sizeof(FreeListHeader));
331     }
332 
333     void FreeUnsafe(void *mem);
334 
335     SegregatedList segregatedList_;
336 
337     // Links to head and tail of the memory pool headers
338     MemoryPoolHeader *mempoolHead_ {nullptr};
339     MemoryPoolHeader *mempoolTail_ {nullptr};
340     SpaceType typeAllocation_;
341 
342     // RW lock which allows only one thread to change smth inside allocator
343     // NOTE: The MT support expects that we can't iterate
344     // and free (i.e. collect for an object scenario) simultaneously
345     LockConfigT allocFreeLock_;
346 
347     MemStatsType *memStats_;
348 
349 #ifdef PANDA_MEASURE_FRAGMENTATION
350     std::atomic_size_t allocatedBytes_ {0};
351 #endif
352 
353     friend class FreeListAllocatorTest;
354     template <InternalAllocatorConfig CONFIG>
355     friend class InternalAllocator;
356 };
357 
358 }  // namespace ark::mem
359 
360 #endif  // PANDA_MEM_FREELIST_ALLOCATOR_H
361