/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_MEM_FREELIST_ALLOCATOR_H
#define PANDA_MEM_FREELIST_ALLOCATOR_H

#include <array>

#include "libpandabase/mem/mem.h"
#include "libpandabase/mem/space.h"
#include "libpandabase/os/mutex.h"
#include "runtime/mem/freelist.h"
#include "runtime/mem/runslots.h"
#include "runtime/mem/lock_config_helper.h"

namespace panda::mem {

// TODO(aemelenko): Move these constants to compile options
// The minimal size handled by this allocator is the max slot size of the RunSlots allocator.
31 static constexpr size_t PANDA_FREELIST_ALLOCATOR_MIN_SIZE = RunSlots<>::MaxSlotSize(); 32 static constexpr size_t PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_SIZE = 16; 33 static constexpr bool PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_FAST_INSERT = false; 34 static constexpr bool PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_FAST_EXTRACT = false; 35 36 static constexpr Alignment FREELIST_DEFAULT_ALIGNMENT = DEFAULT_ALIGNMENT; 37 38 static constexpr size_t FREELIST_ALLOCATOR_MIN_SIZE = PANDA_FREELIST_ALLOCATOR_MIN_SIZE; 39 static_assert(FREELIST_ALLOCATOR_MIN_SIZE >= (sizeof(freelist::FreeListHeader) - sizeof(freelist::MemoryBlockHeader))); 40 41 class FreeListAllocatorLockConfig { 42 public: 43 using CommonLock = os::memory::RWLock; 44 using DummyLock = os::memory::DummyLock; 45 46 template <MTModeT MTMode> 47 using ParameterizedLock = typename LockConfigHelper<FreeListAllocatorLockConfig, MTMode>::Value; 48 }; 49 50 template <typename T, typename AllocConfigT, typename LockConfigT> 51 class FreeListAllocatorAdapter; 52 enum class InternalAllocatorConfig; 53 template <InternalAllocatorConfig Config> 54 class InternalAllocator; 55 56 // FreeList Allocator layout: 57 // 58 // |..........|xxxxxxxxxx|..........|........|00000000|..........|xxxxxxxxxx|..........|........|0000000000000000| 59 // |..........|xxxxxxxxxx|..........|..Links.|00000000|..........|xxxxxxxxxx|..........|..Links.|0000000000000000| 60 // |..Memory..|xOCCUPIEDx|..Memory..|...on...|00FREE00|..Memory..|xOCCUPIEDx|..Memory..|...on...|000000FREE000000| 61 // |..Header..|xxMEMORYxx|..Header..|..next/.|0MEMORY0|..Header..|xxMEMORYxx|..Header..|..next/.|00000MEMORY00000| 62 // |..........|xxxxxxxxxx|..........|..prev..|00000000|..........|xxxxxxxxxx|..........|..prev..|0000000000000000| 63 // |..........|xxxxxxxxxx|..........|..free..|00000000|..........|xxxxxxxxxx|..........|..free..|0000000000000000| 64 // 65 // Blocks with alignments: 66 // 1) Padding header stored just after the main block header: 67 // 
|..........||..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| 68 // |..........||..........||xxxxxxxxxxALIGNEDxxxxxxxxxxxxxx| 69 // |..Memory..||.Padding..||xxxxxxxxxxOCCUPIEDxxxxxxxxxxxxx| 70 // |..Header..||..Header..||xxxxxxxxxxxMEMORYxxxxxxxxxxxxxx| 71 // |..........||..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| 72 // |..........||..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| 73 // 74 // 2) We have padding size after the main block header: 75 // |..........|........|--------|..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| 76 // |..........|........|--------|..........||xxxxxxxxxxALIGNEDxxxxxxxxxxxxxx| 77 // |..Memory..|.Padding|--------|.Padding..||xxxxxxxxxxOCCUPIEDxxxxxxxxxxxxx| 78 // |..Header..|..Size..|--------|..Header..||xxxxxxxxxxxMEMORYxxxxxxxxxxxxxx| 79 // |..........|........|--------|..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| 80 // |..........|........|--------|..........||xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| 81 82 template <typename AllocConfigT, typename LockConfigT = FreeListAllocatorLockConfig::CommonLock> 83 class FreeListAllocator { 84 public: 85 explicit FreeListAllocator(MemStatsType *mem_stats, SpaceType type_allocation = SpaceType::SPACE_TYPE_OBJECT); 86 ~FreeListAllocator(); 87 NO_COPY_SEMANTIC(FreeListAllocator); 88 NO_MOVE_SEMANTIC(FreeListAllocator); 89 90 template <typename T, typename... Args> New(Args &&...args)91 [[nodiscard]] T *New(Args &&... args) 92 { 93 auto p = reinterpret_cast<void *>(Alloc(sizeof(T))); 94 new (p) T(std::forward<Args>(args)...); 95 return reinterpret_cast<T *>(p); 96 } 97 98 template <typename T> 99 [[nodiscard]] T *AllocArray(size_t arr_length); 100 101 template <bool need_lock = true> 102 [[nodiscard]] void *Alloc(size_t size, Alignment align = FREELIST_DEFAULT_ALIGNMENT); 103 104 void Free(void *mem); 105 106 void Collect(const GCObjectVisitor &death_checker_fn); 107 108 bool AddMemoryPool(void *mem, size_t size); 109 110 /** 111 * \brief Iterates over all objects allocated by this allocator. 
112 * @tparam MemVisitor 113 * @param object_visitor - function pointer or functor 114 */ 115 template <typename ObjectVisitor> 116 void IterateOverObjects(const ObjectVisitor &object_visitor); 117 118 /** 119 * \brief Iterates over all memory pools used by this allocator 120 * and remove them from the allocator structure. 121 * NOTE: This method can't be used to clear all internal allocator 122 * information and reuse the allocator somewhere else. 123 * @tparam MemVisitor 124 * @param mem_visitor - function pointer or functor 125 */ 126 template <typename MemVisitor> 127 void VisitAndRemoveAllPools(const MemVisitor &mem_visitor); 128 129 /** 130 * \brief Visit memory pools that can be returned to the system in this allocator 131 * and remove them from the allocator structure. 132 * @tparam MemVisitor 133 * @param mem_visitor - function pointer or functor 134 */ 135 template <typename MemVisitor> 136 void VisitAndRemoveFreePools(const MemVisitor &mem_visitor); 137 138 /** 139 * \brief Iterates over objects in the range inclusively. 
140 * @tparam MemVisitor 141 * @param mem_visitor - function pointer or functor 142 * @param left_border - a pointer to the first byte of the range 143 * @param right_border - a pointer to the last byte of the range 144 */ 145 template <typename MemVisitor> 146 void IterateOverObjectsInRange(const MemVisitor &mem_visitor, void *left_border, void *right_border); 147 148 FreeListAllocatorAdapter<void, AllocConfigT, LockConfigT> Adapter(); 149 150 /** 151 * \brief returns maximum size which can be allocated by this allocator 152 * @return 153 */ GetMaxSize()154 static constexpr size_t GetMaxSize() 155 { 156 return FREELIST_MAX_ALLOC_SIZE; 157 } 158 159 /** 160 * \brief returns minimum pool size which can be added to this allocator 161 * @return 162 */ GetMinPoolSize()163 static constexpr size_t GetMinPoolSize() 164 { 165 return FREELIST_DEFAULT_MEMORY_POOL_SIZE; 166 } 167 PoolAlign()168 static constexpr size_t PoolAlign() 169 { 170 return sizeof(MemoryBlockHeader); 171 } 172 173 bool ContainObject(const ObjectHeader *obj); 174 175 bool IsLive(const ObjectHeader *obj); 176 GetAllocatorType()177 static constexpr AllocatorType GetAllocatorType() 178 { 179 return AllocatorType::FREELIST_ALLOCATOR; 180 } 181 182 private: 183 using MemoryBlockHeader = panda::mem::freelist::MemoryBlockHeader; 184 using FreeListHeader = panda::mem::freelist::FreeListHeader; 185 186 class alignas(sizeof(MemoryBlockHeader)) MemoryPoolHeader { 187 public: 188 void Initialize(size_t size, MemoryPoolHeader *prev, MemoryPoolHeader *next); 189 190 ATTRIBUTE_NO_SANITIZE_ADDRESS GetPrev()191 MemoryPoolHeader *GetPrev() 192 { 193 ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 194 MemoryPoolHeader *prev = prev_; 195 ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 196 return prev; 197 } 198 199 ATTRIBUTE_NO_SANITIZE_ADDRESS GetNext()200 MemoryPoolHeader *GetNext() 201 { 202 ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 203 MemoryPoolHeader *next = next_; 204 
ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 205 return next; 206 } 207 208 ATTRIBUTE_NO_SANITIZE_ADDRESS SetPrev(MemoryPoolHeader * prev)209 void SetPrev(MemoryPoolHeader *prev) 210 { 211 ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 212 prev_ = prev; 213 ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 214 } 215 216 ATTRIBUTE_NO_SANITIZE_ADDRESS SetNext(MemoryPoolHeader * next)217 void SetNext(MemoryPoolHeader *next) 218 { 219 ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 220 next_ = next; 221 ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 222 } 223 224 ATTRIBUTE_NO_SANITIZE_ADDRESS GetFirstMemoryHeader()225 MemoryBlockHeader *GetFirstMemoryHeader() 226 { 227 return static_cast<MemoryBlockHeader *>(ToVoidPtr(ToUintPtr(this) + sizeof(MemoryPoolHeader))); 228 } 229 230 ATTRIBUTE_NO_SANITIZE_ADDRESS GetSize()231 size_t GetSize() 232 { 233 ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 234 size_t size = size_; 235 ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader)); 236 return size; 237 } 238 239 private: 240 MemoryPoolHeader *prev_ {nullptr}; 241 MemoryPoolHeader *next_ {nullptr}; 242 size_t size_ {0}; 243 }; 244 245 static_assert((sizeof(MemoryPoolHeader) % sizeof(MemoryBlockHeader)) == 0); 246 static constexpr size_t FREELIST_DEFAULT_MEMORY_POOL_SIZE = PANDA_DEFAULT_ALLOCATOR_POOL_SIZE; 247 static constexpr size_t FREELIST_MAX_ALLOC_SIZE = 248 ((FREELIST_DEFAULT_MEMORY_POOL_SIZE - sizeof(MemoryPoolHeader)) / 2) - sizeof(MemoryBlockHeader); 249 250 class SegregatedList { 251 public: 252 void AddMemoryBlock(FreeListHeader *freelist_header); 253 FreeListHeader *FindMemoryBlock(size_t size); 254 void ReleaseFreeMemoryBlocks(); 255 256 private: 257 static constexpr size_t SEGREGATED_LIST_SIZE = PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_SIZE; 258 static constexpr size_t SEGREGATED_LIST_FREE_BLOCK_RANGE = 259 (FREELIST_MAX_ALLOC_SIZE - FREELIST_ALLOCATOR_MIN_SIZE) / SEGREGATED_LIST_SIZE; 
260 // If it is off, we insert memory in the list in the descending order. 261 static constexpr bool SEGREGATED_LIST_FAST_INSERT = PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_FAST_INSERT; 262 // If it is off, we try to find the most suitable block in the list. 263 static constexpr bool SEGREGATED_LIST_FAST_EXTRACT = PANDA_FREELIST_ALLOCATOR_SEGREGATED_LIST_FAST_EXTRACT; 264 static_assert(((FREELIST_MAX_ALLOC_SIZE - FREELIST_ALLOCATOR_MIN_SIZE) % SEGREGATED_LIST_SIZE) == 0); 265 266 size_t GetIndex(size_t size); 267 GetFirstBlock(size_t index)268 FreeListHeader *GetFirstBlock(size_t index) 269 { 270 ASSERT(index < SEGREGATED_LIST_SIZE); 271 return free_memory_blocks_[index].GetNextFree(); 272 } 273 SetFirstBlock(size_t index,FreeListHeader * new_head)274 void SetFirstBlock(size_t index, FreeListHeader *new_head) 275 { 276 ASSERT(index < SEGREGATED_LIST_SIZE); 277 free_memory_blocks_[index].SetNextFree(new_head); 278 } 279 280 FreeListHeader *FindTheMostSuitableBlockInOrderedList(size_t index, size_t size); 281 282 // Each element of this array consists of memory blocks with size 283 // from (FREELIST_ALLOCATOR_MIN_SIZE + SEGREGATED_LIST_FREE_BLOCK_RANGE * (N)) 284 // to (FREELIST_ALLOCATOR_MIN_SIZE + SEGREGATED_LIST_FREE_BLOCK_RANGE * (N + 1)) (not inclusive) 285 // where N is the element number in this array. 286 std::array<FreeListHeader, SEGREGATED_LIST_SIZE> free_memory_blocks_; 287 }; 288 289 MemoryBlockHeader *GetFreeListMemoryHeader(void *mem); 290 291 bool AllocatedByFreeListAllocator(void *mem); 292 293 bool AllocatedByFreeListAllocatorUnsafe(void *mem); 294 295 // Try to coalesce a memory block with the next and previous blocks. 296 FreeListHeader *TryToCoalescing(MemoryBlockHeader *memory_header); 297 298 // Coalesce two neighboring memory blocks into one. 299 void CoalesceMemoryBlocks(MemoryBlockHeader *first_block, MemoryBlockHeader *second_block); 300 301 /** 302 * \brief Divide memory_block into two - the first with first_block_size. 
303 * @param memory_block - a pointer to the divided block, first_block_size - size of the first part 304 * @return the second memory block header 305 */ 306 MemoryBlockHeader *SplitMemoryBlocks(MemoryBlockHeader *memory_block, size_t first_block_size); 307 308 void AddToSegregatedList(FreeListHeader *free_list_element); 309 310 MemoryBlockHeader *GetFromSegregatedList(size_t size, Alignment align); 311 CanCreateNewBlockFromRemainder(MemoryBlockHeader * memory,size_t alloc_size)312 bool CanCreateNewBlockFromRemainder(MemoryBlockHeader *memory, size_t alloc_size) 313 { 314 // TODO(aemelenko): Maybe add some more complex solution for this check 315 return (memory->GetSize() - alloc_size) >= (FREELIST_ALLOCATOR_MIN_SIZE + sizeof(FreeListHeader)); 316 } 317 318 void FreeUnsafe(void *mem); 319 320 SegregatedList segregated_list_; 321 322 // Links to head and tail of the memory pool headers 323 MemoryPoolHeader *mempool_head_ {nullptr}; 324 MemoryPoolHeader *mempool_tail_ {nullptr}; 325 SpaceType type_allocation_; 326 327 // RW lock which allows only one thread to change smth inside allocator 328 // NOTE: The MT support expects that we can't iterate 329 // and free (i.e. collect for an object scenario) simultaneously 330 LockConfigT alloc_free_lock_; 331 332 MemStatsType *mem_stats_; 333 334 friend class FreeListAllocatorTest; 335 friend class HybridObjectAllocatorTest; 336 template <InternalAllocatorConfig Config> 337 friend class InternalAllocator; 338 }; 339 340 } // namespace panda::mem 341 342 #endif // PANDA_MEM_FREELIST_ALLOCATOR_H 343