/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_H
#define PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_H

#include <limits>

#include "libpandabase/macros.h"
#include "libpandabase/mem/mem.h"
#include "libpandabase/mem/pool_manager.h"
#include "libpandabase/mem/space.h"
#include "libpandabase/utils/logger.h"
#include "runtime/mem/runslots.h"
#include "runtime/mem/lock_config_helper.h"

namespace ark::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_HUMONGOUS_OBJ_ALLOCATOR(level) LOG(level, ALLOC) << "HumongousObjAllocator: "

// NOTE(aemelenko): Move these constants to compile options
static constexpr size_t PANDA_HUMONGOUS_OBJ_ALLOCATOR_RESERVED_MEM_MAX_POOLS_AMOUNT = 0;
static constexpr size_t PANDA_HUMONGOUS_OBJ_ALLOCATOR_RESERVED_MEM_MAX_POOL_SIZE = 8_MB;

class HumongousObjAllocatorLockConfig {
public:
    using CommonLock = os::memory::RWLock;
    using DummyLock = os::memory::DummyLock;

    template <MTModeT MT_MODE>
    using ParameterizedLock = typename LockConfigHelper<HumongousObjAllocatorLockConfig, MT_MODE>::Value;
};

template <typename T, typename AllocConfigT, typename LockConfigT>
class HumongousObjAllocatorAdapter;
enum class InternalAllocatorConfig;
template <InternalAllocatorConfig CONFIG>
class InternalAllocator;

/**
 * HumongousObjAllocator is an allocator for huge objects,
 * each of which occupies a whole memory pool of its own.
 */
template <typename AllocConfigT, typename LockConfigT = HumongousObjAllocatorLockConfig::CommonLock>
class HumongousObjAllocator {
public:
    explicit HumongousObjAllocator(MemStatsType *memStats,
                                   SpaceType typeAllocation = SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    ~HumongousObjAllocator();
    NO_COPY_SEMANTIC(HumongousObjAllocator);
    NO_MOVE_SEMANTIC(HumongousObjAllocator);

    template <typename T, typename... Args>
    [[nodiscard]] T *New(Args &&...args)
    {
        auto p = reinterpret_cast<void *>(Alloc(sizeof(T)));
        new (p) T(std::forward<Args>(args)...);
        return reinterpret_cast<T *>(p);
    }

    template <typename T>
    [[nodiscard]] T *AllocArray(size_t arrLength);

    template <bool NEED_LOCK = true>
    [[nodiscard]] void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    void Free(void *mem);

    void Collect(const GCObjectVisitor &deathCheckerFn);

    // It is essential that mem is page aligned
    bool AddMemoryPool(void *mem, size_t size);
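    // Illustrative usage sketch (not part of this header's API; AllocConfig, poolMem,
    // poolSize and objSize are placeholder names introduced for the example):
    //
    //   HumongousObjAllocator<AllocConfig> allocator(memStats);
    //   // poolMem must be page aligned and at least GetMinPoolSize(objSize) bytes
    //   // if it should serve an allocation of objSize bytes:
    //   allocator.AddMemoryPool(poolMem, poolSize);
    //   void *obj = allocator.Alloc(objSize);
    //   ...
    //   allocator.Free(obj);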
    /**
     * @brief Iterates over all objects allocated by this allocator.
     * @tparam ObjectVisitor
     * @param objectVisitor - function pointer or functor
     */
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &objectVisitor);

    /**
     * @brief Iterates over all memory pools used by this allocator
     * and removes them from the allocator structure.
     * NOTE: This method does not clear all internal allocator state,
     * so it cannot be used to reset the allocator and reuse it somewhere else.
     * @tparam MemVisitor
     * @param memVisitor - function pointer or functor
     */
    template <typename MemVisitor>
    void VisitAndRemoveAllPools(const MemVisitor &memVisitor);

    /**
     * @brief Visits the memory pools of this allocator that can be returned to the system
     * and removes them from the allocator structure.
     * @tparam MemVisitor
     * @param memVisitor - function pointer or functor
     */
    template <typename MemVisitor>
    void VisitAndRemoveFreePools(const MemVisitor &memVisitor);

    /**
     * @brief Iterates over objects in the range inclusively.
     * @tparam MemVisitor
     * @param memVisitor - function pointer or functor
     * @param leftBorder - a pointer to the first byte of the range
     * @param rightBorder - a pointer to the last byte of the range
     */
    template <typename MemVisitor>
    void IterateOverObjectsInRange(const MemVisitor &memVisitor, void *leftBorder, void *rightBorder);

    HumongousObjAllocatorAdapter<void, AllocConfigT, LockConfigT> Adapter();

    /**
     * @brief Returns the maximum size which can be allocated by this allocator.
     * @return maximum allocation size in bytes
     */
    static constexpr size_t GetMaxSize()
    {
        return HUMONGOUS_OBJ_ALLOCATOR_MAX_SIZE;
    }

    /**
     * @brief Returns the minimum pool size required to allocate an object of @param objSize bytes.
     * @return minimum pool size in bytes
     */
    static constexpr size_t GetMinPoolSize(size_t objSize)
    {
        // Note: this is not the smallest possible pool size,
        // because the real object alignment value is not taken into account.
        return AlignUp(objSize + sizeof(MemoryPoolHeader) + GetAlignmentInBytes(LOG_ALIGN_MAX),
                       PANDA_POOL_ALIGNMENT_IN_BYTES);
    }
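    // Worked example for GetMinPoolSize() (illustrative; the concrete numbers depend on
    // the platform constants, so only the instantiated formula is shown):
    //
    //   GetMinPoolSize(4_MB) == AlignUp(4_MB + sizeof(MemoryPoolHeader)
    //                                        + GetAlignmentInBytes(LOG_ALIGN_MAX),
    //                                   PANDA_POOL_ALIGNMENT_IN_BYTES)
    //
    // i.e. the object size plus the in-pool header and worst-case alignment padding,
    // rounded up to the pool alignment. A caller that wants to allocate a 4_MB object
    // is expected to provide a pool of at least this size via AddMemoryPool().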
    bool ContainObject(const ObjectHeader *obj);

    bool IsLive(const ObjectHeader *obj);

    static constexpr AllocatorType GetAllocatorType()
    {
        return AllocatorType::HUMONGOUS_ALLOCATOR;
    }

private:
#ifndef NDEBUG
    // Only for debug purposes, to find anomalous allocations
    static constexpr size_t HUMONGOUS_OBJ_ALLOCATOR_MAX_SIZE = 2_GB;
#else
    static constexpr size_t HUMONGOUS_OBJ_ALLOCATOR_MAX_SIZE = std::numeric_limits<size_t>::max();
#endif

    class MemoryPoolHeader {
    public:
        void Initialize(size_t size, MemoryPoolHeader *prev, MemoryPoolHeader *next);

        void Alloc(size_t size, Alignment align);

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        MemoryPoolHeader *GetPrev()
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            MemoryPoolHeader *prev = prev_;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            return prev;
        }

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        MemoryPoolHeader *GetNext()
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            MemoryPoolHeader *next = next_;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            return next;
        }

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        void SetPrev(MemoryPoolHeader *prev)
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            prev_ = prev;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
        }

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        void SetNext(MemoryPoolHeader *next)
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            next_ = next;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
        }

        void PopHeader();

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        size_t GetPoolSize()
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            size_t size = poolSize_;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            return size;
        }

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        void *GetMemory()
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            void *addr = memAddr_;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            return addr;
        }

    private:
        MemoryPoolHeader *prev_ {nullptr};
        MemoryPoolHeader *next_ {nullptr};
        size_t poolSize_ {0};
        void *memAddr_ {nullptr};
    };

    static constexpr size_t PAGE_SIZE_MASK = ~(PAGE_SIZE - 1);

    class MemoryPoolList {
    public:
        void Insert(MemoryPoolHeader *pool);

        void Pop(MemoryPoolHeader *pool);

        /**
         * @brief Tries to find a pool suitable for an object of @param size bytes.
         * @return a pointer to the pool header on success, nullptr otherwise.
         */
        MemoryPoolHeader *FindSuitablePool(size_t size);

        /// @brief Iterates over pools in this list and pops all the elements.
        template <typename MemVisitor>
        void IterateAndPopOverPools(const MemVisitor &memVisitor);

        MemoryPoolHeader *GetListHead()
        {
            return head_;
        }

    private:
        bool IsInThisList(MemoryPoolHeader *pool);

        MemoryPoolHeader *head_ {nullptr};
    };

    // The ReservedMemoryPools class is used to prevent the ping-pong effect.
    // Elements in ReservedMemoryPools are sorted in ascending order.
    // When we free a pool, we first try to insert it into ReservedMemoryPools:
    // - If the pool is too big for ReservedMemoryPools, we skip the insertion.
    // - If the pool is bigger than the smallest pool in ReservedMemoryPools, we insert it.
    class ReservedMemoryPools : public MemoryPoolList {
    public:
        /**
         * @brief Tries to insert @param pool into ReservedMemoryPools.
         * @return @param pool on failure; nullptr or the crowded-out pool on success.
         */
        MemoryPoolHeader *TryToInsert(MemoryPoolHeader *pool);

        void Pop(MemoryPoolHeader *pool)
        {
            elementsCount_--;
            LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG)
                << "Pop from Reserved list. Now, there are " << elementsCount_ << " elements in it.";
            MemoryPoolList::Pop(pool);
        }

    private:
        static constexpr size_t MAX_POOL_SIZE = PANDA_HUMONGOUS_OBJ_ALLOCATOR_RESERVED_MEM_MAX_POOL_SIZE;
        static constexpr size_t MAX_POOLS_AMOUNT = PANDA_HUMONGOUS_OBJ_ALLOCATOR_RESERVED_MEM_MAX_POOLS_AMOUNT;

        void SortedInsert(MemoryPoolHeader *pool);

        size_t elementsCount_ {0};
    };
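    // Illustrative flow for a freed pool (a sketch of the policy described above; where a
    // rejected or crowded-out pool ends up is an assumption about the surrounding
    // bookkeeping, and a configuration with MAX_POOLS_AMOUNT > 0 is assumed):
    //
    //   Free(mem)                                  // the pool P backing mem becomes unused
    //     -> TryToInsert(P) on the reserved list:
    //          - P is larger than MAX_POOL_SIZE       : returns P, P goes to the free pools list
    //          - the reserved list has room           : keeps P (sorted ascending), returns nullptr
    //          - the list is full, P > smallest entry : keeps P and returns the crowded-out pool,
    //                                                   which goes to the free pools list instead
    //
    // Keeping a few recently freed pools in reserve avoids the ping-pong of returning a pool
    // to the system and immediately requesting a similarly sized one again.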
    void ReleaseUnusedPagesOnAlloc(MemoryPoolHeader *memoryPool, size_t allocSize);

    void InsertPool(MemoryPoolHeader *header);

    void FreeUnsafe(void *mem);

    bool AllocatedByHumongousObjAllocator(void *mem);

    bool AllocatedByHumongousObjAllocatorUnsafe(void *mem);

    MemoryPoolList occupiedPoolsList_;
    ReservedMemoryPools reservedPoolsList_;
    MemoryPoolList freePoolsList_;
    SpaceType typeAllocation_;

    // RW lock which allows only one thread to change anything inside the allocator.
    // NOTE: The MT support assumes that we never iterate
    // and free (i.e. collect in an object scenario) simultaneously.
    LockConfigT allocFreeLock_;

    MemStatsType *memStats_;

    friend class HumongousObjAllocatorTest;
    template <InternalAllocatorConfig CONFIG>
    friend class InternalAllocator;
};

#undef LOG_HUMONGOUS_OBJ_ALLOCATOR

}  // namespace ark::mem

#endif  // PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_H