/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_H
#define PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_H

#include <limits>

#include "libpandabase/macros.h"
#include "libpandabase/mem/mem.h"
#include "libpandabase/mem/pool_manager.h"
#include "libpandabase/mem/space.h"
#include "libpandabase/utils/logger.h"
#include "runtime/mem/runslots.h"
#include "runtime/mem/lock_config_helper.h"

namespace panda::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_HUMONGOUS_OBJ_ALLOCATOR(level) LOG(level, ALLOC) << "HumongousObjAllocator: "

// TODO(aemelenko): Move these constants to compile options
static constexpr size_t PANDA_HUMONGOUS_OBJ_ALLOCATOR_RESERVED_MEM_MAX_POOLS_AMOUNT = 0;
static constexpr size_t PANDA_HUMONGOUS_OBJ_ALLOCATOR_RESERVED_MEM_MAX_POOL_SIZE = 8_MB;

class HumongousObjAllocatorLockConfig {
public:
    using CommonLock = os::memory::RWLock;
    using DummyLock = os::memory::DummyLock;

    template <MTModeT MTMode>
    using ParameterizedLock = typename LockConfigHelper<HumongousObjAllocatorLockConfig, MTMode>::Value;
};

template <typename T, typename AllocConfigT, typename LockConfigT>
class HumongousObjAllocatorAdapter;
enum class InternalAllocatorConfig;
template <InternalAllocatorConfig Config>
class InternalAllocator;

/**
 * HumongousObjAllocator is an allocator for huge objects, each of which
 * occupies a whole memory pool of its own.
 */
template <typename AllocConfigT, typename LockConfigT = HumongousObjAllocatorLockConfig::CommonLock>
class HumongousObjAllocator {
public:
    explicit HumongousObjAllocator(MemStatsType *mem_stats,
                                   SpaceType type_allocation = SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    ~HumongousObjAllocator();
    NO_COPY_SEMANTIC(HumongousObjAllocator);
    NO_MOVE_SEMANTIC(HumongousObjAllocator);

    template <typename T, typename... Args>
    [[nodiscard]] T *New(Args &&... args)
    {
        auto p = reinterpret_cast<void *>(Alloc(sizeof(T)));
        new (p) T(std::forward<Args>(args)...);
        return reinterpret_cast<T *>(p);
    }
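    // A minimal usage sketch (illustration only; `AllocConfig`, `allocator`, `pool_mem`,
    // `pool_size` and `LargeObject` are hypothetical names, and in the runtime this allocator
    // is normally driven by the object allocation path rather than called directly):
    //
    //     HumongousObjAllocator<AllocConfig> allocator(mem_stats);
    //     allocator.AddMemoryPool(pool_mem, pool_size);  // pool_mem must be page aligned
    //     auto *obj = allocator.New<LargeObject>(/* ctor args */);
    //     allocator.Free(obj);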
    template <typename T>
    [[nodiscard]] T *AllocArray(size_t arr_length);

    template <bool need_lock = true>
    [[nodiscard]] void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    void Free(void *mem);

    void Collect(const GCObjectVisitor &death_checker_fn);

    // It is essential that mem is page aligned
    bool AddMemoryPool(void *mem, size_t size);

    /**
     * \brief Iterates over all objects allocated by this allocator.
     * @tparam ObjectVisitor
     * @param object_visitor - function pointer or functor
     */
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &object_visitor);

    /**
     * \brief Iterates over all memory pools used by this allocator
     * and removes them from the allocator structure.
     * NOTE: This method can't be used to clear all internal allocator
     * information and reuse the allocator somewhere else.
     * @tparam MemVisitor
     * @param mem_visitor - function pointer or functor
     */
    template <typename MemVisitor>
    void VisitAndRemoveAllPools(const MemVisitor &mem_visitor);

    /**
     * \brief Visits memory pools that can be returned to the system
     * and removes them from the allocator structure.
     * @tparam MemVisitor
     * @param mem_visitor - function pointer or functor
     */
    template <typename MemVisitor>
    void VisitAndRemoveFreePools(const MemVisitor &mem_visitor);

    /**
     * \brief Iterates over objects in the range inclusively.
     * @tparam MemVisitor
     * @param mem_visitor - function pointer or functor
     * @param left_border - a pointer to the first byte of the range
     * @param right_border - a pointer to the last byte of the range
     */
    template <typename MemVisitor>
    void IterateOverObjectsInRange(const MemVisitor &mem_visitor, void *left_border, void *right_border);

    HumongousObjAllocatorAdapter<void, AllocConfigT, LockConfigT> Adapter();

    /**
     * \brief Returns the maximum size which can be allocated by this allocator.
     * @return
     */
    static constexpr size_t GetMaxSize()
    {
        return HUMONGOUS_OBJ_ALLOCATOR_MAX_SIZE;
    }

    /**
     * \brief Returns the minimum pool size required to allocate an object of \param obj_size bytes.
     * @return
     */
    static constexpr size_t GetMinPoolSize(size_t obj_size)
    {
        // Note: this is not the smallest possible pool size,
        // because we don't take the real object alignment value into account
        return AlignUp(obj_size + sizeof(MemoryPoolHeader) + GetAlignmentInBytes(LOG_ALIGN_MAX),
                       PANDA_POOL_ALIGNMENT_IN_BYTES);
    }
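    // Worked example (illustrative only; PANDA_POOL_ALIGNMENT_IN_BYTES is platform-defined,
    // 256_KB is just an assumed value for the arithmetic):
    //   GetMinPoolSize(4_MB + 1)
    //     = AlignUp(4_MB + 1 + sizeof(MemoryPoolHeader) + GetAlignmentInBytes(LOG_ALIGN_MAX), 256_KB)
    //     = 4_MB + 256_KB
    // so the object gets a pool slightly larger than its own size, rounded up to the pool alignment.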
    bool ContainObject(const ObjectHeader *obj);

    bool IsLive(const ObjectHeader *obj);

    static constexpr AllocatorType GetAllocatorType()
    {
        return AllocatorType::HUMONGOUS_ALLOCATOR;
    }

private:
#ifndef NDEBUG
    // Only for debug purposes: helps to catch anomalous allocations
    static constexpr size_t HUMONGOUS_OBJ_ALLOCATOR_MAX_SIZE = 2_GB;
#else
    static constexpr size_t HUMONGOUS_OBJ_ALLOCATOR_MAX_SIZE = std::numeric_limits<size_t>::max();
#endif

    class MemoryPoolHeader {
    public:
        void Initialize(size_t size, MemoryPoolHeader *prev, MemoryPoolHeader *next);

        void Alloc(size_t size, Alignment align);

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        MemoryPoolHeader *GetPrev()
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            MemoryPoolHeader *prev = prev_;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            return prev;
        }

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        MemoryPoolHeader *GetNext()
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            MemoryPoolHeader *next = next_;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            return next;
        }

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        void SetPrev(MemoryPoolHeader *prev)
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            prev_ = prev;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
        }

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        void SetNext(MemoryPoolHeader *next)
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            next_ = next;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
        }

        void PopHeader();

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        size_t GetPoolSize()
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            size_t size = pool_size_;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            return size;
        }

        ATTRIBUTE_NO_SANITIZE_ADDRESS
        void *GetMemory()
        {
            ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            void *addr = mem_addr_;
            ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
            return addr;
        }

    private:
        MemoryPoolHeader *prev_ {nullptr};
        MemoryPoolHeader *next_ {nullptr};
        size_t pool_size_ {0};
        void *mem_addr_ {nullptr};
    };

    static constexpr size_t PAGE_SIZE_MASK = ~(PAGE_SIZE - 1);

    class MemoryPoolList {
    public:
        void Insert(MemoryPoolHeader *pool);

        void Pop(MemoryPoolHeader *pool);

        /**
         * \brief Tries to find a pool suitable for an object of \param size bytes.
         * @return a pointer to the pool header on success, nullptr otherwise.
         */
        MemoryPoolHeader *FindSuitablePool(size_t size);

        /**
         * \brief Iterates over pools in this list and pops all the elements.
         */
        template <typename MemVisitor>
        void IterateAndPopOverPools(const MemVisitor &mem_visitor);

        MemoryPoolHeader *GetListHead()
        {
            return head_;
        }

    private:
        bool IsInThisList(MemoryPoolHeader *pool);

        MemoryPoolHeader *head_ {nullptr};
    };

    // The ReservedMemoryPools class is used to prevent a ping-pong effect.
    // Elements in ReservedMemoryPools are sorted in ascending order.
    // When we free a pool, we first try to insert it into ReservedMemoryPools:
    // - If the pool is too big for ReservedMemoryPools, we skip the insertion.
    // - If the pool is bigger than the smallest pool in ReservedMemoryPools, we insert it.
    class ReservedMemoryPools : public MemoryPoolList {
    public:
        /**
         * \brief Tries to insert \param pool into ReservedMemoryPools.
         * @return \param pool on failure; nullptr or the crowded-out pool on success.
         */
        MemoryPoolHeader *TryToInsert(MemoryPoolHeader *pool);

        void Pop(MemoryPoolHeader *pool)
        {
            elements_count_--;
            LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG)
                << "Pop from Reserved list. Now, there are " << elements_count_ << " elements in it.";
            MemoryPoolList::Pop(pool);
        }

    private:
        static constexpr size_t MAX_POOL_SIZE = PANDA_HUMONGOUS_OBJ_ALLOCATOR_RESERVED_MEM_MAX_POOL_SIZE;
        static constexpr size_t MAX_POOLS_AMOUNT = PANDA_HUMONGOUS_OBJ_ALLOCATOR_RESERVED_MEM_MAX_POOLS_AMOUNT;

        void SortedInsert(MemoryPoolHeader *pool);

        size_t elements_count_ {0};
    };
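    // Free-path sketch (illustration only; the actual policy lives in InsertPool / FreeUnsafe):
    // a freed pool is first offered to the reserved list, and whatever TryToInsert returns -
    // the pool itself on failure or a crowded-out smaller pool - is expected to end up in
    // free_pools_list_:
    //
    //     MemoryPoolHeader *rejected = reserved_pools_list_.TryToInsert(pool);
    //     if (rejected != nullptr) {
    //         free_pools_list_.Insert(rejected);
    //     }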
    void ReleaseUnusedPagesOnAlloc(MemoryPoolHeader *memory_pool, size_t alloc_size);

    void InsertPool(MemoryPoolHeader *header);

    void FreeUnsafe(void *mem);

    bool AllocatedByHumongousObjAllocator(void *mem);

    bool AllocatedByHumongousObjAllocatorUnsafe(void *mem);

    MemoryPoolList occupied_pools_list_;
    ReservedMemoryPools reserved_pools_list_;
    MemoryPoolList free_pools_list_;
    SpaceType type_allocation_;

    // RW lock which allows only one thread to change anything inside the allocator.
    // NOTE: The MT support expects that we can't iterate
    // and free (i.e. collect for an object scenario) simultaneously
    LockConfigT alloc_free_lock_;

    MemStatsType *mem_stats_;

    friend class HumongousObjAllocatorTest;
    friend class HybridObjectAllocatorTest;
    template <InternalAllocatorConfig Config>
    friend class InternalAllocator;
};

#undef LOG_HUMONGOUS_OBJ_ALLOCATOR

}  // namespace panda::mem

#endif  // PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_H