/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_INL_H
#define PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_INL_H

#include "runtime/mem/alloc_config.h"
#include "runtime/mem/humongous_obj_allocator.h"
#include "runtime/mem/object_helpers.h"

namespace panda::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_HUMONGOUS_OBJ_ALLOCATOR(level) LOG(level, ALLOC) << "HumongousObjAllocator: "

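// Implementation notes (editorial summary, derived from the code below):
// each humongous allocation occupies an entire page-aligned memory pool that
// starts with a MemoryPoolHeader, so the header for any object can be recovered
// by masking the object address with PAGE_SIZE_MASK. Pools move between three
// intrusive doubly-linked lists: occupied_pools_list_ (live allocations),
// reserved_pools_list_ (a small, size-sorted cache of freed pools), and
// free_pools_list_ (freed pools that did not fit into the reserved cache).
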
template <typename AllocConfigT, typename LockConfigT>
HumongousObjAllocator<AllocConfigT, LockConfigT>::HumongousObjAllocator(MemStatsType *mem_stats,
                                                                        SpaceType type_allocation)
    : type_allocation_(type_allocation), mem_stats_(mem_stats)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Initializing HumongousObjAllocator";
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Initializing HumongousObjAllocator finished";
}

template <typename AllocConfigT, typename LockConfigT>
HumongousObjAllocator<AllocConfigT, LockConfigT>::~HumongousObjAllocator()
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Destroying HumongousObjAllocator";
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Destroying HumongousObjAllocator finished";
}

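// Allocation strategy (editorial summary): compute a conservative pool size that
// covers the payload, the pool header, and worst-case alignment padding; then
// take the first sufficiently large pool from the reserved list, falling back to
// the free list. The chosen pool is moved to the occupied list and its unused
// trailing pages are returned to the OS.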
template <typename AllocConfigT, typename LockConfigT>
template <bool need_lock>
void *HumongousObjAllocator<AllocConfigT, LockConfigT>::Alloc(const size_t size, const Alignment align)
{
    os::memory::WriteLockHolder<LockConfigT, need_lock> wlock(alloc_free_lock_);
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to allocate memory with size " << size;

    // Check that we will be able to recover the memory header from a memory pointer by using the PAGE_SIZE_MASK mask
    if (UNLIKELY(PAGE_SIZE <= sizeof(MemoryPoolHeader) + GetAlignmentInBytes(align))) {
        ASSERT(PAGE_SIZE > sizeof(MemoryPoolHeader) + GetAlignmentInBytes(align));
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "The alignment is too big for this allocator. Return nullptr.";
        return nullptr;
    }

    // TODO(aemelenko): this is quite a rough approximation.
    // We can save about sizeof(MemoryPoolHeader) / 2 bytes here
    // (BTW, it is not so much for MB allocations)
    size_t aligned_size = size + sizeof(MemoryPoolHeader) + GetAlignmentInBytes(align);

    void *mem = nullptr;

    if (UNLIKELY(aligned_size > HUMONGOUS_OBJ_ALLOCATOR_MAX_SIZE)) {
        // The size is too big
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "The size is too big for this allocator. Return nullptr.";
        return nullptr;
    }

    // First, try to find a suitable pool in the reserved pools list
    MemoryPoolHeader *mem_header = reserved_pools_list_.FindSuitablePool(aligned_size);
    if (mem_header != nullptr) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Found a reserved memory pool with size " << mem_header->GetPoolSize();
        reserved_pools_list_.Pop(mem_header);
        mem_header->Alloc(size, align);
        mem = mem_header->GetMemory();
    } else {
        mem_header = free_pools_list_.FindSuitablePool(aligned_size);
        if (mem_header != nullptr) {
            LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Found a free memory pool with size " << mem_header->GetPoolSize();
            free_pools_list_.Pop(mem_header);
            mem_header->Alloc(size, align);
            mem = mem_header->GetMemory();
        } else {
            LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Can't find memory for this size";
            return nullptr;
        }
    }
    occupied_pools_list_.Insert(mem_header);
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Allocated memory at addr " << std::hex << mem;
    AllocConfigT::OnAlloc(mem_header->GetPoolSize(), type_allocation_, mem_stats_);
    ASAN_UNPOISON_MEMORY_REGION(mem, size);
    AllocConfigT::MemoryInit(mem, size);
    ReleaseUnusedPagesOnAlloc(mem_header, size);
    return mem;
}

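// Illustrative usage sketch (editorial, hypothetical caller code, not part of
// this file; the Config/Lock parameters and the concrete SpaceType value are
// assumptions):
//   HumongousObjAllocator<Config, Lock> allocator(mem_stats, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
//   allocator.AddMemoryPool(pool_mem, pool_size);  // pool_mem must be PAGE_SIZE-aligned
//   void *obj = allocator.Alloc(object_size, DEFAULT_ALIGNMENT);
//   if (obj != nullptr) {
//       allocator.Free(obj);  // the pool returns to the reserved or free list
//   }
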
template <typename AllocConfigT, typename LockConfigT>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::Free(void *mem)
{
    os::memory::WriteLockHolder wlock(alloc_free_lock_);
    FreeUnsafe(mem);
}

template <typename AllocConfigT, typename LockConfigT>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::FreeUnsafe(void *mem)
{
    if (UNLIKELY(mem == nullptr)) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to free memory at invalid addr 0";
        return;
    }
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to free memory at addr " << std::hex << mem;
#ifndef NDEBUG
    if (!AllocatedByHumongousObjAllocatorUnsafe(mem)) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to free memory not allocated by this allocator";
        return;
    }
#endif  // !NDEBUG

    // Each memory pool is PAGE_SIZE-aligned, so to get the header we just need to align the pointer down
    auto mem_header = static_cast<MemoryPoolHeader *>(ToVoidPtr(ToUintPtr(mem) & PAGE_SIZE_MASK));
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "It is a MemoryPoolHeader with addr " << std::hex << mem_header
                                       << " and size " << std::dec << mem_header->GetPoolSize();
    occupied_pools_list_.Pop(mem_header);
    AllocConfigT::OnFree(mem_header->GetPoolSize(), type_allocation_, mem_stats_);
    ASAN_POISON_MEMORY_REGION(mem_header, mem_header->GetPoolSize());
    InsertPool(mem_header);
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Freed memory at addr " << std::hex << mem;
}

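// Note (editorial): Collect frees dead objects via FreeUnsafe rather than Free,
// since the visitor runs while IterateOverObjects already holds the write lock.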
template <typename AllocConfigT, typename LockConfigT>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::Collect(const GCObjectVisitor &death_checker_fn)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Collecting started";
    IterateOverObjects([&](ObjectHeader *object_header) {
        if (death_checker_fn(object_header) == ObjectStatus::DEAD_OBJECT) {
            LOG(DEBUG, GC) << "DELETE OBJECT " << GetDebugInfoAboutObject(object_header);
            FreeUnsafe(object_header);
        }
    });
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Collecting finished";
}

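// Note (editorial): the list head is fetched under a read lock, then the write
// lock is re-acquired for each iteration step; the next pool is cached before
// visiting, so the visitor may safely pop or free the current pool.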
template <typename AllocConfigT, typename LockConfigT>
template <typename ObjectVisitor>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::IterateOverObjects(const ObjectVisitor &object_visitor)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Iterating over objects started";
    MemoryPoolHeader *current_pool = nullptr;
    {
        os::memory::ReadLockHolder rlock(alloc_free_lock_);
        current_pool = occupied_pools_list_.GetListHead();
    }
    while (current_pool != nullptr) {
        os::memory::WriteLockHolder wlock(alloc_free_lock_);
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "  check pool at addr " << std::hex << current_pool;
        MemoryPoolHeader *next = current_pool->GetNext();
        object_visitor(static_cast<ObjectHeader *>(current_pool->GetMemory()));
        current_pool = next;
    }
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Iterating over objects finished";
}

template <typename AllocConfigT, typename LockConfigT>
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::AddMemoryPool(void *mem, size_t size)
{
    os::memory::WriteLockHolder wlock(alloc_free_lock_);
    ASSERT(mem != nullptr);
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Add memory pool to HumongousObjAllocator from " << std::hex << mem
                                       << " with size " << std::dec << size;
    // The pool must be PAGE_SIZE-aligned; otherwise headers cannot be recovered by pointer masking
    if (AlignUp(ToUintPtr(mem), PAGE_SIZE) != ToUintPtr(mem)) {
        return false;
    }
    auto mempool_header = static_cast<MemoryPoolHeader *>(mem);
    mempool_header->Initialize(size, nullptr, nullptr);
    InsertPool(mempool_header);
    ASAN_POISON_MEMORY_REGION(mem, size);
    return true;
}

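// Worked example (editorial, assuming 4 KiB pages): for a pool at 0x10000 of
// size 0x5000 whose payload starts at 0x10040, an allocation of 0x1200 bytes
// ends at 0x11240; first_free_page = AlignUp(0x11240) = 0x12000 and
// end_of_last_free_page = AlignDown(0x15000) = 0x15000, so the three pages in
// [0x12000, 0x15000) are released back to the OS.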
template <typename AllocConfigT, typename LockConfigT>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::ReleaseUnusedPagesOnAlloc(MemoryPoolHeader *memory_pool,
                                                                                 size_t alloc_size)
{
    ASSERT(memory_pool != nullptr);
    uintptr_t alloc_addr = ToUintPtr(memory_pool->GetMemory());
    uintptr_t pool_addr = ToUintPtr(memory_pool);
    size_t pool_size = memory_pool->GetPoolSize();
    uintptr_t first_free_page = AlignUp(alloc_addr + alloc_size, os::mem::GetPageSize());
    uintptr_t end_of_last_free_page = os::mem::AlignDownToPageSize(pool_addr + pool_size);
    if (first_free_page < end_of_last_free_page) {
        os::mem::ReleasePages(first_free_page, end_of_last_free_page);
    }
}

template <typename AllocConfigT, typename LockConfigT>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::InsertPool(MemoryPoolHeader *header)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to insert pool with size " << header->GetPoolSize()
                                       << " in Reserved memory";
    // Try to insert it into ReservedMemoryPools
    MemoryPoolHeader *mem_header = reserved_pools_list_.TryToInsert(header);
    if (mem_header == nullptr) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Successfully inserted in Reserved memory";
        // We successfully inserted the header into ReservedMemoryPools
        return;
    }
    // mem_header now holds either a pool crowded out of the reserved list or the original "header" argument.
    // Insert it into the free pools list
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Couldn't insert into Reserved memory. Insert in free pools";
    free_pools_list_.Insert(mem_header);
}

template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::VisitAndRemoveAllPools(const MemVisitor &mem_visitor)
{
    // This method returns all pools to the system.
    // Therefore, delete all objects to clear all external dependencies
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Clear all objects inside the allocator";
    os::memory::WriteLockHolder wlock(alloc_free_lock_);
    occupied_pools_list_.IterateAndPopOverPools(mem_visitor);
    reserved_pools_list_.IterateAndPopOverPools(mem_visitor);
    free_pools_list_.IterateAndPopOverPools(mem_visitor);
}

template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::VisitAndRemoveFreePools(const MemVisitor &mem_visitor)
{
    os::memory::WriteLockHolder wlock(alloc_free_lock_);
    free_pools_list_.IterateAndPopOverPools(mem_visitor);
}

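// Note (editorial): each occupied pool holds exactly one humongous object, so a
// range that lies within a single pool maps to at most one candidate object.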
template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::IterateOverObjectsInRange(const MemVisitor &mem_visitor,
                                                                                 void *left_border, void *right_border)
{
    // NOTE: Current implementation doesn't look at PANDA_CROSSING_MAP_MANAGE_CROSSED_BORDER flag
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "HumongousObjAllocator::IterateOverObjectsInRange for range [" << std::hex
                                       << left_border << ", " << right_border << "]";
    ASSERT(ToUintPtr(right_border) >= ToUintPtr(left_border));
    // TODO(aemelenko): These are temporary asserts because we can't do anything
    // if the range crosses different allocators' memory pools
    ASSERT(ToUintPtr(right_border) - ToUintPtr(left_border) ==
           (CrossingMapSingleton::GetCrossingMapGranularity() - 1U));
    ASSERT((ToUintPtr(right_border) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))) ==
           (ToUintPtr(left_border) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))));

    // Try to find a pool containing this range
    MemoryPoolHeader *discovered_pool = nullptr;
    MemoryPoolHeader *current_pool = nullptr;
    {
        os::memory::ReadLockHolder rlock(alloc_free_lock_);
        current_pool = occupied_pools_list_.GetListHead();
    }
    while (current_pool != nullptr) {
        // Use the current_pool address here because it is page-aligned
        uintptr_t current_pool_start = ToUintPtr(current_pool);
        uintptr_t current_pool_end = ToUintPtr(current_pool->GetMemory()) + current_pool->GetPoolSize();
        if (current_pool_start <= ToUintPtr(left_border)) {
            // Check that this range is located in the same pool
            if (current_pool_end >= ToUintPtr(right_border)) {
                discovered_pool = current_pool;
                break;
            }
        }
        {
            os::memory::ReadLockHolder rlock(alloc_free_lock_);
            current_pool = current_pool->GetNext();
        }
    }

    if (discovered_pool != nullptr) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG)
            << "It is a MemoryPoolHeader with addr " << std::hex << discovered_pool
            << " and size " << std::dec << discovered_pool->GetPoolSize();
        mem_visitor(static_cast<ObjectHeader *>(discovered_pool->GetMemory()));
    } else {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG)
            << "This memory range is not covered by this allocator";
    }
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "HumongousObjAllocator::IterateOverObjectsInRange finished";
}

template <typename AllocConfigT, typename LockConfigT>
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::AllocatedByHumongousObjAllocator(void *mem)
{
    os::memory::ReadLockHolder rlock(alloc_free_lock_);
    return AllocatedByHumongousObjAllocatorUnsafe(mem);
}

template <typename AllocConfigT, typename LockConfigT>
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::AllocatedByHumongousObjAllocatorUnsafe(void *mem)
{
    MemoryPoolHeader *current_pool = occupied_pools_list_.GetListHead();
    while (current_pool != nullptr) {
        if (current_pool->GetMemory() == mem) {
            return true;
        }
        current_pool = current_pool->GetNext();
    }
    return false;
}

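// Note (editorial): the MemoryPoolHeader methods below keep the header bytes
// poisoned for ASAN at all times except while the header is being read or
// written, hence the unpoison/poison pairs around every field access.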
template <typename AllocConfigT, typename LockConfigT>
ATTRIBUTE_NO_SANITIZE_ADDRESS void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader::Initialize(
    size_t size, MemoryPoolHeader *prev, MemoryPoolHeader *next)
{
    ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
    pool_size_ = size;
    prev_ = prev;
    next_ = next;
    mem_addr_ = nullptr;
    ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
}

template <typename AllocConfigT, typename LockConfigT>
ATTRIBUTE_NO_SANITIZE_ADDRESS void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader::Alloc(
    size_t size, Alignment align)
{
    (void)size;  // size is only used by the ASSERT below, which compiles out in release builds
    ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
    mem_addr_ = ToVoidPtr(AlignUp(ToUintPtr(this) + sizeof(MemoryPoolHeader), GetAlignmentInBytes(align)));
    ASSERT(ToUintPtr(mem_addr_) + size <= ToUintPtr(this) + pool_size_);
    ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
}

template <typename AllocConfigT, typename LockConfigT>
ATTRIBUTE_NO_SANITIZE_ADDRESS void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader::PopHeader()
{
    ASAN_UNPOISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
    if (prev_ != nullptr) {
        ASAN_UNPOISON_MEMORY_REGION(prev_, sizeof(MemoryPoolHeader));
        prev_->SetNext(next_);
        ASAN_POISON_MEMORY_REGION(prev_, sizeof(MemoryPoolHeader));
    }
    if (next_ != nullptr) {
        ASAN_UNPOISON_MEMORY_REGION(next_, sizeof(MemoryPoolHeader));
        next_->SetPrev(prev_);
        ASAN_POISON_MEMORY_REGION(next_, sizeof(MemoryPoolHeader));
    }
    next_ = nullptr;
    prev_ = nullptr;
    ASAN_POISON_MEMORY_REGION(this, sizeof(MemoryPoolHeader));
}

template <typename AllocConfigT, typename LockConfigT>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::Pop(MemoryPoolHeader *pool)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Pop a pool with addr " << std::hex << pool << " from the pool list";
    ASSERT(IsInThisList(pool));
    if (head_ == pool) {
        head_ = pool->GetNext();
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "It was a pointer to the list head. Change head to " << std::hex << head_;
    }
    pool->PopHeader();
}

template <typename AllocConfigT, typename LockConfigT>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::Insert(MemoryPoolHeader *pool)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Insert a pool with addr " << std::hex << pool << " into the pool list";
    if (head_ != nullptr) {
        head_->SetPrev(pool);
    } else {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "The head was not initialized. Set it up.";
    }
    pool->SetNext(head_);
    pool->SetPrev(nullptr);
    head_ = pool;
}

template <typename AllocConfigT, typename LockConfigT>
typename HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader *
HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::FindSuitablePool(size_t size)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to find a suitable pool for memory with size " << size;
    MemoryPoolHeader *cur_pool = head_;
    while (cur_pool != nullptr) {
        if (cur_pool->GetPoolSize() >= size) {
            break;
        }
        cur_pool = cur_pool->GetNext();
    }
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Found a pool with addr " << std::hex << cur_pool;
    return cur_pool;
}

template <typename AllocConfigT, typename LockConfigT>
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::IsInThisList(MemoryPoolHeader *pool)
{
    // TODO(aemelenko): Do it only in debug build
    MemoryPoolHeader *cur_pool = head_;
    while (cur_pool != nullptr) {
        if (cur_pool == pool) {
            break;
        }
        cur_pool = cur_pool->GetNext();
    }
    return cur_pool != nullptr;
}

template <typename AllocConfigT, typename LockConfigT>
template <typename MemVisitor>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolList::IterateAndPopOverPools(
    const MemVisitor &mem_visitor)
{
    MemoryPoolHeader *current_pool = head_;
    while (current_pool != nullptr) {
        MemoryPoolHeader *tmp = current_pool->GetNext();
        this->Pop(current_pool);
        mem_visitor(current_pool, current_pool->GetPoolSize());
        current_pool = tmp;
    }
}

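// Contract of TryToInsert (editorial summary): returns nullptr if the pool was
// inserted into the reserved list; returns the argument itself if the pool is
// too big, too small, or the list has no capacity; otherwise returns the
// smallest reserved pool, which was evicted to make room.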
template <typename AllocConfigT, typename LockConfigT>
typename HumongousObjAllocator<AllocConfigT, LockConfigT>::MemoryPoolHeader *
HumongousObjAllocator<AllocConfigT, LockConfigT>::ReservedMemoryPools::TryToInsert(MemoryPoolHeader *pool)
{
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Try to insert a pool in Reserved memory with addr " << std::hex << pool;
    if (pool->GetPoolSize() > MAX_POOL_SIZE) {
        // This pool is too big for inserting into Reserved
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "It is too big for Reserved memory";
        return pool;
    }
    if (elements_count_ < MAX_POOLS_AMOUNT) {
        // We can insert the memory pool into Reserved
        SortedInsert(pool);
        elements_count_++;
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "We don't have the max amount of elements in the Reserved list. Just insert.";
        return nullptr;
    }
    // We have the max amount of elements in the Reserved pools list.
    // Try to swap the smallest pool (which comes first, because the list is ordered)
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "We have the max amount of elements in the Reserved list.";
    MemoryPoolHeader *smallest_pool = this->GetListHead();
    if (smallest_pool == nullptr) {
        // This is the only case in which smallest_pool can be nullptr.
        ASSERT(MAX_POOLS_AMOUNT == 0);
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "MAX_POOLS_AMOUNT for the Reserved list is equal to zero. Do nothing";
        return pool;
    }
    ASSERT(smallest_pool != nullptr);
    if (smallest_pool->GetPoolSize() >= pool->GetPoolSize()) {
        LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "The pool is too small. Do not insert it";
        return pool;
    }
    // Just pop this element from the list. Do not update the elements_count_ value
    MemoryPoolList::Pop(smallest_pool);
    SortedInsert(pool);
    LOG_HUMONGOUS_OBJ_ALLOCATOR(DEBUG) << "Swap the smallest element in the Reserved list with addr " << std::hex
                                       << smallest_pool;
    return smallest_pool;
}

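// Note (editorial): SortedInsert keeps the reserved list ordered by ascending
// pool size, so the head is always the smallest pool. For example, inserting a
// 3 MB pool into a list holding 1 MB and 4 MB pools places it between them.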
template <typename AllocConfigT, typename LockConfigT>
void HumongousObjAllocator<AllocConfigT, LockConfigT>::ReservedMemoryPools::SortedInsert(MemoryPoolHeader *pool)
{
    size_t pool_size = pool->GetPoolSize();
    MemoryPoolHeader *list_head = this->GetListHead();
    if (list_head == nullptr) {
        this->Insert(pool);
        return;
    }
    if (list_head->GetPoolSize() >= pool_size) {
        // Handle this case via Insert so that head_ is not updated directly in this method
        this->Insert(pool);
        return;
    }
    MemoryPoolHeader *cur = list_head;
    while (cur != nullptr) {
        if (cur->GetPoolSize() >= pool_size) {
            pool->SetNext(cur);
            pool->SetPrev(cur->GetPrev());
            cur->GetPrev()->SetNext(pool);
            cur->SetPrev(pool);
            return;
        }
        MemoryPoolHeader *next = cur->GetNext();
        if (next == nullptr) {
            cur->SetNext(pool);
            pool->SetNext(nullptr);
            pool->SetPrev(cur);
            return;
        }
        cur = next;
    }
}

template <typename AllocConfigT, typename LockConfigT>
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::ContainObject(const ObjectHeader *obj)
{
    return AllocatedByHumongousObjAllocatorUnsafe(const_cast<ObjectHeader *>(obj));
}

template <typename AllocConfigT, typename LockConfigT>
bool HumongousObjAllocator<AllocConfigT, LockConfigT>::IsLive(const ObjectHeader *obj)
{
    ASSERT(ContainObject(obj));
    auto *mem_header = static_cast<MemoryPoolHeader *>(ToVoidPtr(ToUintPtr(obj) & PAGE_SIZE_MASK));
    ASSERT(PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(
               static_cast<void *>(const_cast<ObjectHeader *>(obj))) == static_cast<void *>(mem_header));
    return mem_header->GetMemory() == static_cast<void *>(const_cast<ObjectHeader *>(obj));
}

#undef LOG_HUMONGOUS_OBJ_ALLOCATOR

}  // namespace panda::mem

#endif  // PANDA_MEM_HUMONGOUS_OBJ_ALLOCATOR_INL_H