• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 // These includes to avoid linker error:
16 
17 #include "runtime/include/mem/allocator.h"
18 #include "runtime/include/mem/allocator-inl.h"
19 #include "mem/mem_pool.h"
20 #include "mem/mem_config.h"
21 #include "runtime/include/runtime.h"
22 #include "runtime/include/panda_vm.h"
23 #include "runtime/mem/bump-allocator-inl.h"
24 #include "runtime/mem/freelist_allocator-inl.h"
25 #include "runtime/mem/internal_allocator-inl.h"
26 #include "runtime/mem/runslots_allocator-inl.h"
27 #include "runtime/mem/pygote_space_allocator-inl.h"
28 #include "runtime/mem/tlab.h"
29 
30 namespace panda::mem {
31 
// Out-of-line defaulted destructor (see the file-header note: these definitions
// exist in this translation unit to avoid linker errors).
Allocator::~Allocator() = default;
33 
ObjectAllocatorBase(MemStatsType * mem_stats,GCCollectMode gc_collect_mode,bool create_pygote_space_allocator)34 ObjectAllocatorBase::ObjectAllocatorBase(MemStatsType *mem_stats, GCCollectMode gc_collect_mode,
35                                          bool create_pygote_space_allocator)
36     : Allocator(mem_stats, AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT, gc_collect_mode)
37 {
38     if (create_pygote_space_allocator) {
39         pygote_space_allocator_ = new (std::nothrow) PygoteAllocator(mem_stats);
40         pygote_alloc_enabled_ = true;
41     }
42 }
43 
~ObjectAllocatorBase()44 ObjectAllocatorBase::~ObjectAllocatorBase()
45 {
46     // NOLINTNEXTLINE(readability-delete-null-pointer)
47     if (pygote_space_allocator_ != nullptr) {
48         delete pygote_space_allocator_;
49     }
50 }
51 
HaveEnoughPoolsInObjectSpace(size_t pools_num) const52 bool ObjectAllocatorBase::HaveEnoughPoolsInObjectSpace(size_t pools_num) const
53 {
54     auto mem_pool = PoolManager::GetMmapMemPool();
55     auto pool_size = std::max(PANDA_DEFAULT_POOL_SIZE, PANDA_DEFAULT_ALLOCATOR_POOL_SIZE);
56     return mem_pool->HaveEnoughPoolsInObjectSpace(pools_num, pool_size);
57 }
58 
// Non-generational object allocator: a single heap space backing the regular,
// large and humongous object sub-allocators.
template <MTModeT MTMode>
ObjectAllocatorNoGen<MTMode>::ObjectAllocatorNoGen(MemStatsType *mem_stats, bool create_pygote_space_allocator)
    : ObjectAllocatorBase(mem_stats, GCCollectMode::GC_ALL, create_pygote_space_allocator)
{
    const auto &options = Runtime::GetOptions();
    heap_space_.Initialize(MemConfig::GetInitialHeapSizeLimit(), MemConfig::GetHeapSizeLimit(),
                           options.GetMinHeapFreePercentage(), options.GetMaxHeapFreePercentage());
    if (create_pygote_space_allocator) {
        // Created by ObjectAllocatorBase when the flag is set.
        ASSERT(pygote_space_allocator_ != nullptr);
        pygote_space_allocator_->SetHeapSpace(&heap_space_);
    }
    // NOTE(review): the results of new (std::nothrow) are not checked here; an
    // allocation failure would surface as a null dereference on first use — confirm
    // whether OOM at startup is considered fatal by design.
    object_allocator_ = new (std::nothrow) ObjectAllocator(mem_stats);
    large_object_allocator_ = new (std::nothrow) LargeObjectAllocator(mem_stats);
    humongous_object_allocator_ = new (std::nothrow) HumongousObjectAllocator(mem_stats);
}
74 
// Destroy the three sub-allocators owned by this allocator.
template <MTModeT MTMode>
ObjectAllocatorNoGen<MTMode>::~ObjectAllocatorNoGen()
{
    delete object_allocator_;
    delete large_object_allocator_;
    delete humongous_object_allocator_;
}
82 
83 template <MTModeT MTMode>
Allocate(size_t size,Alignment align,panda::ManagedThread * thread)84 void *ObjectAllocatorNoGen<MTMode>::Allocate(size_t size, Alignment align,
85                                              [[maybe_unused]] panda::ManagedThread *thread)
86 {
87     void *mem = nullptr;
88     size_t aligned_size = AlignUp(size, GetAlignmentInBytes(align));
89     if (aligned_size <= ObjectAllocator::GetMaxSize()) {
90         size_t pool_size = std::max(PANDA_DEFAULT_POOL_SIZE, ObjectAllocator::GetMinPoolSize());
91         mem = AllocateSafe(size, align, object_allocator_, pool_size, SpaceType::SPACE_TYPE_OBJECT, &heap_space_);
92     } else if (aligned_size <= LargeObjectAllocator::GetMaxSize()) {
93         size_t pool_size = std::max(PANDA_DEFAULT_POOL_SIZE, LargeObjectAllocator::GetMinPoolSize());
94         mem = AllocateSafe(size, align, large_object_allocator_, pool_size, SpaceType::SPACE_TYPE_OBJECT, &heap_space_);
95     } else {
96         size_t pool_size = std::max(PANDA_DEFAULT_POOL_SIZE, HumongousObjectAllocator::GetMinPoolSize(size));
97         mem = AllocateSafe(size, align, humongous_object_allocator_, pool_size, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT,
98                            &heap_space_);
99     }
100     return mem;
101 }
102 
// Allocate a non-movable object. Small objects may be placed in the pygote
// space before the pygote fork; otherwise the regular Allocate path is used,
// which is already non-moving for this non-compacting allocator.
template <MTModeT MTMode>
void *ObjectAllocatorNoGen<MTMode>::AllocateNonMovable(size_t size, Alignment align, panda::ManagedThread *thread)
{
    // before pygote fork, allocate small non-movable objects in pygote space
    if (UNLIKELY(IsPygoteAllocEnabled() && pygote_space_allocator_->CanAllocNonMovable(size, align))) {
        return pygote_space_allocator_->Alloc(size, align);
    }
    // Without generations - no compaction now, so all allocations are non-movable
    return Allocate(size, align, thread);
}
113 
// Visit and remove every memory pool of every sub-allocator (teardown path).
template <MTModeT MTMode>
void ObjectAllocatorNoGen<MTMode>::VisitAndRemoveAllPools(const MemVisitor &mem_visitor)
{
    // The pygote space allocator exists only if requested at construction time.
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->VisitAndRemoveAllPools(mem_visitor);
    }
    object_allocator_->VisitAndRemoveAllPools(mem_visitor);
    large_object_allocator_->VisitAndRemoveAllPools(mem_visitor);
    humongous_object_allocator_->VisitAndRemoveAllPools(mem_visitor);
}
124 
// Visit and remove only the pools that have become completely free,
// returning their memory to the pool manager.
template <MTModeT MTMode>
void ObjectAllocatorNoGen<MTMode>::VisitAndRemoveFreePools(const MemVisitor &mem_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->VisitAndRemoveFreePools(mem_visitor);
    }
    object_allocator_->VisitAndRemoveFreePools(mem_visitor);
    large_object_allocator_->VisitAndRemoveFreePools(mem_visitor);
    humongous_object_allocator_->VisitAndRemoveFreePools(mem_visitor);
}
135 
// Apply object_visitor to every object in every space managed by this allocator.
template <MTModeT MTMode>
void ObjectAllocatorNoGen<MTMode>::IterateOverObjects(const ObjectVisitor &object_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->IterateOverObjects(object_visitor);
    }
    object_allocator_->IterateOverObjects(object_visitor);
    large_object_allocator_->IterateOverObjects(object_visitor);
    humongous_object_allocator_->IterateOverObjects(object_visitor);
}
146 
// Apply object_visitor to regular-size objects only (the object_allocator_ space).
template <MTModeT MTMode>
void ObjectAllocatorNoGen<MTMode>::IterateRegularSizeObjects(const ObjectVisitor &object_visitor)
{
    object_allocator_->IterateOverObjects(object_visitor);
}
152 
// Apply object_visitor to all non-regular-size objects: pygote (if present),
// large and humongous spaces.
template <MTModeT MTMode>
void ObjectAllocatorNoGen<MTMode>::IterateNonRegularSizeObjects(const ObjectVisitor &object_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->IterateOverObjects(object_visitor);
    }
    large_object_allocator_->IterateOverObjects(object_visitor);
    humongous_object_allocator_->IterateOverObjects(object_visitor);
}
162 
// After live objects have been moved into the pygote space, drop the regular
// object space entirely: free all its pools and recreate a fresh allocator.
template <MTModeT MTMode>
void ObjectAllocatorNoGen<MTMode>::FreeObjectsMovedToPygoteSpace()
{
    // clear because we have moved all objects in it to pygote space
    object_allocator_->VisitAndRemoveAllPools(
        [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    delete object_allocator_;
    object_allocator_ = new (std::nothrow) ObjectAllocator(mem_stats_);
}
172 
// Run collection over every space. collect_mode is ignored: a non-generational
// allocator always collects the whole heap (it was constructed with GC_ALL).
template <MTModeT MTMode>
void ObjectAllocatorNoGen<MTMode>::Collect(const GCObjectVisitor &gc_object_visitor,
                                           [[maybe_unused]] GCCollectMode collect_mode)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->Collect(gc_object_visitor);
    }
    object_allocator_->Collect(gc_object_visitor);
    large_object_allocator_->Collect(gc_object_visitor);
    humongous_object_allocator_->Collect(gc_object_visitor);
}
184 
185 // if there is a common base class for these allocators, we could split this func and return the pointer to the
186 // allocator containing the object
187 template <MTModeT MTMode>
ContainObject(const ObjectHeader * obj) const188 bool ObjectAllocatorNoGen<MTMode>::ContainObject(const ObjectHeader *obj) const
189 {
190     if (object_allocator_->ContainObject(obj)) {
191         return true;
192     }
193     if (large_object_allocator_->ContainObject(obj)) {
194         return true;
195     }
196     if (humongous_object_allocator_->ContainObject(obj)) {
197         return true;
198     }
199 
200     return false;
201 }
202 
// Report liveness by delegating to whichever space contains the object.
// Objects not found in any space are reported dead.
template <MTModeT MTMode>
bool ObjectAllocatorNoGen<MTMode>::IsLive(const ObjectHeader *obj)
{
    if (pygote_space_allocator_ != nullptr && pygote_space_allocator_->ContainObject(obj)) {
        return pygote_space_allocator_->IsLive(obj);
    }
    if (object_allocator_->ContainObject(obj)) {
        return object_allocator_->IsLive(obj);
    }
    if (large_object_allocator_->ContainObject(obj)) {
        return large_object_allocator_->IsLive(obj);
    }
    if (humongous_object_allocator_->ContainObject(obj)) {
        return humongous_object_allocator_->IsLive(obj);
    }
    return false;
}
220 
221 template <MTModeT MTMode>
Allocate(size_t size,Alignment align,panda::ManagedThread * thread)222 void *ObjectAllocatorGen<MTMode>::Allocate(size_t size, Alignment align, [[maybe_unused]] panda::ManagedThread *thread)
223 {
224     void *mem = nullptr;
225     size_t aligned_size = AlignUp(size, GetAlignmentInBytes(align));
226     if (LIKELY(aligned_size <= YOUNG_ALLOC_MAX_SIZE)) {
227         mem = young_gen_allocator_->Alloc(size, align);
228     } else {
229         mem = AllocateTenured(size);
230     }
231     return mem;
232 }
233 
// Allocate a non-movable object: pygote space before the fork when possible,
// otherwise the dedicated non-movable spaces; humongous objects use the common
// humongous allocator because humongous objects never move anyway.
template <MTModeT MTMode>
void *ObjectAllocatorGen<MTMode>::AllocateNonMovable(size_t size, Alignment align,
                                                     [[maybe_unused]] panda::ManagedThread *thread)
{
    // before pygote fork, allocate small non-movable objects in pygote space
    if (UNLIKELY(IsPygoteAllocEnabled() && pygote_space_allocator_->CanAllocNonMovable(size, align))) {
        return pygote_space_allocator_->Alloc(size, align);
    }
    void *mem = nullptr;
    size_t aligned_size = AlignUp(size, GetAlignmentInBytes(align));
    if (aligned_size <= ObjectAllocator::GetMaxSize()) {
        size_t pool_size = std::max(PANDA_DEFAULT_POOL_SIZE, ObjectAllocator::GetMinPoolSize());
        mem = AllocateSafe(size, align, non_movable_object_allocator_, pool_size,
                           SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT, &heap_spaces_);
    } else if (aligned_size <= LargeObjectAllocator::GetMaxSize()) {
        size_t pool_size = std::max(PANDA_DEFAULT_POOL_SIZE, LargeObjectAllocator::GetMinPoolSize());
        mem = AllocateSafe(size, align, large_non_movable_object_allocator_, pool_size,
                           SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT, &heap_spaces_);
    } else {
        // We don't need special allocator for this
        // Humongous objects are non-movable
        size_t pool_size = std::max(PANDA_DEFAULT_POOL_SIZE, HumongousObjectAllocator::GetMinPoolSize(size));
        mem = AllocateSafe(size, align, humongous_object_allocator_, pool_size, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT,
                           &heap_spaces_);
    }
    return mem;
}
261 
// Visit and remove every memory pool of every tenured-side sub-allocator
// (the young space pool is handled by the destructor separately).
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::VisitAndRemoveAllPools(const MemVisitor &mem_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->VisitAndRemoveAllPools(mem_visitor);
    }
    object_allocator_->VisitAndRemoveAllPools(mem_visitor);
    large_object_allocator_->VisitAndRemoveAllPools(mem_visitor);
    humongous_object_allocator_->VisitAndRemoveAllPools(mem_visitor);
    non_movable_object_allocator_->VisitAndRemoveAllPools(mem_visitor);
    large_non_movable_object_allocator_->VisitAndRemoveAllPools(mem_visitor);
}
274 
// Visit and remove only completely free pools of each sub-allocator,
// returning their memory to the pool manager.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::VisitAndRemoveFreePools(const MemVisitor &mem_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->VisitAndRemoveFreePools(mem_visitor);
    }
    object_allocator_->VisitAndRemoveFreePools(mem_visitor);
    large_object_allocator_->VisitAndRemoveFreePools(mem_visitor);
    humongous_object_allocator_->VisitAndRemoveFreePools(mem_visitor);
    non_movable_object_allocator_->VisitAndRemoveFreePools(mem_visitor);
    large_non_movable_object_allocator_->VisitAndRemoveFreePools(mem_visitor);
}
287 
// Apply object_visitor to every object in the young space only.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::IterateOverYoungObjects(const ObjectVisitor &object_visitor)
{
    young_gen_allocator_->IterateOverObjects(object_visitor);
}
293 
// Apply object_visitor to every object outside the young space.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::IterateOverTenuredObjects(const ObjectVisitor &object_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->IterateOverObjects(object_visitor);
    }
    object_allocator_->IterateOverObjects(object_visitor);
    large_object_allocator_->IterateOverObjects(object_visitor);
    humongous_object_allocator_->IterateOverObjects(object_visitor);
    non_movable_object_allocator_->IterateOverObjects(object_visitor);
    large_non_movable_object_allocator_->IterateOverObjects(object_visitor);
}
306 
// Apply object_visitor to every object in every space, young space included.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::IterateOverObjects(const ObjectVisitor &object_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->IterateOverObjects(object_visitor);
    }
    young_gen_allocator_->IterateOverObjects(object_visitor);
    object_allocator_->IterateOverObjects(object_visitor);
    large_object_allocator_->IterateOverObjects(object_visitor);
    humongous_object_allocator_->IterateOverObjects(object_visitor);
    non_movable_object_allocator_->IterateOverObjects(object_visitor);
    large_non_movable_object_allocator_->IterateOverObjects(object_visitor);
}
320 
// Apply object_visitor to regular-size objects only (the object_allocator_ space).
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::IterateRegularSizeObjects(const ObjectVisitor &object_visitor)
{
    object_allocator_->IterateOverObjects(object_visitor);
}
326 
// Apply object_visitor to all non-regular-size objects: pygote (if present),
// large, humongous and both non-movable spaces.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::IterateNonRegularSizeObjects(const ObjectVisitor &object_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->IterateOverObjects(object_visitor);
    }
    large_object_allocator_->IterateOverObjects(object_visitor);
    humongous_object_allocator_->IterateOverObjects(object_visitor);
    non_movable_object_allocator_->IterateOverObjects(object_visitor);
    large_non_movable_object_allocator_->IterateOverObjects(object_visitor);
}
338 
// After live objects have been moved into the pygote space, drop the regular
// object space entirely: free all its pools and recreate a fresh allocator.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::FreeObjectsMovedToPygoteSpace()
{
    // clear because we have moved all objects in it to pygote space
    object_allocator_->VisitAndRemoveAllPools(
        [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    delete object_allocator_;
    object_allocator_ = new (std::nothrow) ObjectAllocator(mem_stats_);
}
348 
// Collect garbage according to collect_mode. Only GC_ALL / GC_MAJOR sweep the
// tenured-side allocators here; GC_FULL and GC_NONE are not expected.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::Collect(const GCObjectVisitor &gc_object_visitor, GCCollectMode collect_mode)
{
    switch (collect_mode) {
        case GCCollectMode::GC_MINOR:
            // NOTE(review): nothing is swept here for a minor GC — presumably the
            // young space is reclaimed via ResetYoungAllocator — confirm.
            break;
        case GCCollectMode::GC_ALL:
        case GCCollectMode::GC_MAJOR:
            if (pygote_space_allocator_ != nullptr) {
                pygote_space_allocator_->Collect(gc_object_visitor);
            }
            object_allocator_->Collect(gc_object_visitor);
            large_object_allocator_->Collect(gc_object_visitor);
            humongous_object_allocator_->Collect(gc_object_visitor);
            non_movable_object_allocator_->Collect(gc_object_visitor);
            large_non_movable_object_allocator_->Collect(gc_object_visitor);
            break;
        case GCCollectMode::GC_FULL:
            UNREACHABLE();
            break;
        case GC_NONE:
            UNREACHABLE();
            break;
        default:
            UNREACHABLE();
    }
}
376 
// Largest object size served by the regular-size allocator.
template <MTModeT MTMode>
size_t ObjectAllocatorNoGen<MTMode>::GetRegularObjectMaxSize()
{
    return ObjectAllocator::GetMaxSize();
}
382 
// Largest object size served by the large-object allocator.
template <MTModeT MTMode>
size_t ObjectAllocatorNoGen<MTMode>::GetLargeObjectMaxSize()
{
    return LargeObjectAllocator::GetMaxSize();
}
388 
// TLABs are not supported by the non-generational allocator; calling this is fatal.
template <MTModeT MTMode>
TLAB *ObjectAllocatorNoGen<MTMode>::CreateNewTLAB([[maybe_unused]] panda::ManagedThread *thread)
{
    // TODO(aemelenko): Implement this method
    LOG(FATAL, ALLOC) << "Unimplemented";
    return nullptr;
}
396 
// 0 means "no TLAB allocation available" for this allocator.
template <MTModeT MTMode>
size_t ObjectAllocatorNoGen<MTMode>::GetTLABMaxAllocSize()
{
    // TODO(aemelenko): TLAB usage is not supported for non-gen GCs.
    return 0;
}
403 
// Base for generational allocators: initializes the young+tenured heap spaces
// from runtime options and attaches the optional pygote allocator to them.
ObjectAllocatorGenBase::ObjectAllocatorGenBase(MemStatsType *mem_stats, GCCollectMode gc_collect_mode,
                                               bool create_pygote_space_allocator)
    : ObjectAllocatorBase(mem_stats, gc_collect_mode, create_pygote_space_allocator)
{
    const auto &options = Runtime::GetOptions();
    heap_spaces_.Initialize(options.GetInitYoungSpaceSize(), options.WasSetInitYoungSpaceSize(),
                            options.GetYoungSpaceSize(), options.WasSetYoungSpaceSize(),
                            MemConfig::GetInitialHeapSizeLimit(), MemConfig::GetHeapSizeLimit(),
                            options.GetMinHeapFreePercentage(), options.GetMaxHeapFreePercentage());
    if (create_pygote_space_allocator) {
        // Created by ObjectAllocatorBase when the flag is set.
        ASSERT(pygote_space_allocator_ != nullptr);
        pygote_space_allocator_->SetHeapSpace(&heap_spaces_);
    }
}
418 
// Generational allocator: a bump-pointer young space carved into TLABs plus the
// tenured regular/large/humongous and non-movable spaces.
template <MTModeT MTMode>
ObjectAllocatorGen<MTMode>::ObjectAllocatorGen(MemStatsType *mem_stats, bool create_pygote_space_allocator)
    : ObjectAllocatorGenBase(mem_stats, GCCollectMode::GC_ALL, create_pygote_space_allocator)
{
    // For Gen-GC we use alone pool for young space, so we will use full such pool
    heap_spaces_.UseFullYoungSpace();
    size_t young_space_size = heap_spaces_.GetCurrentMaxYoungSize();
    auto young_shared_space_size = Runtime::GetOptions().GetYoungSharedSpaceSize();
    ASSERT(young_space_size >= young_shared_space_size);
    // The non-shared part of the young space is split into fixed-size TLABs.
    auto tlabs_count_in_young_gen = (young_space_size - young_shared_space_size) / YOUNG_TLAB_SIZE;
    ASSERT(((young_space_size - young_shared_space_size) % YOUNG_TLAB_SIZE) == 0);
    ASSERT(YOUNG_ALLOC_MAX_SIZE <= YOUNG_TLAB_SIZE);
    ASSERT(tlabs_count_in_young_gen * YOUNG_TLAB_SIZE <= young_space_size);

    // TODO(aemelenko): Missed an allocator pointer
    // because we construct BumpPointer Allocator after calling AllocArena method
    // NOTE: as a consequence, the young pool's allocator header is registered as
    // &young_gen_allocator_ (the address of the member pointer itself);
    // IterateOverObjectsInRange compares against that same address.
    auto young_pool = heap_spaces_.AllocAlonePoolForYoung(SpaceType::SPACE_TYPE_OBJECT,
                                                          YoungGenAllocator::GetAllocatorType(), &young_gen_allocator_);
    young_gen_allocator_ = new (std::nothrow)
        YoungGenAllocator(std::move(young_pool), SpaceType::SPACE_TYPE_OBJECT, mem_stats, tlabs_count_in_young_gen);
    object_allocator_ = new (std::nothrow) ObjectAllocator(mem_stats);
    large_object_allocator_ = new (std::nothrow) LargeObjectAllocator(mem_stats);
    humongous_object_allocator_ = new (std::nothrow) HumongousObjectAllocator(mem_stats);
    non_movable_object_allocator_ =
        new (std::nothrow) ObjectAllocator(mem_stats, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    large_non_movable_object_allocator_ =
        new (std::nothrow) LargeObjectAllocator(mem_stats, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    mem_stats_ = mem_stats;
    // NOTE(review): seeds the young ranges with an empty {0, 0} entry — presumably a
    // placeholder until UpdateSpaceData() records the real range; confirm.
    GetYoungRanges().push_back({0, 0});
}
449 
450 template <MTModeT MTMode>
~ObjectAllocatorGen()451 ObjectAllocatorGen<MTMode>::~ObjectAllocatorGen()
452 {
453     // need to free the pool space when the allocator destroy
454     young_gen_allocator_->VisitAndRemoveAllPools(
455         [](void *mem, [[maybe_unused]] size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
456     delete young_gen_allocator_;
457     delete object_allocator_;
458     delete large_object_allocator_;
459     delete humongous_object_allocator_;
460     delete non_movable_object_allocator_;
461     delete large_non_movable_object_allocator_;
462 }
// Largest object size served by the regular-size allocator.
template <MTModeT MTMode>
size_t ObjectAllocatorGen<MTMode>::GetRegularObjectMaxSize()
{
    return ObjectAllocator::GetMaxSize();
}
468 
// Largest object size served by the large-object allocator.
template <MTModeT MTMode>
size_t ObjectAllocatorGen<MTMode>::GetLargeObjectMaxSize()
{
    return LargeObjectAllocator::GetMaxSize();
}
474 
// True if the address lies within the young space's memory range.
template <MTModeT MTMode>
bool ObjectAllocatorGen<MTMode>::IsAddressInYoungSpace(uintptr_t address)
{
    return young_gen_allocator_->GetMemRange().IsAddressInRange(address);
}
480 
// True if mem_range overlaps the young space's memory range.
template <MTModeT MTMode>
bool ObjectAllocatorGen<MTMode>::IsIntersectedWithYoung(const MemRange &mem_range)
{
    return young_gen_allocator_->GetMemRange().IsIntersect(mem_range);
}
486 
// True if the object lives in the (regular-size) non-movable space.
// NOTE(review): the large non-movable space is not checked here — confirm intended.
template <MTModeT MTMode>
bool ObjectAllocatorGen<MTMode>::IsObjectInNonMovableSpace(const ObjectHeader *obj)
{
    return non_movable_object_allocator_->ContainObject(obj);
}
492 
// True when the young-gen allocator was successfully created.
template <MTModeT MTMode>
bool ObjectAllocatorGen<MTMode>::HasYoungSpace()
{
    return young_gen_allocator_ != nullptr;
}
498 
// Return the cached young-space ranges (populated by UpdateSpaceData).
template <MTModeT MTMode>
const std::vector<MemRange> &ObjectAllocatorGen<MTMode>::GetYoungSpaceMemRanges()
{
    return GetYoungRanges();
}
504 
// Young-space mark bitmaps are not maintained by this allocator; calling this
// is a fatal error. The static vector exists only to satisfy the return type.
template <MTModeT MTMode>
std::vector<MarkBitmap *> &ObjectAllocatorGen<MTMode>::GetYoungSpaceBitmaps()
{
    static std::vector<MarkBitmap *> ret;
    LOG(FATAL, ALLOC) << "GetYoungSpaceBitmaps not applicable for ObjectAllocatorGen";
    return ret;
}
512 
// Reset the young space: account any still-untracked TLAB memory into mem-stats,
// clear every thread's TLAB, then reset the bump-pointer allocator.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::ResetYoungAllocator()
{
    MemStatsType *mem_stats = mem_stats_;
    auto thread_callback = [&mem_stats](ManagedThread *thread) {
        // When per-allocation TLAB tracking is disabled, the whole occupied TLAB
        // size is recorded once here instead.
        if (!PANDA_TRACK_TLAB_ALLOCATIONS && (thread->GetTLAB()->GetOccupiedSize() != 0)) {
            mem_stats->RecordAllocateObject(thread->GetTLAB()->GetOccupiedSize(), SpaceType::SPACE_TYPE_OBJECT);
        }
        thread->ClearTLAB();
        return true;
    };
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (MTMode == MT_MODE_MULTI) {
        Thread::GetCurrent()->GetVM()->GetThreadManager()->EnumerateThreads(thread_callback);
    } else {  // NOLINT(readability-misleading-indentation)
        thread_callback(Thread::GetCurrent()->GetVM()->GetAssociatedThread());
    }
    young_gen_allocator_->Reset();
}
532 
// Carve a new fixed-size TLAB out of the young space (nullptr semantics are
// defined by the young-gen allocator).
template <MTModeT MTMode>
TLAB *ObjectAllocatorGen<MTMode>::CreateNewTLAB([[maybe_unused]] panda::ManagedThread *thread)
{
    return young_gen_allocator_->CreateNewTLAB(YOUNG_TLAB_SIZE);
}
538 
// Largest object size that may be allocated from a TLAB.
template <MTModeT MTMode>
size_t ObjectAllocatorGen<MTMode>::GetTLABMaxAllocSize()
{
    return YOUNG_ALLOC_MAX_SIZE;
}
544 
// Apply object_visitor to the objects inside mem_range (one card). The range
// must lie entirely within a single allocator's memory, so we dispatch on the
// space type and allocator header recorded for the range's start address.
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::IterateOverObjectsInRange(MemRange mem_range, const ObjectVisitor &object_visitor)
{
    // we need ensure that the mem range related to a card must be located in one allocator
    auto space_type = PoolManager::GetMmapMemPool()->GetSpaceTypeForAddr(ToVoidPtr(mem_range.GetStartAddress()));
    auto alloc_info = PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ToVoidPtr(mem_range.GetStartAddress()));
    auto *allocator = alloc_info.GetAllocatorHeaderAddr();
    switch (space_type) {
        case SpaceType::SPACE_TYPE_OBJECT:
            if (allocator == object_allocator_) {
                object_allocator_->IterateOverObjectsInRange(object_visitor, ToVoidPtr(mem_range.GetStartAddress()),
                                                             ToVoidPtr(mem_range.GetEndAddress()));
            } else if (allocator == pygote_space_allocator_) {
                pygote_space_allocator_->IterateOverObjectsInRange(
                    object_visitor, ToVoidPtr(mem_range.GetStartAddress()), ToVoidPtr(mem_range.GetEndAddress()));
            } else if (allocator == &young_gen_allocator_) {
                // The young pool was registered with the address of the member pointer
                // itself (the allocator object did not exist yet when the pool was
                // allocated — see the constructor TODO), hence `&`.
                young_gen_allocator_->IterateOverObjectsInRange(object_visitor, ToVoidPtr(mem_range.GetStartAddress()),
                                                                ToVoidPtr(mem_range.GetEndAddress()));
            } else if (allocator == large_object_allocator_) {
                large_object_allocator_->IterateOverObjectsInRange(
                    object_visitor, ToVoidPtr(mem_range.GetStartAddress()), ToVoidPtr(mem_range.GetEndAddress()));
            } else {
                // if we reach this line, we may have an issue with multiVM CardTable iteration
                UNREACHABLE();
            }
            break;
        case SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT:
            if (allocator == humongous_object_allocator_) {
                humongous_object_allocator_->IterateOverObjectsInRange(
                    object_visitor, ToVoidPtr(mem_range.GetStartAddress()), ToVoidPtr(mem_range.GetEndAddress()));
            } else {
                // if we reach this line, we may have an issue with multiVM CardTable iteration
                UNREACHABLE();
            }
            break;
        case SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT:
            if (allocator == non_movable_object_allocator_) {
                non_movable_object_allocator_->IterateOverObjectsInRange(
                    object_visitor, ToVoidPtr(mem_range.GetStartAddress()), ToVoidPtr(mem_range.GetEndAddress()));
            } else if (allocator == large_non_movable_object_allocator_) {
                large_non_movable_object_allocator_->IterateOverObjectsInRange(
                    object_visitor, ToVoidPtr(mem_range.GetStartAddress()), ToVoidPtr(mem_range.GetEndAddress()));
            } else {
                // if we reach this line, we may have an issue with multiVM CardTable iteration
                UNREACHABLE();
            }
            break;
        default:
            // if we reach this line, we may have an issue with multiVM CardTable iteration
            UNREACHABLE();
            break;
    }
}
598 
599 template <MTModeT MTMode>
ContainObject(const ObjectHeader * obj) const600 bool ObjectAllocatorGen<MTMode>::ContainObject(const ObjectHeader *obj) const
601 {
602     if (pygote_space_allocator_ != nullptr && pygote_space_allocator_->ContainObject(obj)) {
603         return true;
604     }
605     if (young_gen_allocator_->ContainObject(obj)) {
606         return true;
607     }
608     if (object_allocator_->ContainObject(obj)) {
609         return true;
610     }
611     if (large_object_allocator_->ContainObject(obj)) {
612         return true;
613     }
614     if (humongous_object_allocator_->ContainObject(obj)) {
615         return true;
616     }
617     if (non_movable_object_allocator_->ContainObject(obj)) {
618         return true;
619     }
620     if (large_non_movable_object_allocator_->ContainObject(obj)) {
621         return true;
622     }
623 
624     return false;
625 }
626 
// Report liveness by delegating to whichever space contains the object.
// Objects not found in any space are reported dead.
template <MTModeT MTMode>
bool ObjectAllocatorGen<MTMode>::IsLive(const ObjectHeader *obj)
{
    if (pygote_space_allocator_ != nullptr && pygote_space_allocator_->ContainObject(obj)) {
        return pygote_space_allocator_->IsLive(obj);
    }
    if (young_gen_allocator_->ContainObject(obj)) {
        return young_gen_allocator_->IsLive(obj);
    }
    if (object_allocator_->ContainObject(obj)) {
        return object_allocator_->IsLive(obj);
    }
    if (large_object_allocator_->ContainObject(obj)) {
        return large_object_allocator_->IsLive(obj);
    }
    if (humongous_object_allocator_->ContainObject(obj)) {
        return humongous_object_allocator_->IsLive(obj);
    }
    if (non_movable_object_allocator_->ContainObject(obj)) {
        return non_movable_object_allocator_->IsLive(obj);
    }
    if (large_non_movable_object_allocator_->ContainObject(obj)) {
        return large_non_movable_object_allocator_->IsLive(obj);
    }

    return false;
}
654 
// Record the current young-space memory range for GetYoungSpaceMemRanges().
template <MTModeT MTMode>
void ObjectAllocatorGen<MTMode>::UpdateSpaceData()
{
    GetYoungRanges().push_back(young_gen_allocator_->GetMemRange());
}
660 
// Drop all cached young-space ranges and mark bitmaps.
void ObjectAllocatorGenBase::InvalidateSpaceData()
{
    ranges_.clear();
    young_bitmaps_.clear();
}
666 
// Explicit instantiations for the two supported threading modes.
template class ObjectAllocatorGen<MT_MODE_SINGLE>;
template class ObjectAllocatorGen<MT_MODE_MULTI>;
template class ObjectAllocatorNoGen<MT_MODE_SINGLE>;
template class ObjectAllocatorNoGen<MT_MODE_MULTI>;
671 
672 }  // namespace panda::mem
673