/**
 * Copyright (c) 2024-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// These includes are needed to avoid linker errors:

#include "runtime/arch/memory_helpers.h"
#include "runtime/include/mem/allocator.h"
#include "runtime/include/mem/allocator-inl.h"
#include "mem/mem_pool.h"
#include "mem/mem_config.h"
#include "mem/mem.h"
#include "runtime/include/runtime.h"
#include "runtime/include/panda_vm.h"
#include "runtime/include/object_header.h"
#include "runtime/mem/bump-allocator-inl.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/internal_allocator-inl.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/mem/pygote_space_allocator-inl.h"
#include "runtime/mem/tlab.h"

namespace ark::mem {

Allocator::~Allocator() = default;

ObjectAllocatorBase::ObjectAllocatorBase(MemStatsType *memStats, GCCollectMode gcCollectMode,
                                         bool createPygoteSpaceAllocator)
    : Allocator(memStats, AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT, gcCollectMode)
{
    if (createPygoteSpaceAllocator) {
        pygoteSpaceAllocator_ = new (std::nothrow) PygoteAllocator(memStats);
        pygoteAllocEnabled_ = true;
    }
}

ObjectAllocatorBase::~ObjectAllocatorBase()
{
    // NOLINTNEXTLINE(readability-delete-null-pointer)
    if (pygoteSpaceAllocator_ != nullptr) {
        delete pygoteSpaceAllocator_;
        pygoteSpaceAllocator_ = nullptr;
    }
}

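// Answers whether the shared object space can still provide `poolsNum` pools of the default
// size; callers can use it to decide between triggering a GC and reporting OOM. A minimal
// usage sketch (hypothetical caller, for illustration only):
//
//   if (!allocator->HaveEnoughPoolsInObjectSpace(2U)) {
//       // fewer than two default-sized pools left: collect garbage before retrying
//   }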
bool ObjectAllocatorBase::HaveEnoughPoolsInObjectSpace(size_t poolsNum) const
{
    auto memPool = PoolManager::GetMmapMemPool();
    auto poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, PANDA_DEFAULT_ALLOCATOR_POOL_SIZE);
    return memPool->HaveEnoughPoolsInObjectSpace(poolsNum, poolSize);
}

void ObjectAllocatorBase::MemoryInitialize(void *mem, size_t size) const
{
    TSAN_ANNOTATE_IGNORE_WRITES_BEGIN();
    memset_s(mem, size, 0, size);
    TSAN_ANNOTATE_IGNORE_WRITES_END();
    // Zero-initialization must be visible to other threads even if the pointer to the object
    // was fetched without a 'volatile' specifier, so a full memory barrier is required here;
    // some language specs mandate this.
    arch::FullMemoryBarrier();
}

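// Zeroes only the payload of a freshly allocated object: the ObjectHeader itself is expected
// to be zeroed already, which the ASSERTs below verify before skipping past it.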
void ObjectAllocatorBase::ObjectMemoryInit(void *mem, size_t size) const
{
    if (mem == nullptr) {
        return;
    }
    [[maybe_unused]] auto *object = static_cast<ObjectHeader *>(mem);
    ASSERT(object->AtomicGetMark().GetValue() == 0);
    ASSERT(object->ClassAddr<BaseClass *>() == nullptr);
    ASSERT(size >= ObjectHeader::ObjectHeaderSize());
    // Zero the memory after the header, according to the newobj description in the ISA
    size_t sizeToInit = size - ObjectHeader::ObjectHeaderSize();
    void *memToInit = ToVoidPtr(ToUintPtr(mem) + ObjectHeader::ObjectHeaderSize());
    MemoryInitialize(memToInit, sizeToInit);
}

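// The non-generational allocator routes each request by size to one of three sub-allocators:
// a regular-object allocator, a large-object allocator, and a humongous-object allocator
// (see Allocate() below for the exact thresholds).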
template <MTModeT MT_MODE>
ObjectAllocatorNoGen<MT_MODE>::ObjectAllocatorNoGen(MemStatsType *memStats, bool createPygoteSpaceAllocator)
    : ObjectAllocatorBase(memStats, GCCollectMode::GC_ALL, createPygoteSpaceAllocator)
{
    const auto &options = Runtime::GetOptions();
    heapSpace_.Initialize(MemConfig::GetInitialHeapSizeLimit(), MemConfig::GetHeapSizeLimit(),
                          options.GetMinHeapFreePercentage(), options.GetMaxHeapFreePercentage());
    if (createPygoteSpaceAllocator) {
        ASSERT(pygoteSpaceAllocator_ != nullptr);
        pygoteSpaceAllocator_->SetHeapSpace(&heapSpace_);
    }
    objectAllocator_ = new (std::nothrow) ObjectAllocator(memStats);
    ASSERT(objectAllocator_ != nullptr);
    largeObjectAllocator_ = new (std::nothrow) LargeObjectAllocator(memStats);
    ASSERT(largeObjectAllocator_ != nullptr);
    humongousObjectAllocator_ = new (std::nothrow) HumongousObjectAllocator(memStats);
    ASSERT(humongousObjectAllocator_ != nullptr);
}

template <MTModeT MT_MODE>
ObjectAllocatorNoGen<MT_MODE>::~ObjectAllocatorNoGen()
{
    delete objectAllocator_;
    delete largeObjectAllocator_;
    delete humongousObjectAllocator_;
}

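// Size-class dispatch used by Allocate(): the aligned request size picks the sub-allocator.
// A sketch of the routing (the thresholds are queried from the sub-allocators themselves):
//
//   alignedSize <= ObjectAllocator::GetMaxSize()       -> objectAllocator_
//   alignedSize <= LargeObjectAllocator::GetMaxSize()  -> largeObjectAllocator_
//   otherwise                                          -> humongousObjectAllocator_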
template <MTModeT MT_MODE>
void *ObjectAllocatorNoGen<MT_MODE>::Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread,
                                              ObjMemInitPolicy objInit, [[maybe_unused]] bool pinned)
{
    void *mem = nullptr;
    size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
    if (alignedSize <= ObjectAllocator::GetMaxSize()) {
        size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, ObjectAllocator::GetMinPoolSize());
        mem = AllocateSafe(size, align, objectAllocator_, poolSize, SpaceType::SPACE_TYPE_OBJECT, &heapSpace_);
    } else if (alignedSize <= LargeObjectAllocator::GetMaxSize()) {
        size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, LargeObjectAllocator::GetMinPoolSize());
        mem = AllocateSafe(size, align, largeObjectAllocator_, poolSize, SpaceType::SPACE_TYPE_OBJECT, &heapSpace_);
    } else {
        size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, HumongousObjectAllocator::GetMinPoolSize(size));
        mem = AllocateSafe(size, align, humongousObjectAllocator_, poolSize, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT,
                           &heapSpace_);
    }
    if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
        ObjectMemoryInit(mem, size);
    }
    return mem;
}

138
139 template <MTModeT MT_MODE>
AllocateNonMovable(size_t size,Alignment align,ark::ManagedThread * thread,ObjMemInitPolicy objInit)140 void *ObjectAllocatorNoGen<MT_MODE>::AllocateNonMovable(size_t size, Alignment align, ark::ManagedThread *thread,
141 ObjMemInitPolicy objInit)
142 {
143 void *mem = nullptr;
144 // before pygote fork, allocate small non-movable objects in pygote space
145 if (UNLIKELY(IsPygoteAllocEnabled() && pygoteSpaceAllocator_->CanAllocNonMovable(size, align))) {
146 mem = pygoteSpaceAllocator_->Alloc(size, align);
147 } else {
148 // Without generations - no compaction now, so all allocations are non-movable
149 mem = Allocate(size, align, thread, objInit, false);
150 }
151 if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
152 ObjectMemoryInit(mem, size);
153 }
154 return mem;
155 }
156
template <MTModeT MT_MODE>
Alignment ObjectAllocatorNoGen<MT_MODE>::CalculateAllocatorAlignment(size_t align)
{
    ASSERT(GetPurpose() == AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT);
    return GetAlignment(align);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveAllPools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveAllPools(memVisitor);
    largeObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveFreePools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveFreePools(memVisitor);
    largeObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    objectAllocator_->IterateOverObjects(objectVisitor);
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::IterateRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    objectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
}

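// Presumably invoked once the pygote fork has moved the surviving regular-size objects into
// the pygote space (see the comment below): the old pools are returned to the pool manager
// wholesale and the regular-object allocator is recreated empty.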
template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::FreeObjectsMovedToPygoteSpace()
{
    // Clear the allocator because all objects in it have been moved to the pygote space
    objectAllocator_->VisitAndRemoveAllPools(
        [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    delete objectAllocator_;
    objectAllocator_ = new (std::nothrow) ObjectAllocator(memStats_);
    ASSERT(objectAllocator_ != nullptr);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::Collect(const GCObjectVisitor &gcObjectVisitor,
                                            [[maybe_unused]] GCCollectMode collectMode)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->Collect(gcObjectVisitor);
    }
    objectAllocator_->Collect(gcObjectVisitor);
    largeObjectAllocator_->Collect(gcObjectVisitor);
    humongousObjectAllocator_->Collect(gcObjectVisitor);
}

// If there were a common base class for these allocators, we could split this function and
// return a pointer to the allocator containing the object
template <MTModeT MT_MODE>
bool ObjectAllocatorNoGen<MT_MODE>::ContainObject(const ObjectHeader *obj) const
{
    if (objectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (largeObjectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return true;
    }

    return false;
}

template <MTModeT MT_MODE>
bool ObjectAllocatorNoGen<MT_MODE>::IsLive(const ObjectHeader *obj)
{
    if (pygoteSpaceAllocator_ != nullptr && pygoteSpaceAllocator_->ContainObject(obj)) {
        return pygoteSpaceAllocator_->IsLive(obj);
    }
    if (objectAllocator_->ContainObject(obj)) {
        return objectAllocator_->IsLive(obj);
    }
    if (largeObjectAllocator_->ContainObject(obj)) {
        return largeObjectAllocator_->IsLive(obj);
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return humongousObjectAllocator_->IsLive(obj);
    }
    return false;
}

template <MTModeT MT_MODE>
void *ObjectAllocatorGen<MT_MODE>::Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread,
                                            ObjMemInitPolicy objInit, [[maybe_unused]] bool pinned)
{
    void *mem = nullptr;
    size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
    if (LIKELY(alignedSize <= GetYoungAllocMaxSize())) {
        mem = youngGenAllocator_->Alloc(size, align);
    } else {
        mem = AllocateTenured(size);
    }
    if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
        ObjectMemoryInit(mem, size);
    }
    return mem;
}

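// Non-movable allocations never go to the young generation: they are served from the pygote
// space (small objects, pre-fork only), from the dedicated non-movable pools, or from the
// humongous-object allocator, whose objects are non-movable anyway.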
template <MTModeT MT_MODE>
void *ObjectAllocatorGen<MT_MODE>::AllocateNonMovable(size_t size, Alignment align,
                                                      [[maybe_unused]] ark::ManagedThread *thread,
                                                      ObjMemInitPolicy objInit)
{
    void *mem = nullptr;
    // Before the pygote fork, allocate small non-movable objects in the pygote space
    if (UNLIKELY(IsPygoteAllocEnabled() && pygoteSpaceAllocator_->CanAllocNonMovable(size, align))) {
        mem = pygoteSpaceAllocator_->Alloc(size, align);
    } else {
        size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
        if (alignedSize <= ObjectAllocator::GetMaxSize()) {
            size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, ObjectAllocator::GetMinPoolSize());
            mem = AllocateSafe(size, align, nonMovableObjectAllocator_, poolSize,
                               SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT, &heapSpaces_);
        } else if (alignedSize <= LargeObjectAllocator::GetMaxSize()) {
            size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, LargeObjectAllocator::GetMinPoolSize());
            mem = AllocateSafe(size, align, largeNonMovableObjectAllocator_, poolSize,
                               SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT, &heapSpaces_);
        } else {
            // We don't need a special allocator for this case:
            // humongous objects are non-movable anyway
            size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, HumongousObjectAllocator::GetMinPoolSize(size));
            mem = AllocateSafe(size, align, humongousObjectAllocator_, poolSize, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT,
                               &heapSpaces_);
        }
    }
    if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
        ObjectMemoryInit(mem, size);
    }
    return mem;
}

template <MTModeT MT_MODE>
Alignment ObjectAllocatorGen<MT_MODE>::CalculateAllocatorAlignment(size_t align)
{
    ASSERT(GetPurpose() == AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT);
    return GetAlignment(align);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveAllPools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveAllPools(memVisitor);
    largeObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
    nonMovableObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
    largeNonMovableObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveFreePools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveFreePools(memVisitor);
    largeObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
    nonMovableObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
    largeNonMovableObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateOverYoungObjects(const ObjectVisitor &objectVisitor)
{
    youngGenAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateOverTenuredObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    objectAllocator_->IterateOverObjects(objectVisitor);
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
    nonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
    largeNonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    youngGenAllocator_->IterateOverObjects(objectVisitor);
    objectAllocator_->IterateOverObjects(objectVisitor);
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
    nonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
    largeNonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    objectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
    nonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
    largeNonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::FreeObjectsMovedToPygoteSpace()
{
    // Clear the allocator because all objects in it have been moved to the pygote space
    objectAllocator_->VisitAndRemoveAllPools(
        [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    delete objectAllocator_;
    objectAllocator_ = new (std::nothrow) ObjectAllocator(memStats_);
    ASSERT(objectAllocator_ != nullptr);
}

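// GC_MINOR is a no-op here, presumably because the young space is evacuated by the GC itself
// rather than swept in place; GC_ALL and GC_MAJOR sweep every tenured sub-allocator.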
template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::Collect(const GCObjectVisitor &gcObjectVisitor, GCCollectMode collectMode)
{
    switch (collectMode) {
        case GCCollectMode::GC_MINOR:
            break;
        case GCCollectMode::GC_ALL:
        case GCCollectMode::GC_MAJOR:
            if (pygoteSpaceAllocator_ != nullptr) {
                pygoteSpaceAllocator_->Collect(gcObjectVisitor);
            }
            objectAllocator_->Collect(gcObjectVisitor);
            largeObjectAllocator_->Collect(gcObjectVisitor);
            humongousObjectAllocator_->Collect(gcObjectVisitor);
            nonMovableObjectAllocator_->Collect(gcObjectVisitor);
            largeNonMovableObjectAllocator_->Collect(gcObjectVisitor);
            break;
        case GCCollectMode::GC_FULL:
            UNREACHABLE();
            break;
        case GCCollectMode::GC_NONE:
            UNREACHABLE();
            break;
        default:
            UNREACHABLE();
    }
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorNoGen<MT_MODE>::GetRegularObjectMaxSize()
{
    return ObjectAllocator::GetMaxSize();
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorNoGen<MT_MODE>::GetLargeObjectMaxSize()
{
    return LargeObjectAllocator::GetMaxSize();
}

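// TLABs are carved out of a bump-pointer young space, which the non-generational allocator
// does not have, so requesting one here is a fatal error.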
template <MTModeT MT_MODE>
TLAB *ObjectAllocatorNoGen<MT_MODE>::CreateNewTLAB([[maybe_unused]] size_t tlabSize)
{
    LOG(FATAL, ALLOC) << "TLAB is not supported for this allocator";
    return nullptr;
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorNoGen<MT_MODE>::GetTLABMaxAllocSize()
{
    // NOTE(aemelenko): TLAB usage is not supported for non-gen GCs.
    return 0;
}

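// The generational base class wires up a composite heap space: the young-space sizes come
// from the runtime options, while the overall heap limits come from MemConfig.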
ObjectAllocatorGenBase::ObjectAllocatorGenBase(MemStatsType *memStats, GCCollectMode gcCollectMode,
                                               bool createPygoteSpaceAllocator)
    : ObjectAllocatorBase(memStats, gcCollectMode, createPygoteSpaceAllocator)
{
    const auto &options = Runtime::GetOptions();
    heapSpaces_.Initialize(options.GetInitYoungSpaceSize(), options.WasSetInitYoungSpaceSize(),
                           options.GetYoungSpaceSize(), options.WasSetYoungSpaceSize(),
                           MemConfig::GetInitialHeapSizeLimit(), MemConfig::GetHeapSizeLimit(),
                           options.GetMinHeapFreePercentage(), options.GetMaxHeapFreePercentage());
    if (createPygoteSpaceAllocator) {
        ASSERT(pygoteSpaceAllocator_ != nullptr);
        pygoteSpaceAllocator_->SetHeapSpace(&heapSpaces_);
    }
}

template <MTModeT MT_MODE>
ObjectAllocatorGen<MT_MODE>::ObjectAllocatorGen(MemStatsType *memStats, bool createPygoteSpaceAllocator)
    : ObjectAllocatorGenBase(memStats, GCCollectMode::GC_ALL, createPygoteSpaceAllocator)
{
    // For Gen-GC we use a standalone pool for the young space, and we use that pool in full
    heapSpaces_.UseFullYoungSpace();
    size_t youngSpaceSize = heapSpaces_.GetCurrentYoungSize();
    size_t initTlabSize = Runtime::GetOptions().GetInitTlabSize();
    auto youngSharedSpaceSize = Runtime::GetOptions().GetYoungSharedSpaceSize();
    ASSERT(youngSpaceSize >= youngSharedSpaceSize);
    ASSERT(initTlabSize != 0);
    size_t maxTlabsCountInYoungGen;
    if constexpr (MT_MODE == MT_MODE_SINGLE) {
        // For single-threaded VMs, allocate the whole private young space to a single TLAB
        maxTlabsCountInYoungGen = 1;
    } else {
        maxTlabsCountInYoungGen = (youngSpaceSize - youngSharedSpaceSize) / initTlabSize;
        ASSERT(((youngSpaceSize - youngSharedSpaceSize) % initTlabSize) == 0);
    }
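    // Worked example (hypothetical numbers): with a 4 MiB private young space (young space
    // minus the shared part) and a 128 KiB initial TLAB, up to 4 MiB / 128 KiB = 32 TLABs
    // can be live in the young generation at once.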
    ASSERT(maxTlabsCountInYoungGen * initTlabSize <= youngSpaceSize);

    // NOTE(aemelenko): The allocator pointer passed here is not yet valid because we construct
    // the BumpPointer allocator only after calling the AllocArena method
    auto youngPool = heapSpaces_.AllocAlonePoolForYoung(SpaceType::SPACE_TYPE_OBJECT,
                                                        YoungGenAllocator::GetAllocatorType(), &youngGenAllocator_);
    youngGenAllocator_ = new (std::nothrow)
        YoungGenAllocator(std::move(youngPool), SpaceType::SPACE_TYPE_OBJECT, memStats, maxTlabsCountInYoungGen);
    objectAllocator_ = new (std::nothrow) ObjectAllocator(memStats);
    largeObjectAllocator_ = new (std::nothrow) LargeObjectAllocator(memStats);
    humongousObjectAllocator_ = new (std::nothrow) HumongousObjectAllocator(memStats);
    nonMovableObjectAllocator_ = new (std::nothrow) ObjectAllocator(memStats, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    largeNonMovableObjectAllocator_ =
        new (std::nothrow) LargeObjectAllocator(memStats, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    memStats_ = memStats;
    GetYoungRanges().push_back({0, 0});
}

template <MTModeT MT_MODE>
ObjectAllocatorGen<MT_MODE>::~ObjectAllocatorGen()
{
    // We need to free the pool space when the allocator is destroyed
    youngGenAllocator_->VisitAndRemoveAllPools(
        [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    delete youngGenAllocator_;
    delete objectAllocator_;
    delete largeObjectAllocator_;
    delete humongousObjectAllocator_;
    delete nonMovableObjectAllocator_;
    delete largeNonMovableObjectAllocator_;
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorGen<MT_MODE>::GetRegularObjectMaxSize()
{
    return ObjectAllocator::GetMaxSize();
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorGen<MT_MODE>::GetLargeObjectMaxSize()
{
    return LargeObjectAllocator::GetMaxSize();
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::IsObjectInYoungSpace(const ObjectHeader *obj)
{
    if (youngGenAllocator_ == nullptr) {
        return false;
    }
    return youngGenAllocator_->GetMemRange().IsAddressInRange(ToUintPtr(obj));
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::IsIntersectedWithYoung(const MemRange &memRange)
{
    return youngGenAllocator_->GetMemRange().IsIntersect(memRange);
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::IsObjectInNonMovableSpace(const ObjectHeader *obj)
{
    return nonMovableObjectAllocator_->ContainObject(obj);
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::HasYoungSpace()
{
    return youngGenAllocator_ != nullptr;
}

template <MTModeT MT_MODE>
const std::vector<MemRange> &ObjectAllocatorGen<MT_MODE>::GetYoungSpaceMemRanges()
{
    return GetYoungRanges();
}

template <MTModeT MT_MODE>
std::vector<MarkBitmap *> &ObjectAllocatorGen<MT_MODE>::GetYoungSpaceBitmaps()
{
    static std::vector<MarkBitmap *> ret;
    LOG(FATAL, ALLOC) << "GetYoungSpaceBitmaps not applicable for ObjectAllocatorGen";
    return ret;
}

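// Resetting the young allocator invalidates every thread's TLAB, so per-thread TLAB usage is
// flushed into memStats first (unless each TLAB allocation is already tracked individually).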
template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::ResetYoungAllocator()
{
    MemStatsType *memStats = memStats_;
    auto threadCallback = [&memStats](ManagedThread *thread) {
        if (!PANDA_TRACK_TLAB_ALLOCATIONS && (thread->GetTLAB()->GetOccupiedSize() != 0)) {
            memStats->RecordAllocateObject(thread->GetTLAB()->GetOccupiedSize(), SpaceType::SPACE_TYPE_OBJECT);
        }
        if (Runtime::GetOptions().IsAdaptiveTlabSize()) {
            thread->GetWeightedTlabAverage()->ComputeNewSumAndResetSamples();
        }
        // We should not collect the current TLAB's fill statistics for the adaptive size here,
        // since the TLAB may not be completely filled before resetting
        thread->ClearTLAB();
        return true;
    };
    Thread::GetCurrent()->GetVM()->GetThreadManager()->EnumerateThreads(threadCallback);
    youngGenAllocator_->Reset();
}

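// A fresh TLAB is zeroed eagerly here; the ASan unpoison/poison pair keeps the region marked
// unaddressable until the TLAB actually hands memory out.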
template <MTModeT MT_MODE>
TLAB *ObjectAllocatorGen<MT_MODE>::CreateNewTLAB(size_t tlabSize)
{
    TLAB *newTlab = youngGenAllocator_->CreateNewTLAB(tlabSize);
    if (newTlab != nullptr) {
        ASAN_UNPOISON_MEMORY_REGION(newTlab->GetStartAddr(), newTlab->GetSize());
        MemoryInitialize(newTlab->GetStartAddr(), newTlab->GetSize());
        ASAN_POISON_MEMORY_REGION(newTlab->GetStartAddr(), newTlab->GetSize());
    }
    return newTlab;
}

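// With adaptive TLABs the cap is the configured maximum TLAB size; otherwise a TLAB stays at
// its initial size, which therefore also bounds any young-space allocation (see Allocate()).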
template <MTModeT MT_MODE>
size_t ObjectAllocatorGen<MT_MODE>::GetTLABMaxAllocSize()
{
    if (Runtime::GetOptions().IsAdaptiveTlabSize()) {
        return Runtime::GetOptions().GetMaxTlabSize();
    }
    return Runtime::GetOptions().GetInitTlabSize();
}

/* static */
template <MTModeT MT_MODE>
size_t ObjectAllocatorGen<MT_MODE>::GetYoungAllocMaxSize()
{
    if (Runtime::GetOptions().IsAdaptiveTlabSize()) {
        return Runtime::GetOptions().GetMaxTlabSize();
    }
    return Runtime::GetOptions().GetInitTlabSize();
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateOverObjectsInRange(MemRange memRange, const ObjectVisitor &objectVisitor)
{
    // We rely on the memory range related to a card being located in exactly one allocator
    auto spaceType = PoolManager::GetMmapMemPool()->GetSpaceTypeForAddr(ToVoidPtr(memRange.GetStartAddress()));
    auto allocInfo = PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ToVoidPtr(memRange.GetStartAddress()));
    auto *allocator = allocInfo.GetAllocatorHeaderAddr();
    switch (spaceType) {
        case SpaceType::SPACE_TYPE_OBJECT:
            if (allocator == objectAllocator_) {
                objectAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                            ToVoidPtr(memRange.GetEndAddress()));
            } else if (allocator == pygoteSpaceAllocator_) {
                pygoteSpaceAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                                 ToVoidPtr(memRange.GetEndAddress()));
            } else if (allocator == &youngGenAllocator_) {
                // The young pool was registered with &youngGenAllocator_ in the constructor,
                // so the stored allocator header is the address of the member pointer itself
                youngGenAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                              ToVoidPtr(memRange.GetEndAddress()));
            } else if (allocator == largeObjectAllocator_) {
                largeObjectAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                                 ToVoidPtr(memRange.GetEndAddress()));
            } else {
                // If we reach this line, we may have an issue with multi-VM CardTable iteration
                UNREACHABLE();
            }
            break;
        case SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT:
            if (allocator == humongousObjectAllocator_) {
                humongousObjectAllocator_->IterateOverObjectsInRange(
                    objectVisitor, ToVoidPtr(memRange.GetStartAddress()), ToVoidPtr(memRange.GetEndAddress()));
            } else {
                // If we reach this line, we may have an issue with multi-VM CardTable iteration
                UNREACHABLE();
            }
            break;
        case SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT:
            if (allocator == nonMovableObjectAllocator_) {
                nonMovableObjectAllocator_->IterateOverObjectsInRange(
                    objectVisitor, ToVoidPtr(memRange.GetStartAddress()), ToVoidPtr(memRange.GetEndAddress()));
            } else if (allocator == largeNonMovableObjectAllocator_) {
                largeNonMovableObjectAllocator_->IterateOverObjectsInRange(
                    objectVisitor, ToVoidPtr(memRange.GetStartAddress()), ToVoidPtr(memRange.GetEndAddress()));
            } else {
                // If we reach this line, we may have an issue with multi-VM CardTable iteration
                UNREACHABLE();
            }
            break;
        default:
            // If we reach this line, we may have an issue with multi-VM CardTable iteration
            UNREACHABLE();
            break;
    }
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::ContainObject(const ObjectHeader *obj) const
{
    if (pygoteSpaceAllocator_ != nullptr && pygoteSpaceAllocator_->ContainObject(obj)) {
        return true;
    }
    if (youngGenAllocator_->ContainObject(obj)) {
        return true;
    }
    if (objectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (largeObjectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (nonMovableObjectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (largeNonMovableObjectAllocator_->ContainObject(obj)) {
        return true;
    }

    return false;
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::IsLive(const ObjectHeader *obj)
{
    if (pygoteSpaceAllocator_ != nullptr && pygoteSpaceAllocator_->ContainObject(obj)) {
        return pygoteSpaceAllocator_->IsLive(obj);
    }
    if (youngGenAllocator_->ContainObject(obj)) {
        return youngGenAllocator_->IsLive(obj);
    }
    if (objectAllocator_->ContainObject(obj)) {
        return objectAllocator_->IsLive(obj);
    }
    if (largeObjectAllocator_->ContainObject(obj)) {
        return largeObjectAllocator_->IsLive(obj);
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return humongousObjectAllocator_->IsLive(obj);
    }
    if (nonMovableObjectAllocator_->ContainObject(obj)) {
        return nonMovableObjectAllocator_->IsLive(obj);
    }
    if (largeNonMovableObjectAllocator_->ContainObject(obj)) {
        return largeNonMovableObjectAllocator_->IsLive(obj);
    }

    return false;
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::UpdateSpaceData()
{
    GetYoungRanges().push_back(youngGenAllocator_->GetMemRange());
}

void ObjectAllocatorGenBase::InvalidateSpaceData()
{
    ranges_.clear();
    youngBitmaps_.clear();
}

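// Explicit instantiations for all supported threading modes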
template class ObjectAllocatorGen<MT_MODE_SINGLE>;
template class ObjectAllocatorGen<MT_MODE_MULTI>;
template class ObjectAllocatorGen<MT_MODE_TASK>;
template class ObjectAllocatorNoGen<MT_MODE_SINGLE>;
template class ObjectAllocatorNoGen<MT_MODE_MULTI>;
template class ObjectAllocatorNoGen<MT_MODE_TASK>;

}  // namespace ark::mem