/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// These includes are required to avoid linker errors:

#include "runtime/arch/memory_helpers.h"
#include "runtime/include/mem/allocator.h"
#include "runtime/include/mem/allocator-inl.h"
#include "mem/mem_pool.h"
#include "mem/mem_config.h"
#include "mem/mem.h"
#include "runtime/include/runtime.h"
#include "runtime/include/panda_vm.h"
#include "runtime/include/object_header.h"
#include "runtime/mem/bump-allocator-inl.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/internal_allocator-inl.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/mem/pygote_space_allocator-inl.h"
#include "runtime/mem/tlab.h"

namespace panda::mem {

Allocator::~Allocator() = default;

ObjectAllocatorBase::ObjectAllocatorBase(MemStatsType *memStats, GCCollectMode gcCollectMode,
                                         bool createPygoteSpaceAllocator)
    : Allocator(memStats, AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT, gcCollectMode)
{
    if (createPygoteSpaceAllocator) {
        pygoteSpaceAllocator_ = new (std::nothrow) PygoteAllocator(memStats);
        pygoteAllocEnabled_ = true;
    }
}

ObjectAllocatorBase::~ObjectAllocatorBase()
{
    // NOLINTNEXTLINE(readability-delete-null-pointer)
    if (pygoteSpaceAllocator_ != nullptr) {
        delete pygoteSpaceAllocator_;
    }
}

bool ObjectAllocatorBase::HaveEnoughPoolsInObjectSpace(size_t poolsNum) const
{
    auto memPool = PoolManager::GetMmapMemPool();
    auto poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, PANDA_DEFAULT_ALLOCATOR_POOL_SIZE);
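    // Taking the larger of the two default pool sizes keeps this check conservative:
    // a positive answer then holds for whichever pool size an allocator actually requests.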
    return memPool->HaveEnoughPoolsInObjectSpace(poolsNum, poolSize);
}

void ObjectAllocatorBase::MemoryInitialize(void *mem, size_t size) const
{
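    // The writes below are published to other threads by the explicit fence, which the race
    // detector cannot see, so they are excluded from TSAN analysis to avoid false positives.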
    TSAN_ANNOTATE_IGNORE_WRITES_BEGIN();
    memset_s(mem, size, 0, size);
    TSAN_ANNOTATE_IGNORE_WRITES_END();
    // Zero initialization must be visible to other threads even if the pointer to the object
    // was fetched without a 'volatile' specifier, so a full memory barrier is required here
    // (some language specs demand this)
    arch::FullMemoryBarrier();
}

void ObjectAllocatorBase::ObjectMemoryInit(void *mem, size_t size) const
{
    if (mem == nullptr) {
        return;
    }
    [[maybe_unused]] auto *object = static_cast<ObjectHeader *>(mem);
    ASSERT(object->AtomicGetMark().GetValue() == 0);
    ASSERT(object->ClassAddr<BaseClass *>() == nullptr);
    ASSERT(size >= ObjectHeader::ObjectHeaderSize());
    // zeroing according to the newobj description in the ISA
    size_t sizeToInit = size - ObjectHeader::ObjectHeaderSize();
    void *memToInit = ToVoidPtr(ToUintPtr(mem) + ObjectHeader::ObjectHeaderSize());
    MemoryInitialize(memToInit, sizeToInit);
}

template <MTModeT MT_MODE>
ObjectAllocatorNoGen<MT_MODE>::ObjectAllocatorNoGen(MemStatsType *memStats, bool createPygoteSpaceAllocator)
    : ObjectAllocatorBase(memStats, GCCollectMode::GC_ALL, createPygoteSpaceAllocator)
{
    const auto &options = Runtime::GetOptions();
    heapSpace_.Initialize(MemConfig::GetInitialHeapSizeLimit(), MemConfig::GetHeapSizeLimit(),
                          options.GetMinHeapFreePercentage(), options.GetMaxHeapFreePercentage());
    if (createPygoteSpaceAllocator) {
        ASSERT(pygoteSpaceAllocator_ != nullptr);
        pygoteSpaceAllocator_->SetHeapSpace(&heapSpace_);
    }
    objectAllocator_ = new (std::nothrow) ObjectAllocator(memStats);
    largeObjectAllocator_ = new (std::nothrow) LargeObjectAllocator(memStats);
    humongousObjectAllocator_ = new (std::nothrow) HumongousObjectAllocator(memStats);
}

template <MTModeT MT_MODE>
ObjectAllocatorNoGen<MT_MODE>::~ObjectAllocatorNoGen()
{
    delete objectAllocator_;
    delete largeObjectAllocator_;
    delete humongousObjectAllocator_;
}

template <MTModeT MT_MODE>
void *ObjectAllocatorNoGen<MT_MODE>::Allocate(size_t size, Alignment align,
                                              [[maybe_unused]] panda::ManagedThread *thread, ObjMemInitPolicy objInit)
{
    void *mem = nullptr;
    size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
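    // Dispatch by aligned size: regular-size objects, large objects, and humongous
    // objects each go to their own allocator with a matching minimum pool size.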
    if (alignedSize <= ObjectAllocator::GetMaxSize()) {
        size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, ObjectAllocator::GetMinPoolSize());
        mem = AllocateSafe(size, align, objectAllocator_, poolSize, SpaceType::SPACE_TYPE_OBJECT, &heapSpace_);
    } else if (alignedSize <= LargeObjectAllocator::GetMaxSize()) {
        size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, LargeObjectAllocator::GetMinPoolSize());
        mem = AllocateSafe(size, align, largeObjectAllocator_, poolSize, SpaceType::SPACE_TYPE_OBJECT, &heapSpace_);
    } else {
        size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, HumongousObjectAllocator::GetMinPoolSize(size));
        mem = AllocateSafe(size, align, humongousObjectAllocator_, poolSize, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT,
                           &heapSpace_);
    }
    if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
        ObjectMemoryInit(mem, size);
    }
    return mem;
}

template <MTModeT MT_MODE>
void *ObjectAllocatorNoGen<MT_MODE>::AllocateNonMovable(size_t size, Alignment align, panda::ManagedThread *thread,
                                                        ObjMemInitPolicy objInit)
{
    void *mem = nullptr;
    // before pygote fork, allocate small non-movable objects in pygote space
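    // (The pygote space is assumed to hold objects shared across processes forked from
    // the pygote; after the fork, allocation falls through to the regular path below.)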
    if (UNLIKELY(IsPygoteAllocEnabled() && pygoteSpaceAllocator_->CanAllocNonMovable(size, align))) {
        mem = pygoteSpaceAllocator_->Alloc(size, align);
        if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
            ObjectMemoryInit(mem, size);
        }
    } else {
        // Without generations there is no compaction for now, so all allocations are non-movable;
        // Allocate() already performs the requested memory initialization itself
        mem = Allocate(size, align, thread, objInit);
    }
    return mem;
}

template <MTModeT MT_MODE>
Alignment ObjectAllocatorNoGen<MT_MODE>::CalculateAllocatorAlignment(size_t align)
{
    ASSERT(GetPurpose() == AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT);
    return GetAlignment(align);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveAllPools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveAllPools(memVisitor);
    largeObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveFreePools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveFreePools(memVisitor);
    largeObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    objectAllocator_->IterateOverObjects(objectVisitor);
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::IterateRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    objectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::FreeObjectsMovedToPygoteSpace()
{
    // clear because we have moved all objects from it to the pygote space
    objectAllocator_->VisitAndRemoveAllPools(
        [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    delete objectAllocator_;
    objectAllocator_ = new (std::nothrow) ObjectAllocator(memStats_);
}

template <MTModeT MT_MODE>
void ObjectAllocatorNoGen<MT_MODE>::Collect(const GCObjectVisitor &gcObjectVisitor,
                                            [[maybe_unused]] GCCollectMode collectMode)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->Collect(gcObjectVisitor);
    }
    objectAllocator_->Collect(gcObjectVisitor);
    largeObjectAllocator_->Collect(gcObjectVisitor);
    humongousObjectAllocator_->Collect(gcObjectVisitor);
}

// If there were a common base class for these allocators, we could split this function and
// return a pointer to the allocator containing the object
template <MTModeT MT_MODE>
bool ObjectAllocatorNoGen<MT_MODE>::ContainObject(const ObjectHeader *obj) const
{
    if (objectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (largeObjectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return true;
    }

    return false;
}

template <MTModeT MT_MODE>
bool ObjectAllocatorNoGen<MT_MODE>::IsLive(const ObjectHeader *obj)
{
    if (pygoteSpaceAllocator_ != nullptr && pygoteSpaceAllocator_->ContainObject(obj)) {
        return pygoteSpaceAllocator_->IsLive(obj);
    }
    if (objectAllocator_->ContainObject(obj)) {
        return objectAllocator_->IsLive(obj);
    }
    if (largeObjectAllocator_->ContainObject(obj)) {
        return largeObjectAllocator_->IsLive(obj);
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return humongousObjectAllocator_->IsLive(obj);
    }
    return false;
}

template <MTModeT MT_MODE>
void *ObjectAllocatorGen<MT_MODE>::Allocate(size_t size, Alignment align, [[maybe_unused]] panda::ManagedThread *thread,
                                            ObjMemInitPolicy objInit)
{
    void *mem = nullptr;
    size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
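    // Fast path: anything up to YOUNG_ALLOC_MAX_SIZE is bump-pointer allocated in the
    // young generation; everything else goes straight to the tenured space.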
    if (LIKELY(alignedSize <= YOUNG_ALLOC_MAX_SIZE)) {
        mem = youngGenAllocator_->Alloc(size, align);
    } else {
        mem = AllocateTenured(size);
    }
    if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
        ObjectMemoryInit(mem, size);
    }
    return mem;
}

template <MTModeT MT_MODE>
void *ObjectAllocatorGen<MT_MODE>::AllocateNonMovable(size_t size, Alignment align,
                                                      [[maybe_unused]] panda::ManagedThread *thread,
                                                      ObjMemInitPolicy objInit)
{
    void *mem = nullptr;
    // before pygote fork, allocate small non-movable objects in pygote space
    if (UNLIKELY(IsPygoteAllocEnabled() && pygoteSpaceAllocator_->CanAllocNonMovable(size, align))) {
        mem = pygoteSpaceAllocator_->Alloc(size, align);
    } else {
        size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
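        // Same size-based dispatch as for movable objects, but into the dedicated
        // non-movable spaces; humongous objects are non-movable by construction.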
        if (alignedSize <= ObjectAllocator::GetMaxSize()) {
            size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, ObjectAllocator::GetMinPoolSize());
            mem = AllocateSafe(size, align, nonMovableObjectAllocator_, poolSize,
                               SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT, &heapSpaces_);
        } else if (alignedSize <= LargeObjectAllocator::GetMaxSize()) {
            size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, LargeObjectAllocator::GetMinPoolSize());
            mem = AllocateSafe(size, align, largeNonMovableObjectAllocator_, poolSize,
                               SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT, &heapSpaces_);
        } else {
            // We don't need a special allocator for this case:
            // humongous objects are non-movable anyway
            size_t poolSize = std::max(PANDA_DEFAULT_POOL_SIZE, HumongousObjectAllocator::GetMinPoolSize(size));
            mem = AllocateSafe(size, align, humongousObjectAllocator_, poolSize, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT,
                               &heapSpaces_);
        }
    }
    if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
        ObjectMemoryInit(mem, size);
    }
    return mem;
}

template <MTModeT MT_MODE>
Alignment ObjectAllocatorGen<MT_MODE>::CalculateAllocatorAlignment(size_t align)
{
    ASSERT(GetPurpose() == AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT);
    return GetAlignment(align);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveAllPools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveAllPools(memVisitor);
    largeObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
    nonMovableObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
    largeNonMovableObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveFreePools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveFreePools(memVisitor);
    largeObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
    nonMovableObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
    largeNonMovableObjectAllocator_->VisitAndRemoveFreePools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateOverYoungObjects(const ObjectVisitor &objectVisitor)
{
    youngGenAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateOverTenuredObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    objectAllocator_->IterateOverObjects(objectVisitor);
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
    nonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
    largeNonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    youngGenAllocator_->IterateOverObjects(objectVisitor);
    objectAllocator_->IterateOverObjects(objectVisitor);
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
    nonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
    largeNonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    objectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    largeObjectAllocator_->IterateOverObjects(objectVisitor);
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
    nonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
    largeNonMovableObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::FreeObjectsMovedToPygoteSpace()
{
    // clear because we have moved all objects from it to the pygote space
    objectAllocator_->VisitAndRemoveAllPools(
        [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    delete objectAllocator_;
    objectAllocator_ = new (std::nothrow) ObjectAllocator(memStats_);
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::Collect(const GCObjectVisitor &gcObjectVisitor, GCCollectMode collectMode)
{
    switch (collectMode) {
        case GCCollectMode::GC_MINOR:
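            // Nothing to sweep on a minor collection: only the young space is collected,
            // and it is reset separately (see ResetYoungAllocator), not via these allocators.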
            break;
        case GCCollectMode::GC_ALL:
        case GCCollectMode::GC_MAJOR:
            if (pygoteSpaceAllocator_ != nullptr) {
                pygoteSpaceAllocator_->Collect(gcObjectVisitor);
            }
            objectAllocator_->Collect(gcObjectVisitor);
            largeObjectAllocator_->Collect(gcObjectVisitor);
            humongousObjectAllocator_->Collect(gcObjectVisitor);
            nonMovableObjectAllocator_->Collect(gcObjectVisitor);
            largeNonMovableObjectAllocator_->Collect(gcObjectVisitor);
            break;
        case GCCollectMode::GC_FULL:
            UNREACHABLE();
            break;
        case GC_NONE:
            UNREACHABLE();
            break;
        default:
            UNREACHABLE();
    }
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorNoGen<MT_MODE>::GetRegularObjectMaxSize()
{
    return ObjectAllocator::GetMaxSize();
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorNoGen<MT_MODE>::GetLargeObjectMaxSize()
{
    return LargeObjectAllocator::GetMaxSize();
}

template <MTModeT MT_MODE>
TLAB *ObjectAllocatorNoGen<MT_MODE>::CreateNewTLAB([[maybe_unused]] panda::ManagedThread *thread)
{
    LOG(FATAL, ALLOC) << "TLAB is not supported for this allocator";
    return nullptr;
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorNoGen<MT_MODE>::GetTLABMaxAllocSize()
{
    // NOTE(aemelenko): TLAB usage is not supported for non-gen GCs.
    return 0;
}

ObjectAllocatorGenBase::ObjectAllocatorGenBase(MemStatsType *memStats, GCCollectMode gcCollectMode,
                                               bool createPygoteSpaceAllocator)
    : ObjectAllocatorBase(memStats, gcCollectMode, createPygoteSpaceAllocator)
{
    const auto &options = Runtime::GetOptions();
    heapSpaces_.Initialize(options.GetInitYoungSpaceSize(), options.WasSetInitYoungSpaceSize(),
                           options.GetYoungSpaceSize(), options.WasSetYoungSpaceSize(),
                           MemConfig::GetInitialHeapSizeLimit(), MemConfig::GetHeapSizeLimit(),
                           options.GetMinHeapFreePercentage(), options.GetMaxHeapFreePercentage());
    if (createPygoteSpaceAllocator) {
        ASSERT(pygoteSpaceAllocator_ != nullptr);
        pygoteSpaceAllocator_->SetHeapSpace(&heapSpaces_);
    }
}

template <MTModeT MT_MODE>
ObjectAllocatorGen<MT_MODE>::ObjectAllocatorGen(MemStatsType *memStats, bool createPygoteSpaceAllocator)
    : ObjectAllocatorGenBase(memStats, GCCollectMode::GC_ALL, createPygoteSpaceAllocator)
{
    // For Gen-GC we use a separate pool for the young space, so we use that pool in full
    heapSpaces_.UseFullYoungSpace();
    size_t youngSpaceSize = heapSpaces_.GetCurrentYoungSize();
    auto youngSharedSpaceSize = Runtime::GetOptions().GetYoungSharedSpaceSize();
    ASSERT(youngSpaceSize >= youngSharedSpaceSize);
    size_t tlabsCountInYoungGen;
    if constexpr (MT_MODE == MT_MODE_SINGLE) {
        // For single-threaded VMs, allocate the whole private young space for the TLAB
        tlabSize_ = youngSpaceSize - youngSharedSpaceSize;
        tlabsCountInYoungGen = 1;
    } else {
        tlabsCountInYoungGen = (youngSpaceSize - youngSharedSpaceSize) / DEFAULT_YOUNG_TLAB_SIZE;
        ASSERT(((youngSpaceSize - youngSharedSpaceSize) % DEFAULT_YOUNG_TLAB_SIZE) == 0);
    }
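    // In the multi-threaded case tlabSize_ is not recomputed here; it presumably keeps its
    // default value from the class definition, which the asserts below rely on.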
    ASSERT(YOUNG_ALLOC_MAX_SIZE <= tlabSize_);
    ASSERT(tlabsCountInYoungGen * tlabSize_ <= youngSpaceSize);

    // NOTE(aemelenko): The allocator pointer passed below is not valid yet,
    // because we construct the BumpPointer allocator only after the AllocArena call
    auto youngPool = heapSpaces_.AllocAlonePoolForYoung(SpaceType::SPACE_TYPE_OBJECT,
                                                        YoungGenAllocator::GetAllocatorType(), &youngGenAllocator_);
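    // As a result, the young pool is registered with the address of the youngGenAllocator_
    // member itself; IterateOverObjectsInRange() relies on this when matching allocators.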
    youngGenAllocator_ = new (std::nothrow)
        YoungGenAllocator(std::move(youngPool), SpaceType::SPACE_TYPE_OBJECT, memStats, tlabsCountInYoungGen);
    objectAllocator_ = new (std::nothrow) ObjectAllocator(memStats);
    largeObjectAllocator_ = new (std::nothrow) LargeObjectAllocator(memStats);
    humongousObjectAllocator_ = new (std::nothrow) HumongousObjectAllocator(memStats);
    nonMovableObjectAllocator_ = new (std::nothrow) ObjectAllocator(memStats, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    largeNonMovableObjectAllocator_ =
        new (std::nothrow) LargeObjectAllocator(memStats, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    memStats_ = memStats;
    GetYoungRanges().push_back({0, 0});
}

template <MTModeT MT_MODE>
ObjectAllocatorGen<MT_MODE>::~ObjectAllocatorGen()
{
    // need to free the pool space when the allocator is destroyed
    youngGenAllocator_->VisitAndRemoveAllPools(
        [](void *mem, size_t size) { PoolManager::GetMmapMemPool()->FreePool(mem, size); });
    delete youngGenAllocator_;
    delete objectAllocator_;
    delete largeObjectAllocator_;
    delete humongousObjectAllocator_;
    delete nonMovableObjectAllocator_;
    delete largeNonMovableObjectAllocator_;
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorGen<MT_MODE>::GetRegularObjectMaxSize()
{
    return ObjectAllocator::GetMaxSize();
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorGen<MT_MODE>::GetLargeObjectMaxSize()
{
    return LargeObjectAllocator::GetMaxSize();
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::IsObjectInYoungSpace(const ObjectHeader *obj)
{
    if (youngGenAllocator_ == nullptr) {
        return false;
    }
    return youngGenAllocator_->GetMemRange().IsAddressInRange(ToUintPtr(obj));
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::IsIntersectedWithYoung(const MemRange &memRange)
{
    return youngGenAllocator_->GetMemRange().IsIntersect(memRange);
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::IsObjectInNonMovableSpace(const ObjectHeader *obj)
{
    return nonMovableObjectAllocator_->ContainObject(obj);
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::HasYoungSpace()
{
    return youngGenAllocator_ != nullptr;
}

template <MTModeT MT_MODE>
const std::vector<MemRange> &ObjectAllocatorGen<MT_MODE>::GetYoungSpaceMemRanges()
{
    return GetYoungRanges();
}

template <MTModeT MT_MODE>
std::vector<MarkBitmap *> &ObjectAllocatorGen<MT_MODE>::GetYoungSpaceBitmaps()
{
    static std::vector<MarkBitmap *> ret;
    LOG(FATAL, ALLOC) << "GetYoungSpaceBitmaps not applicable for ObjectAllocatorGen";
    return ret;
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::ResetYoungAllocator()
{
    MemStatsType *memStats = memStats_;
    auto threadCallback = [&memStats](ManagedThread *thread) {
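        // When TLAB allocations are not tracked individually, account for the whole
        // occupied TLAB size here, just before the TLAB is detached from the thread.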
        if (!PANDA_TRACK_TLAB_ALLOCATIONS && (thread->GetTLAB()->GetOccupiedSize() != 0)) {
            memStats->RecordAllocateObject(thread->GetTLAB()->GetOccupiedSize(), SpaceType::SPACE_TYPE_OBJECT);
        }
        thread->ClearTLAB();
        return true;
    };
    Thread::GetCurrent()->GetVM()->GetThreadManager()->EnumerateThreads(threadCallback);
    youngGenAllocator_->Reset();
}

template <MTModeT MT_MODE>
TLAB *ObjectAllocatorGen<MT_MODE>::CreateNewTLAB([[maybe_unused]] panda::ManagedThread *thread)
{
    TLAB *newTlab = youngGenAllocator_->CreateNewTLAB(tlabSize_);
    if (newTlab != nullptr) {
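        // Unpoison the region only while zero-initializing it, then poison it back:
        // the mutator is expected to unpoison memory as it allocates from the TLAB.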
        ASAN_UNPOISON_MEMORY_REGION(newTlab->GetStartAddr(), newTlab->GetSize());
        MemoryInitialize(newTlab->GetStartAddr(), newTlab->GetSize());
        ASAN_POISON_MEMORY_REGION(newTlab->GetStartAddr(), newTlab->GetSize());
    }
    return newTlab;
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorGen<MT_MODE>::GetTLABMaxAllocSize()
{
    return YOUNG_ALLOC_MAX_SIZE;
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::IterateOverObjectsInRange(MemRange memRange, const ObjectVisitor &objectVisitor)
{
    // we need to ensure that the mem range related to a card is located in a single allocator
    auto spaceType = PoolManager::GetMmapMemPool()->GetSpaceTypeForAddr(ToVoidPtr(memRange.GetStartAddress()));
    auto allocInfo = PoolManager::GetMmapMemPool()->GetAllocatorInfoForAddr(ToVoidPtr(memRange.GetStartAddress()));
    auto *allocator = allocInfo.GetAllocatorHeaderAddr();
    switch (spaceType) {
        case SpaceType::SPACE_TYPE_OBJECT:
            if (allocator == objectAllocator_) {
                objectAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                            ToVoidPtr(memRange.GetEndAddress()));
            } else if (allocator == pygoteSpaceAllocator_) {
                pygoteSpaceAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                                 ToVoidPtr(memRange.GetEndAddress()));
            } else if (allocator == &youngGenAllocator_) {
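                // The young pool was registered with the address of the youngGenAllocator_
                // member (see the constructor), hence the address-of comparison above.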
                youngGenAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                              ToVoidPtr(memRange.GetEndAddress()));
            } else if (allocator == largeObjectAllocator_) {
                largeObjectAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                                 ToVoidPtr(memRange.GetEndAddress()));
            } else {
                // if we reach this line, we may have an issue with multiVM CardTable iteration
                UNREACHABLE();
            }
            break;
        case SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT:
            if (allocator == humongousObjectAllocator_) {
                humongousObjectAllocator_->IterateOverObjectsInRange(
                    objectVisitor, ToVoidPtr(memRange.GetStartAddress()), ToVoidPtr(memRange.GetEndAddress()));
            } else {
                // if we reach this line, we may have an issue with multiVM CardTable iteration
                UNREACHABLE();
            }
            break;
        case SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT:
            if (allocator == nonMovableObjectAllocator_) {
                nonMovableObjectAllocator_->IterateOverObjectsInRange(
                    objectVisitor, ToVoidPtr(memRange.GetStartAddress()), ToVoidPtr(memRange.GetEndAddress()));
            } else if (allocator == largeNonMovableObjectAllocator_) {
                largeNonMovableObjectAllocator_->IterateOverObjectsInRange(
                    objectVisitor, ToVoidPtr(memRange.GetStartAddress()), ToVoidPtr(memRange.GetEndAddress()));
            } else {
                // if we reach this line, we may have an issue with multiVM CardTable iteration
                UNREACHABLE();
            }
            break;
        default:
            // if we reach this line, we may have an issue with multiVM CardTable iteration
            UNREACHABLE();
            break;
    }
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::ContainObject(const ObjectHeader *obj) const
{
    if (pygoteSpaceAllocator_ != nullptr && pygoteSpaceAllocator_->ContainObject(obj)) {
        return true;
    }
    if (youngGenAllocator_->ContainObject(obj)) {
        return true;
    }
    if (objectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (largeObjectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (nonMovableObjectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (largeNonMovableObjectAllocator_->ContainObject(obj)) {
        return true;
    }

    return false;
}

template <MTModeT MT_MODE>
bool ObjectAllocatorGen<MT_MODE>::IsLive(const ObjectHeader *obj)
{
    if (pygoteSpaceAllocator_ != nullptr && pygoteSpaceAllocator_->ContainObject(obj)) {
        return pygoteSpaceAllocator_->IsLive(obj);
    }
    if (youngGenAllocator_->ContainObject(obj)) {
        return youngGenAllocator_->IsLive(obj);
    }
    if (objectAllocator_->ContainObject(obj)) {
        return objectAllocator_->IsLive(obj);
    }
    if (largeObjectAllocator_->ContainObject(obj)) {
        return largeObjectAllocator_->IsLive(obj);
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return humongousObjectAllocator_->IsLive(obj);
    }
    if (nonMovableObjectAllocator_->ContainObject(obj)) {
        return nonMovableObjectAllocator_->IsLive(obj);
    }
    if (largeNonMovableObjectAllocator_->ContainObject(obj)) {
        return largeNonMovableObjectAllocator_->IsLive(obj);
    }

    return false;
}

template <MTModeT MT_MODE>
void ObjectAllocatorGen<MT_MODE>::UpdateSpaceData()
{
    GetYoungRanges().push_back(youngGenAllocator_->GetMemRange());
}

void ObjectAllocatorGenBase::InvalidateSpaceData()
{
    ranges_.clear();
    youngBitmaps_.clear();
}

template class ObjectAllocatorGen<MT_MODE_SINGLE>;
template class ObjectAllocatorGen<MT_MODE_MULTI>;
template class ObjectAllocatorNoGen<MT_MODE_SINGLE>;
template class ObjectAllocatorNoGen<MT_MODE_MULTI>;

} // namespace panda::mem