/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H
#define PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H

#include "libpandabase/mem/mem.h"
#include "libpandabase/utils/logger.h"
#include "runtime/include/runtime.h"
#include "runtime/include/thread.h"
#include "runtime/include/gc_task.h"
#include "runtime/mem/region_allocator.h"
#include "runtime/mem/region_space-inl.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/alloc_config.h"
#include "runtime/arch/memory_helpers.h"

namespace panda::mem {

template <typename LockConfigT>
RegionAllocatorBase<LockConfigT>::RegionAllocatorBase(MemStatsType *mem_stats, GenerationalSpaces *spaces,
                                                      SpaceType space_type, AllocatorType allocator_type,
                                                      size_t init_space_size, bool extend, size_t region_size)
    : mem_stats_(mem_stats),
      space_type_(space_type),
      spaces_(spaces),
      region_pool_(region_size, extend, spaces,
                   InternalAllocatorPtr(InternalAllocator<>::GetInternalAllocatorFromRuntime())),
      region_space_(space_type, allocator_type, &region_pool_),
      init_block_(0, nullptr)
{
    ASSERT(space_type_ == SpaceType::SPACE_TYPE_OBJECT || space_type_ == SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT ||
           space_type_ == SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    init_block_ = NULLPOOL;
    if (init_space_size > 0) {
        ASSERT(init_space_size % region_size == 0);
        init_block_ = spaces_->AllocSharedPool(init_space_size, space_type, AllocatorType::REGION_ALLOCATOR, this);
        ASSERT(init_block_.GetMem() != nullptr);
        ASSERT(init_block_.GetSize() >= init_space_size);
        if (init_block_.GetMem() != nullptr) {
            region_pool_.InitRegionBlock(ToUintPtr(init_block_.GetMem()),
                                         ToUintPtr(init_block_.GetMem()) + init_space_size);
            ASAN_POISON_MEMORY_REGION(init_block_.GetMem(), init_block_.GetSize());
        }
    }
}

template <typename LockConfigT>
RegionAllocatorBase<LockConfigT>::RegionAllocatorBase(MemStatsType *mem_stats, GenerationalSpaces *spaces,
                                                      SpaceType space_type, AllocatorType allocator_type,
                                                      RegionPool *shared_region_pool)
    : mem_stats_(mem_stats),
      space_type_(space_type),
      spaces_(spaces),
      region_pool_(0, false, spaces, nullptr), // unused
      region_space_(space_type, allocator_type, shared_region_pool),
      init_block_(0, nullptr) // unused
{
    ASSERT(space_type_ == SpaceType::SPACE_TYPE_OBJECT || space_type_ == SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
}

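// Allocates a region of region_size bytes with the given type and properties and
// prepares it for use: runs the young-region init hook for eden regions, publishes
// the fully set up header with a memory barrier, and annotates the ordering for TSAN.
// Returns nullptr if the region space cannot provide a region.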
template <typename LockConfigT>
template <typename AllocConfigT>
Region *RegionAllocatorBase<LockConfigT>::CreateAndSetUpNewRegion(size_t region_size, RegionFlag region_type,
                                                                  RegionFlag properties)
{
    Region *region = AllocRegion(region_size, region_type, properties);
    if (LIKELY(region != nullptr)) {
        if (region_type == RegionFlag::IS_EDEN) {
            AllocConfigT::OnInitYoungRegion({region->Begin(), region->End()});
        }
        // Do a memory barrier here to make sure all threads see the references to the bitmaps.
        // The situation:
        // A mutator thread allocates a new object. During object allocation the mutator
        // allocates a new region, sets up the region header, allocates the object in the region
        // and publishes the reference to the object.
        // The GC thread does concurrent marking. It sees the reference to the new object and gets
        // the region by the object address.
        // Since the GC thread doesn't lock region_lock_, we need a memory barrier here to make
        // sure the GC thread sees all the bitmaps from the region header.
        arch::FullMemoryBarrier();
        // Getting a region by object address is a bitwise operation, so TSAN doesn't
        // see the relation between region creation and region access.
        // This annotation tells TSAN that this code always executes before
        // the region is accessed.
        // See the corresponding annotation in ObjectToRegion.
        TSAN_ANNOTATE_HAPPENS_BEFORE(region);
    }
    return region;
}

template <typename LockConfigT>
PandaVector<Region *> RegionAllocatorBase<LockConfigT>::GetAllRegions()
{
    PandaVector<Region *> vector;
    os::memory::LockHolder lock(this->region_lock_);
    GetSpace()->IterateRegions([&](Region *region) { vector.push_back(region); });
    return vector;
}

template <typename AllocConfigT, typename LockConfigT>
RegionAllocator<AllocConfigT, LockConfigT>::RegionAllocator(MemStatsType *mem_stats, GenerationalSpaces *spaces,
                                                            SpaceType space_type, size_t init_space_size, bool extend)
    : RegionAllocatorBase<LockConfigT>(mem_stats, spaces, space_type, AllocatorType::REGION_ALLOCATOR, init_space_size,
                                       extend, REGION_SIZE),
      full_region_(nullptr, 0, 0),
      eden_current_region_(&full_region_)
{
}

template <typename AllocConfigT, typename LockConfigT>
RegionAllocator<AllocConfigT, LockConfigT>::RegionAllocator(MemStatsType *mem_stats, GenerationalSpaces *spaces,
                                                            SpaceType space_type, RegionPool *shared_region_pool)
    : RegionAllocatorBase<LockConfigT>(mem_stats, spaces, space_type, AllocatorType::REGION_ALLOCATOR,
                                       shared_region_pool),
      full_region_(nullptr, 0, 0),
      eden_current_region_(&full_region_)
{
}

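// Regular-size allocation path for a given region_type:
// - IS_EDEN: try a lock-free bump-pointer allocation in the current region; on failure,
//   retry under region_lock_ (another thread may have installed a fresh region in the
//   meantime) and, as a last resort, create a new region and make it the current one.
// - IS_OLD: take a region from the region queue, or create a new one under the lock.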
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag region_type>
void *RegionAllocator<AllocConfigT, LockConfigT>::AllocRegular(size_t align_size)
{
    static constexpr bool is_atomic = std::is_same_v<LockConfigT, RegionAllocatorLockConfig::CommonLock>;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (region_type == RegionFlag::IS_EDEN) {
        void *mem = GetCurrentRegion<is_atomic, region_type>()->template Alloc<is_atomic>(align_size);
        if (mem != nullptr) {
            return mem;
        }

        os::memory::LockHolder lock(this->region_lock_);
        mem = GetCurrentRegion<is_atomic, region_type>()->template Alloc<is_atomic>(align_size);
        if (mem != nullptr) {
            return mem;
        }

        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, region_type);
        if (LIKELY(region != nullptr)) {
            mem = region->template Alloc<false>(align_size);
            SetCurrentRegion<is_atomic, region_type>(region);
        }

        return mem;
    }
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (region_type == RegionFlag::IS_OLD) {
        void *mem = nullptr;
        Region *region_to = PopFromRegionQueue<is_atomic, region_type>();
        if (region_to != nullptr) {
            mem = region_to->template Alloc<false>(align_size);
            if (mem != nullptr) {
                PushToRegionQueue<is_atomic, region_type>(region_to);
                return mem;
            }
        }

        os::memory::LockHolder lock(this->region_lock_);
        region_to = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, region_type);
        if (LIKELY(region_to != nullptr)) {
            mem = region_to->template Alloc<false>(align_size);
            PushToRegionQueue<is_atomic, region_type>(region_to);
        }

        return mem;
    }
    UNREACHABLE();
    return nullptr;
}

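// Allocates `size` bytes with the requested alignment. Regular-size movable objects go
// through AllocRegular; non-movable or oversized objects get a dedicated large-object
// region. An illustrative call site (not part of this file; the config name and the
// defaulted second template argument are assumptions):
//
//   RegionAllocator<ObjectAllocConfig, RegionAllocatorLockConfig::CommonLock> allocator(...);
//   void *obj = allocator.Alloc<RegionFlag::IS_EDEN>(object_size, DEFAULT_ALIGNMENT);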
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag region_type, bool update_memstats>
void *RegionAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t align_size = AlignUp(size, GetAlignmentInBytes(align));
    void *mem = nullptr;
    // Movable objects of regular size are allocated from a shared region;
    // non-movable or large objects get a separate large region of their own.
    if (this->GetSpaceType() != SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT &&
        LIKELY(align_size <= GetMaxRegularObjectSize())) {
        mem = AllocRegular<region_type>(align_size);
    } else {
        os::memory::LockHolder lock(this->region_lock_);
        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(
            Region::RegionSize(align_size, REGION_SIZE), region_type, IS_LARGE_OBJECT);
        if (LIKELY(region != nullptr)) {
            mem = region->Alloc<false>(align_size);
        }
    }
    if (mem != nullptr) {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (update_memstats) {
            AllocConfigT::OnAlloc(align_size, this->space_type_, this->mem_stats_);
            AllocConfigT::MemoryInit(mem, size);
        }
    }
    return mem;
}

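// Creates a TLAB of at least `size` bytes for the given thread. With USE_PARTIAL_TLAB
// enabled, a region with enough remaining room is first taken from retained_tlabs_;
// otherwise a fresh eden region is created. Without partial TLABs, the whole remaining
// space of the new region is handed out as a single TLAB.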
template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateNewTLAB([[maybe_unused]] panda::ManagedThread *thread,
                                                                size_t size)
{
    ASSERT(size <= GetMaxRegularObjectSize());
    ASSERT(AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT)) == size);
    TLAB *tlab = nullptr;

    {
        os::memory::LockHolder lock(this->region_lock_);
        Region *region = nullptr;
        // First, search the partial TLAB map.
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (USE_PARTIAL_TLAB) {
            auto largest_tlab = retained_tlabs_.begin();
            if (largest_tlab != retained_tlabs_.end() && largest_tlab->first >= size) {
                region = largest_tlab->second;
                retained_tlabs_.erase(largest_tlab);
                LOG(DEBUG, ALLOC) << "Use retained tlabs region " << region;
                ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
            }
        }

        // Allocate a free region if no partial TLAB has enough space.
        if (region == nullptr) {
            region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_EDEN);
            if (LIKELY(region != nullptr)) {
                region->CreateTLABSupport();
            }
        }
        if (region != nullptr) {
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (!USE_PARTIAL_TLAB) {
                // We don't reuse the same region for different TLABs,
                // so hand out all of its remaining space.
                size = region->GetRemainingSizeForTLABs();
            }
            tlab = region->CreateTLAB(size);
            ASSERT(tlab != nullptr);
            ASAN_UNPOISON_MEMORY_REGION(tlab->GetStartAddr(), tlab->GetSize());
            AllocConfigT::MemoryInit(tlab->GetStartAddr(), tlab->GetSize());
            ASAN_POISON_MEMORY_REGION(tlab->GetStartAddr(), tlab->GetSize());
            LOG(DEBUG, ALLOC) << "Found a region " << region << " and created tlab " << tlab
                              << " with memory starting at " << tlab->GetStartAddr() << " and with size "
                              << tlab->GetSize();
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (USE_PARTIAL_TLAB) {
                auto remaining_size = region->GetRemainingSizeForTLABs();
                if (remaining_size >= size) {
                    LOG(DEBUG, ALLOC) << "Add a region " << region << " with remaining size " << remaining_size
                                      << " to retained_tlabs";
                    retained_tlabs_.insert(std::make_pair(remaining_size, region));
                }
            }
        }
    }

    return tlab;
}

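// Returns up to region_count non-eden regions with the most garbage bytes,
// ordered from most to least garbage, e.g. to pick collection-set candidates.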
template <typename AllocConfigT, typename LockConfigT>
// TODO(agrebenkin) add set of flags from which to pick the regions and make it compile time
template <bool include_current_region>
PandaVector<Region *> RegionAllocator<AllocConfigT, LockConfigT>::GetTopGarbageRegions(size_t region_count)
{
    PandaPriorityQueue<std::pair<uint32_t, Region *>> queue;
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (region->HasFlag(IS_EDEN)) {
            return;
        }
        if constexpr (!include_current_region) {
            if (IsInCurrentRegion<true, RegionFlag::IS_OLD>(region)) {
                return;
            }
        }
        auto garbage_bytes = region->GetGarbageBytes();
        queue.push(std::pair<uint32_t, Region *>(garbage_bytes, region));
    });
    PandaVector<Region *> regions;
    for (size_t i = 0; i < region_count && !queue.empty(); i++) {
        auto *region = queue.top().second;
        regions.push_back(region);
        queue.pop();
    }
    return regions;
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag regions_type>
PandaVector<Region *> RegionAllocator<AllocConfigT, LockConfigT>::GetAllSpecificRegions()
{
    PandaVector<Region *> vector;
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (region->HasFlag(regions_type)) {
            vector.push_back(region);
        }
    });
    return vector;
}

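// Compacts every region carrying the regions_type_from flag into regions of
// regions_type_to, delegating the per-region work to CompactSpecificRegion.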
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag regions_type_from, RegionFlag regions_type_to, bool use_marked_bitmap>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactAllSpecificRegions(const GCObjectVisitor &death_checker,
                                                                           const ObjectVisitorEx &move_handler)
{
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (regions_type_from == regions_type_to) { // NOLINT(bugprone-suspicious-semicolon)
        // TODO(aemelenko): Implement this if we need to call the method with the same region type.
        // There is an issue with IterateRegions while creating a new region.
        ASSERT(regions_type_from != regions_type_to);
        ResetCurrentRegion<false, regions_type_to>();
    }
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (!region->HasFlag(regions_type_from)) {
            return;
        }
        CompactSpecificRegion<regions_type_from, regions_type_to, use_marked_bitmap>(region, death_checker,
                                                                                     move_handler);
    });
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag regions_type_from, RegionFlag regions_type_to, bool use_marked_bitmap>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactSeveralSpecificRegions(const PandaVector<Region *> &regions,
                                                                               const GCObjectVisitor &death_checker,
                                                                               const ObjectVisitorEx &move_handler)
{
    for (auto i : regions) {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (regions_type_from == regions_type_to) {
            [[maybe_unused]] bool found_region = IsInCurrentRegion<false, regions_type_to>(i);
            ASSERT(!found_region);
        }
        CompactSpecificRegion<regions_type_from, regions_type_to, use_marked_bitmap>(i, death_checker, move_handler);
    }
}

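// Evacuates all objects that death_checker reports as alive from `region` into a
// target region of regions_type_to, switching to a new target region whenever the
// current one fills up. For every moved object, move_handler(src, dst) is invoked
// so the caller can record forwarding information. An illustrative handler (not
// part of this file; the forwarding call is a hypothetical helper):
//
//   ObjectVisitorEx move_handler = [](ObjectHeader *src, ObjectHeader *dst) {
//       SetForwardAddress(src, dst);  // hypothetical, for illustration only
//   };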
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag regions_type_from, RegionFlag regions_type_to, bool use_marked_bitmap>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactSpecificRegion(Region *region,
                                                                       const GCObjectVisitor &death_checker,
                                                                       const ObjectVisitorEx &move_handler)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (regions_type_from == regions_type_to) {
        // It is bad if we compact one region into itself.
        [[maybe_unused]] bool is_current_region = IsInCurrentRegion<true, regions_type_to>(region);
        ASSERT(!is_current_region);
    }
    auto create_new_region = [&]() {
        os::memory::LockHolder lock(this->region_lock_);
        Region *region_to = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, regions_type_to);
        ASSERT(region_to != nullptr);
        return region_to;
    };

    Region *region_to = PopFromRegionQueue<true, regions_type_to>();
    if (region_to == nullptr) {
        region_to = create_new_region();
    }
    size_t live_bytes = 0;
    // Don't use atomics in this method because we work with a non-shared region.
    auto visitor = [&](ObjectHeader *object) {
        if (death_checker(object) == ObjectStatus::ALIVE_OBJECT) {
            size_t object_size = GetObjectSize(object);
            size_t aligned_size = AlignUp(object_size, DEFAULT_ALIGNMENT_IN_BYTES);
            void *dst = region_to->template Alloc<false>(aligned_size);
            if (dst == nullptr) {
                region_to->SetLiveBytes(region_to->GetLiveBytes() + live_bytes);
                live_bytes = 0;
                region_to = create_new_region();
                dst = region_to->template Alloc<false>(aligned_size);
            }
            // Don't initialize memory for the object here because we use memcpy anyway.
            ASSERT(dst != nullptr);
            memcpy_s(dst, object_size, object, object_size);
            // We need to mark the moved object as alive.
            ASSERT(region_to->GetLiveBitmap() != nullptr);
            region_to->IncreaseAllocatedObjects();
            region_to->GetLiveBitmap()->Set(dst);
            live_bytes += aligned_size;
            move_handler(object, static_cast<ObjectHeader *>(dst));
        }
    };

    ASSERT(region->HasFlag(regions_type_from));

    const std::function<void(ObjectHeader *)> visitor_functor(visitor);
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (use_marked_bitmap) {
        // TODO(grebenkin): use live bitmap, remove CloneMarkBitmapToLiveBitmap, beware of young-regions
        region->GetMarkBitmap()->IterateOverMarkedChunks(
            [&](void *object_addr) { visitor_functor(static_cast<ObjectHeader *>(object_addr)); });
    } else { // NOLINT(readability-misleading-indentation)
        region->IterateOverObjects(visitor_functor);
    }
    region_to->SetLiveBytes(region_to->GetLiveBytes() + live_bytes);

    PushToRegionQueue<true, regions_type_to>(region_to);
}

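// Promotes a young (eden) region in place instead of evacuating it: a live bitmap
// is created, alive objects are accounted and marked in it, and the region is
// handed over to the old generation via the region space's PromoteYoungRegion.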
template <typename AllocConfigT, typename LockConfigT>
template <bool use_marked_bitmap>
void RegionAllocator<AllocConfigT, LockConfigT>::PromoteYoungRegion(Region *region,
                                                                    const GCObjectVisitor &death_checker,
                                                                    const ObjectVisitor &alive_objects_handler)
{
    ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
    // We should do it here because we don't create a live bitmap during young region creation.
    region->CreateLiveBitmap();
    size_t live_bytes = 0;
    auto visitor = [&](ObjectHeader *object) {
        if (death_checker(object) == ObjectStatus::ALIVE_OBJECT) {
            alive_objects_handler(object);
            region->IncreaseAllocatedObjects();
            region->GetLiveBitmap()->Set(object);
            live_bytes += GetAlignedObjectSize(GetObjectSize(object));
        }
    };
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (use_marked_bitmap) {
        region->GetMarkBitmap()->IterateOverMarkedChunks(
            [&](void *object_addr) { visitor(static_cast<ObjectHeader *>(object_addr)); });
    } else { // NOLINT(readability-misleading-indentation)
        region->IterateOverObjects(visitor);
    }
    region->SetLiveBytes(live_bytes);
    this->GetSpace()->PromoteYoungRegion(region);
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag regions_type>
void RegionAllocator<AllocConfigT, LockConfigT>::ResetAllSpecificRegions()
{
    ResetCurrentRegion<false, regions_type>();
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (!region->HasFlag(regions_type)) {
            return;
        }
        this->GetSpace()->FreeRegion(region);
    });
    if constexpr (regions_type == RegionFlag::IS_EDEN) {
        retained_tlabs_.clear();
    }
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag regions_type>
void RegionAllocator<AllocConfigT, LockConfigT>::ResetSeveralSpecificRegions(const PandaVector<Region *> &regions)
{
    // TODO(aemelenko): If we need to reset several young regions, we should implement it.
    ASSERT(regions_type != RegionFlag::IS_EDEN);
    ASSERT((regions_type != RegionFlag::IS_EDEN) || (retained_tlabs_.empty()));
    for (auto i : regions) {
        [[maybe_unused]] bool is_current_region = IsInCurrentRegion<false, regions_type>(i);
        ASSERT(!is_current_region);
        ASSERT(i->HasFlag(regions_type));
        this->GetSpace()->FreeRegion(i);
    }
}

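// RegionNonmovableAllocator backs a slot-based allocator (ObjectAllocator, e.g.
// RunSlotsAllocator or FreeListAllocator; see the aliases at the end of this file)
// with region-sized memory pools, so non-movable objects remain tracked per region.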
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::RegionNonmovableAllocator(
    MemStatsType *mem_stats, GenerationalSpaces *spaces, SpaceType space_type, size_t init_space_size, bool extend)
    : RegionAllocatorBase<LockConfigT>(mem_stats, spaces, space_type, ObjectAllocator::GetAllocatorType(),
                                       init_space_size, extend, REGION_SIZE),
      object_allocator_(mem_stats, space_type)
{
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::RegionNonmovableAllocator(
    MemStatsType *mem_stats, GenerationalSpaces *spaces, SpaceType space_type, RegionPool *shared_region_pool)
    : RegionAllocatorBase<LockConfigT>(mem_stats, spaces, space_type, ObjectAllocator::GetAllocatorType(),
                                       shared_region_pool),
      object_allocator_(mem_stats, space_type)
{
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void *RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t align_size = AlignUp(size, GetAlignmentInBytes(align));
    ASSERT(align_size <= ObjectAllocator::GetMaxSize());

    void *mem = object_allocator_.Alloc(align_size);
    if (UNLIKELY(mem == nullptr)) {
        mem = NewRegionAndRetryAlloc(size, align);
        if (UNLIKELY(mem == nullptr)) {
            return nullptr;
        }
    }
    auto live_bitmap = this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap();
    ASSERT(live_bitmap != nullptr);
    live_bitmap->AtomicTestAndSet(mem);
    return mem;
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Free(void *mem)
{
    this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap()->AtomicTestAndClear(mem);

    object_allocator_.Free(mem);
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Collect(
    const GCObjectVisitor &death_checker)
{
    os::memory::LockHolder lock(this->region_lock_);
    object_allocator_.Collect(death_checker);
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::VisitAndRemoveFreeRegions(
    const RegionsVisitor &region_visitor)
{
    os::memory::LockHolder lock(this->region_lock_);
    // Collect free regions into a vector to avoid doing extra work with region_visitor
    // inside object_allocator_.
    PandaVector<Region *> free_regions;

    object_allocator_.VisitAndRemoveFreePools([&free_regions](void *mem, [[maybe_unused]] size_t size) {
        auto *region = AddrToRegion(mem);
        ASSERT(ToUintPtr(mem) + size == region->End());
        // We don't free the region here because we don't want to do extra work with the visitor here.
        free_regions.push_back(region);
    });

    if (!free_regions.empty()) {
        region_visitor(free_regions);

        for (auto i : free_regions) {
            this->GetSpace()->FreeRegion(i);
        }
    }
}

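// Slow path for Alloc: creates a fresh non-movable region, registers its payload
// (everything past the aligned region header) as a memory pool in object_allocator_
// and retries the allocation. Returns nullptr only when no new region can be created.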
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void *RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::NewRegionAndRetryAlloc(size_t object_size,
                                                                                                    Alignment align)
{
    os::memory::LockHolder lock(this->region_lock_);
    size_t pool_head_size = AlignUp(Region::HeadSize(), ObjectAllocator::PoolAlign());
    ASSERT(AlignUp(pool_head_size + object_size, REGION_SIZE) == REGION_SIZE);
    while (true) {
        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_NONMOVABLE);
        if (UNLIKELY(region == nullptr)) {
            return nullptr;
        }
        ASSERT(region->GetLiveBitmap() != nullptr);
        uintptr_t aligned_pool = ToUintPtr(region) + pool_head_size;
        bool added_memory_pool = object_allocator_.AddMemoryPool(ToVoidPtr(aligned_pool), REGION_SIZE - pool_head_size);
        ASSERT(added_memory_pool);
        if (UNLIKELY(!added_memory_pool)) {
            LOG(FATAL, ALLOC) << "ObjectAllocator: couldn't add memory pool to allocator";
        }
        void *mem = object_allocator_.Alloc(object_size, align);
        if (LIKELY(mem != nullptr)) {
            return mem;
        }
    }
    return nullptr;
}

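// RegionHumongousAllocator places each humongous object in its own large-object
// region sized to fit it; the object is marked alive in the region's live bitmap
// right at allocation time.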
template <typename AllocConfigT, typename LockConfigT>
RegionHumongousAllocator<AllocConfigT, LockConfigT>::RegionHumongousAllocator(MemStatsType *mem_stats,
                                                                              GenerationalSpaces *spaces,
                                                                              SpaceType space_type)
    : RegionAllocatorBase<LockConfigT>(mem_stats, spaces, space_type, AllocatorType::REGION_ALLOCATOR, 0, true,
                                       REGION_SIZE)
{
}

template <typename AllocConfigT, typename LockConfigT>
template <bool update_memstats>
void *RegionHumongousAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t align_size = AlignUp(size, GetAlignmentInBytes(align));
    Region *region = nullptr;
    void *mem = nullptr;
    // Allocate a separate large region for the object.
    {
        os::memory::LockHolder lock(this->region_lock_);
        region = this->template CreateAndSetUpNewRegion<AllocConfigT>(Region::RegionSize(align_size, REGION_SIZE),
                                                                      IS_OLD, IS_LARGE_OBJECT);
        if (LIKELY(region != nullptr)) {
            mem = region->Alloc<false>(align_size);
            ASSERT(mem != nullptr);
            ASSERT(region->GetLiveBitmap() != nullptr);
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (update_memstats) {
                AllocConfigT::OnAlloc(region->Size(), this->space_type_, this->mem_stats_);
                AllocConfigT::MemoryInit(mem, size);
            }
            // Do it after memory init because we can reach this memory after setting the live bitmap.
            region->GetLiveBitmap()->AtomicTestAndSet(mem);
        }
    }
    return mem;
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::CollectAndRemoveFreeRegions(
    const RegionsVisitor &region_visitor, const GCObjectVisitor &death_checker)
{
    // Collect free regions into a vector to avoid doing extra work with region_visitor
    // during region iteration.
    PandaVector<Region *> free_regions;

    {
        os::memory::LockHolder lock(this->region_lock_);
        this->GetSpace()->IterateRegions([&](Region *region) {
            this->Collect(region, death_checker);
            if (region->HasFlag(IS_FREE)) {
                free_regions.push_back(region);
            }
        });
    }

    if (!free_regions.empty()) {
        region_visitor(free_regions);

        for (auto i : free_regions) {
            os::memory::LockHolder lock(this->region_lock_);
            ResetRegion(i);
        }
    }
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::Collect(Region *region, const GCObjectVisitor &death_checker)
{
    ASSERT(region->HasFlag(RegionFlag::IS_LARGE_OBJECT));
    ObjectHeader *object_to_proceed = region->GetLargeObject();
    if (death_checker(object_to_proceed) == ObjectStatus::DEAD_OBJECT) {
        region->AddFlag(RegionFlag::IS_FREE);
    }
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::ResetRegion(Region *region)
{
    ASSERT(region->HasFlag(RegionFlag::IS_FREE));
    region->RmvFlag(RegionFlag::IS_FREE);
    this->GetSpace()->FreeRegion(region);
}

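// Convenience aliases: region-backed variants of the runslots and freelist
// allocators, typically used for smaller and larger non-movable objects respectively.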
template <typename AllocConfigT, typename LockConfigT>
using RegionRunslotsAllocator = RegionNonmovableAllocator<AllocConfigT, LockConfigT, RunSlotsAllocator<AllocConfigT>>;

template <typename AllocConfigT, typename LockConfigT>
using RegionFreeListAllocator = RegionNonmovableAllocator<AllocConfigT, LockConfigT, FreeListAllocator<AllocConfigT>>;

}  // namespace panda::mem

#endif  // PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H