/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H
#define PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H

#include "libpandabase/mem/mem.h"
#include "libpandabase/utils/logger.h"
#include "runtime/include/runtime.h"
#include "runtime/include/thread.h"
#include "runtime/include/gc_task.h"
#include "runtime/mem/region_allocator.h"
#include "runtime/mem/region_space-inl.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/alloc_config.h"
#include "runtime/arch/memory_helpers.h"

namespace panda::mem {

template <typename LockConfigT>
RegionAllocatorBase<LockConfigT>::RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                      SpaceType spaceType, AllocatorType allocatorType,
                                                      size_t initSpaceSize, bool extend, size_t regionSize,
                                                      size_t emptyTenuredRegionsMaxCount)
    : memStats_(memStats),
      spaceType_(spaceType),
      spaces_(spaces),
      regionPool_(regionSize, extend, spaces,
                  InternalAllocatorPtr(InternalAllocator<>::GetInternalAllocatorFromRuntime())),
      regionSpace_(spaceType, allocatorType, &regionPool_, emptyTenuredRegionsMaxCount),
      initBlock_(0, nullptr)
{
    ASSERT(spaceType_ == SpaceType::SPACE_TYPE_OBJECT || spaceType_ == SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT ||
           spaceType_ == SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    ASSERT(regionSize != 0);
    initBlock_ = NULLPOOL;
    if (initSpaceSize > 0) {
        ASSERT(initSpaceSize % regionSize == 0);
        initBlock_ = spaces_->AllocSharedPool(initSpaceSize, spaceType, AllocatorType::REGION_ALLOCATOR, this);
        ASSERT(initBlock_.GetMem() != nullptr);
        ASSERT(initBlock_.GetSize() >= initSpaceSize);
        if (initBlock_.GetMem() != nullptr) {
            regionPool_.InitRegionBlock(ToUintPtr(initBlock_.GetMem()), ToUintPtr(initBlock_.GetMem()) + initSpaceSize);
            ASAN_POISON_MEMORY_REGION(initBlock_.GetMem(), initBlock_.GetSize());
        }
    }
}

template <typename LockConfigT>
RegionAllocatorBase<LockConfigT>::RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                      SpaceType spaceType, AllocatorType allocatorType,
                                                      RegionPool *sharedRegionPool, size_t emptyTenuredRegionsMaxCount)
    : memStats_(memStats),
      spaceType_(spaceType),
      spaces_(spaces),
      regionPool_(0, false, spaces, nullptr),  // unused
      regionSpace_(spaceType, allocatorType, sharedRegionPool, emptyTenuredRegionsMaxCount),
      initBlock_(0, nullptr)  // unused
{
    ASSERT(spaceType_ == SpaceType::SPACE_TYPE_OBJECT || spaceType_ == SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
}

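// Allocates a region of regionSize bytes with the given type and properties, runs the allocation
// config's young-region init hook for eden regions, and publishes the region header to other threads
// (memory barrier + TSAN annotation) before returning it.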
template <typename LockConfigT>
template <typename AllocConfigT, OSPagesAllocPolicy OS_ALLOC_POLICY>
Region *RegionAllocatorBase<LockConfigT>::CreateAndSetUpNewRegion(size_t regionSize, RegionFlag regionType,
                                                                  RegionFlag properties)
{
    Region *region = AllocRegion<OS_ALLOC_POLICY>(regionSize, regionType, properties);
    if (LIKELY(region != nullptr)) {
        if (regionType == RegionFlag::IS_EDEN) {
            AllocConfigT::OnInitYoungRegion({region->Begin(), region->End()});
        }
        // Issue a memory barrier here to make sure all threads see the references to the bitmaps.
        // The situation:
        // A mutator thread allocates a new object. During object allocation the mutator
        // allocates a new region, sets up the region header, allocates the object in the region and
        // publishes the reference to the object.
        // The GC thread does concurrent marking. It sees the reference to the new object and gets the region
        // by the object address.
        // Since the GC thread doesn't lock regionLock_, we need a memory barrier here to make
        // sure the GC thread sees all bitmaps from the region header.
        arch::FullMemoryBarrier();
        // Getting a region by object address is a bitwise operation, so TSAN doesn't
        // see the relation between region creation and region access.
        // This annotation tells TSAN that this code always executes before
        // the region is accessed.
        // See the corresponding annotation in ObjectToRegion
        TSAN_ANNOTATE_HAPPENS_BEFORE(region);
    }
    return region;
}

template <typename LockConfigT>
PandaVector<Region *> RegionAllocatorBase<LockConfigT>::GetAllRegions()
{
    PandaVector<Region *> vector;
    os::memory::LockHolder lock(this->regionLock_);
    GetSpace()->IterateRegions([&](Region *region) { vector.push_back(region); });
    return vector;
}

template <typename AllocConfigT, typename LockConfigT>
RegionAllocator<AllocConfigT, LockConfigT>::RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                            SpaceType spaceType, size_t initSpaceSize, bool extend,
                                                            size_t emptyTenuredRegionsMaxCount)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, initSpaceSize,
                                       extend, REGION_SIZE, emptyTenuredRegionsMaxCount),
      fullRegion_(nullptr, 0, 0),
      edenCurrentRegion_(&fullRegion_)
{
}

template <typename AllocConfigT, typename LockConfigT>
RegionAllocator<AllocConfigT, LockConfigT>::RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                            SpaceType spaceType, RegionPool *sharedRegionPool,
                                                            size_t emptyTenuredRegionsMaxCount)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, sharedRegionPool,
                                       emptyTenuredRegionsMaxCount),
      fullRegion_(nullptr, 0, 0),
      edenCurrentRegion_(&fullRegion_)
{
}

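// Allocates alignSize bytes for a regular-sized object in a region of the requested type.
// Eden: lock-free fast path in the current region, then a locked retry, then a new region.
// Old: pop a partially filled region from the region queue, otherwise create a new one under the lock.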
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGION_TYPE>
void *RegionAllocator<AllocConfigT, LockConfigT>::AllocRegular(size_t alignSize)
{
    static constexpr bool IS_ATOMIC = std::is_same_v<LockConfigT, RegionAllocatorLockConfig::CommonLock>;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
        void *mem = GetCurrentRegion<IS_ATOMIC, REGION_TYPE>()->template Alloc<IS_ATOMIC>(alignSize);
        if (mem != nullptr) {
            return mem;
        }

        os::memory::LockHolder lock(this->regionLock_);
        mem = GetCurrentRegion<IS_ATOMIC, REGION_TYPE>()->template Alloc<IS_ATOMIC>(alignSize);
        if (mem != nullptr) {
            return mem;
        }

        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, REGION_TYPE);
        if (LIKELY(region != nullptr)) {
            // We need a memory barrier here to make the allocation visible
            // to all threads before SetCurrentRegion
            mem = region->template Alloc<IS_ATOMIC>(alignSize);
            SetCurrentRegion<IS_ATOMIC, REGION_TYPE>(region);
        }

        return mem;
    }
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (REGION_TYPE == RegionFlag::IS_OLD) {
        void *mem = nullptr;
        Region *regionTo = PopFromRegionQueue<IS_ATOMIC, REGION_TYPE>();
        if (regionTo != nullptr) {
            mem = regionTo->template Alloc<false>(alignSize);
            if (mem != nullptr) {
                PushToRegionQueue<IS_ATOMIC, REGION_TYPE>(regionTo);
                return mem;
            }
        }

        os::memory::LockHolder lock(this->regionLock_);
        regionTo = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, REGION_TYPE);
        if (LIKELY(regionTo != nullptr)) {
            mem = regionTo->template Alloc<false>(alignSize);
            PushToRegionQueue<IS_ATOMIC, REGION_TYPE>(regionTo);
        }

        return mem;
    }
    UNREACHABLE();
    return nullptr;
}

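// Allocates size bytes aligned to align. Regular-sized movable objects go through AllocRegular;
// non-movable and oversized objects get a separate large region.
// A minimal usage sketch (the allocator instance, objSize and the defaulted UPDATE_MEMSTATS
// template argument are assumptions, not part of this file):
//   void *obj = allocator.Alloc<RegionFlag::IS_EDEN>(objSize, DEFAULT_ALIGNMENT);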
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGION_TYPE, bool UPDATE_MEMSTATS>
void *RegionAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    void *mem = nullptr;
    // For a movable, regular-sized object, allocate it from a region.
    // For a non-movable or large object, allocate a separate large region for it.
    if (this->GetSpaceType() != SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT &&
        LIKELY(alignSize <= GetMaxRegularObjectSize())) {
        mem = AllocRegular<REGION_TYPE>(alignSize);
    } else {
        os::memory::LockHolder lock(this->regionLock_);
        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(
            Region::RegionSize(alignSize, REGION_SIZE), REGION_TYPE, IS_LARGE_OBJECT);
        if (LIKELY(region != nullptr)) {
            mem = region->Alloc<false>(alignSize);
        }
    }
    if (mem != nullptr) {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (UPDATE_MEMSTATS) {
            AllocConfigT::OnAlloc(alignSize, this->spaceType_, this->memStats_);
            AllocConfigT::MemoryInit(mem);
        }
    }
    return mem;
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::PinObject(ObjectHeader *object)
{
    auto *region = ObjectToRegion(object);
    ASSERT(region != nullptr);
    region->PinObject();
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::UnpinObject(ObjectHeader *object)
{
    auto *region = ObjectToRegion(object);
    ASSERT(region != nullptr);
    region->UnpinObject();
}

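// Creates a TLAB of the requested size. Partially used eden regions are kept in retainedTlabs_
// (keyed by their remaining size) and reused before a fresh region is allocated.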
template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateTLAB(size_t size)
{
    ASSERT(size <= GetMaxRegularObjectSize());
    ASSERT(AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT)) == size);
    TLAB *tlab = nullptr;

    {
        os::memory::LockHolder lock(this->regionLock_);
        Region *region = nullptr;
        // First search the partial TLAB map
        auto largestTlab = retainedTlabs_.begin();
        if (largestTlab != retainedTlabs_.end() && largestTlab->first >= size) {
            region = largestTlab->second;
            LOG(DEBUG, ALLOC) << "Use retained tlabs region " << region;
            retainedTlabs_.erase(largestTlab);
            ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
        }

        // Allocate a free region if no partial TLAB has enough space
        if (region == nullptr) {
            region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_EDEN);
            if (LIKELY(region != nullptr)) {
                region->CreateTLABSupport();
            }
        }
        if (region != nullptr) {
            tlab = CreateTLABInRegion(region, size);
            auto remainingSize = region->GetRemainingSizeForTLABs();
            if (remainingSize >= size) {
                LOG(DEBUG, ALLOC) << "Add a region " << region << " with remaining size " << remainingSize
                                  << " to retained_tlabs";
                retainedTlabs_.insert(std::make_pair(remainingSize, region));
            }
        }
    }

    return tlab;
}

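// Creates a TLAB that spans all the space available for TLABs in a freshly allocated eden region.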
template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateRegionSizeTLAB()
{
    TLAB *tlab = nullptr;

    os::memory::LockHolder lock(this->regionLock_);
    Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_EDEN);
    if (LIKELY(region != nullptr)) {
        region->CreateTLABSupport();
        size_t size = region->GetRemainingSizeForTLABs();
        tlab = CreateTLABInRegion(region, size);
    }

    return tlab;
}

template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateTLABInRegion(Region *region, size_t size)
{
    // We don't reuse the same memory for different TLABs.
    // Therefore, the region's remaining size is updated.
    TLAB *tlab = region->CreateTLAB(size);
    ASSERT(tlab != nullptr);
    LOG(DEBUG, ALLOC) << "Found a region " << region << " and created tlab " << tlab << " with memory starting at "
                      << tlab->GetStartAddr() << " and with size " << tlab->GetSize();
    return tlab;
}

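// Builds a priority queue of (garbageBytes, region) pairs over the space's regions.
// Eden, reserved and pinned-object regions are skipped; the current region is skipped too
// unless INCLUDE_CURRENT_REGION is set.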
template <typename AllocConfigT, typename LockConfigT>
template <bool INCLUDE_CURRENT_REGION>
PandaPriorityQueue<std::pair<uint32_t, Region *>> RegionAllocator<AllocConfigT, LockConfigT>::GetTopGarbageRegions()
{
    PandaPriorityQueue<std::pair<uint32_t, Region *>> queue;
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (region->HasFlag(IS_EDEN) || region->HasFlag(RegionFlag::IS_RESERVED) || region->HasPinnedObjects()) {
            return;
        }
        if constexpr (!INCLUDE_CURRENT_REGION) {
            if (IsInCurrentRegion<true, RegionFlag::IS_OLD>(region)) {
                return;
            }
        }
        auto garbageBytes = region->GetGarbageBytes();
        queue.push(std::pair<uint32_t, Region *>(garbageBytes, region));
    });
    return queue;
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE>
PandaVector<Region *> RegionAllocator<AllocConfigT, LockConfigT>::GetAllSpecificRegions()
{
    PandaVector<Region *> vector;
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (region->HasFlag(REGIONS_TYPE)) {
            vector.push_back(region);
        }
    });
    return vector;
}

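// Compacts every region of REGIONS_TYPE_FROM into regions of REGIONS_TYPE_TO, moving the objects
// that the death checker reports as alive and notifying moveHandler for each moved object.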
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactAllSpecificRegions(const GCObjectVisitor &deathChecker,
                                                                           const ObjectVisitorEx &moveHandler)
{
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {  // NOLINT(bugprone-suspicious-semicolon)
        // NOTE(aemelenko): Implement it if this method needs to be called with the same region types.
        // There is an issue with IterateRegions while a new region is being created.
        ASSERT(REGIONS_TYPE_FROM != REGIONS_TYPE_TO);
        ResetCurrentRegion<false, REGIONS_TYPE_TO>();
    }
    this->GetSpace()->IterateRegions([this, &deathChecker, &moveHandler](Region *region) {
        if (!region->HasFlag(REGIONS_TYPE_FROM)) {
            return;
        }
        CompactSpecificRegion<REGIONS_TYPE_FROM, REGIONS_TYPE_TO, USE_MARKED_BITMAP>(region, deathChecker, moveHandler);
    });
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactSeveralSpecificRegions(const PandaVector<Region *> &regions,
                                                                               const GCObjectVisitor &deathChecker,
                                                                               const ObjectVisitorEx &moveHandler)
{
    for (auto i : regions) {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {
            [[maybe_unused]] bool foundRegion = IsInCurrentRegion<false, REGIONS_TYPE_TO>(i);
            ASSERT(!foundRegion);
        }
        CompactSpecificRegion<REGIONS_TYPE_FROM, REGIONS_TYPE_TO, USE_MARKED_BITMAP>(i, deathChecker, moveHandler);
    }
}

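// Evacuates the live objects of one region: copies each alive object into a destination region
// (taken from the region queue or freshly created when the current one is full), marks the copy
// in the live bitmap, accumulates live bytes and reports every move through moveHandler.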
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactSpecificRegion(Region *region,
                                                                       const GCObjectVisitor &deathChecker,
                                                                       const ObjectVisitorEx &moveHandler)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {
        // It would be incorrect to compact a region into itself.
        [[maybe_unused]] bool isCurrentRegion = IsInCurrentRegion<true, REGIONS_TYPE_TO>(region);
        ASSERT(!isCurrentRegion);
    }
    auto createNewRegion = [&]() {
        os::memory::LockHolder lock(this->regionLock_);
        Region *regionTo = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, REGIONS_TYPE_TO);
        ASSERT(regionTo != nullptr);
        return regionTo;
    };

    Region *regionTo = PopFromRegionQueue<true, REGIONS_TYPE_TO>();
    if (regionTo == nullptr) {
        regionTo = createNewRegion();
    }
    size_t liveBytes = 0;
    // Don't use atomics in this method because we work with a non-shared region
    auto visitor = [&](ObjectHeader *object) {
        // If we use the mark bitmap then we iterate over alive objects only, so no death checker is needed
        if constexpr (!USE_MARKED_BITMAP) {
            if (deathChecker(object) != ObjectStatus::ALIVE_OBJECT) {
                return;
            }
        }
        size_t objectSize = GetObjectSize(object);
        size_t alignedSize = AlignUp(objectSize, DEFAULT_ALIGNMENT_IN_BYTES);
        void *dst = regionTo->template Alloc<false>(alignedSize);
        if (dst == nullptr) {
            regionTo->SetLiveBytes(regionTo->GetLiveBytes() + liveBytes);
            liveBytes = 0;
            regionTo = createNewRegion();
            dst = regionTo->template Alloc<false>(alignedSize);
        }
        // Don't initialize memory for the object here because we will use memcpy anyway
        ASSERT(dst != nullptr);
        memcpy_s(dst, objectSize, object, objectSize);
        // We need to mark the moved object as alive
        ASSERT(regionTo->GetLiveBitmap() != nullptr);
        regionTo->IncreaseAllocatedObjects();
        regionTo->GetLiveBitmap()->Set(dst);
        liveBytes += alignedSize;
        moveHandler(object, static_cast<ObjectHeader *>(dst));
    };

    ASSERT(region->HasFlag(REGIONS_TYPE_FROM));

    const std::function<void(ObjectHeader *)> visitorFunctor(visitor);
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (USE_MARKED_BITMAP) {
        region->GetMarkBitmap()->IterateOverMarkedChunks(
            [&visitorFunctor](void *objectAddr) { visitorFunctor(static_cast<ObjectHeader *>(objectAddr)); });
    } else {  // NOLINT(readability-misleading-indentation)
        region->IterateOverObjects(visitorFunctor);
    }
    regionTo->SetLiveBytes(regionTo->GetLiveBytes() + liveBytes);

    PushToRegionQueue<true, REGIONS_TYPE_TO>(regionTo);
}

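// Sets aside a single region flagged IS_RESERVED (the IS_OLD flag is removed right after allocation).
// Reserved regions are excluded from garbage-candidate iteration (see GetTopGarbageRegions) until
// ReleaseReservedRegion returns the region to the space.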
template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::ReserveRegionIfNeeded()
{
    if (reservedRegion_ != nullptr) {
        return;
    }
    reservedRegion_ = this->GetSpace()->NewRegion(REGION_SIZE, RegionFlag::IS_OLD, RegionFlag::IS_RESERVED);
    ASSERT(reservedRegion_ != nullptr);
    reservedRegion_->RmvFlag(RegionFlag::IS_OLD);
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::ReleaseReservedRegion()
{
    ASSERT(reservedRegion_ != nullptr);
    this->GetSpace()->template FreeRegion<RegionSpace::ReleaseRegionsPolicy::NoRelease, OSPagesPolicy::NO_RETURN>(
        reservedRegion_);
    reservedRegion_ = nullptr;
}

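// Promotes an eden region in place (no copying): builds the live bitmap from the mark bitmap,
// runs aliveObjectsHandler over the alive objects, updates the allocated-object count and hands
// the region over to the space.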
template <typename AllocConfigT, typename LockConfigT>
template <bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::PromoteYoungRegion(Region *region, const GCObjectVisitor &deathChecker,
                                                                    const ObjectVisitor &aliveObjectsHandler)
{
    ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
    // We should create the live bitmap here and copy the alive objects from the mark bitmap to it
    region->CreateLiveBitmap();
    region->CloneMarkBitmapToLiveBitmap();
    auto visitor = [&aliveObjectsHandler, &region](ObjectHeader *object) {
        aliveObjectsHandler(object);
        region->IncreaseAllocatedObjects();
    };
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (USE_MARKED_BITMAP) {
        region->GetMarkBitmap()->IterateOverMarkedChunks(
            [&visitor](void *objectAddr) { visitor(static_cast<ObjectHeader *>(objectAddr)); });
    } else {  // NOLINT(readability-misleading-indentation)
        auto liveCheckVisitor = [&visitor, &deathChecker](ObjectHeader *object) {
            if (deathChecker(object) == ObjectStatus::ALIVE_OBJECT) {
                visitor(object);
            }
        };
        region->IterateOverObjects(liveCheckVisitor);
    }
    // We set an approximate value here; it will be updated later
    region->SetLiveBytes(region->GetAllocatedBytes());
    this->GetSpace()->PromoteYoungRegion(region);
}

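// Frees every region of the given type (NoRelease policy) and resets the corresponding current
// region; for eden this also drops the retained partial-TLAB regions.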
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE>
void RegionAllocator<AllocConfigT, LockConfigT>::ResetAllSpecificRegions()
{
    ResetCurrentRegion<false, REGIONS_TYPE>();
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (!region->HasFlag(REGIONS_TYPE)) {
            return;
        }
        this->GetSpace()->template FreeRegion<RegionSpace::ReleaseRegionsPolicy::NoRelease>(region);
    });
    if constexpr (REGIONS_TYPE == RegionFlag::IS_EDEN) {
        retainedTlabs_.clear();
    }
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE, RegionSpace::ReleaseRegionsPolicy REGIONS_RELEASE_POLICY,
          OSPagesPolicy OS_PAGES_POLICY, bool NEED_LOCK, typename Container>
void RegionAllocator<AllocConfigT, LockConfigT>::ResetSeveralSpecificRegions(const Container &regions)
{
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(this->regionLock_);
    ASSERT(REGIONS_TYPE != RegionFlag::IS_EDEN);
    ASSERT((REGIONS_TYPE != RegionFlag::IS_EDEN) || (retainedTlabs_.empty()));
    for (Region *region : regions) {
        ASSERT(!(IsInCurrentRegion<false, REGIONS_TYPE>(region)));
        ASSERT(region->HasFlag(REGIONS_TYPE));
        this->GetSpace()->template FreeRegion<REGIONS_RELEASE_POLICY, OS_PAGES_POLICY>(region);
    }
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::RegionNonmovableAllocator(
    MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType, size_t initSpaceSize, bool extend)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, ObjectAllocator::GetAllocatorType(), initSpaceSize,
                                       extend, REGION_SIZE, 0),
      objectAllocator_(memStats, spaceType)
{
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::RegionNonmovableAllocator(
    MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType, RegionPool *sharedRegionPool)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, ObjectAllocator::GetAllocatorType(),
                                       sharedRegionPool, 0),
      objectAllocator_(memStats, spaceType)
{
}

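// Allocates through the underlying object allocator (RunSlots or FreeList); on failure a new
// non-movable region is attached as a memory pool and the allocation is retried.
// The object is marked in its region's live bitmap right away.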
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void *RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    ASSERT(alignSize <= ObjectAllocator::GetMaxSize());

    void *mem = objectAllocator_.Alloc(alignSize);
    if (UNLIKELY(mem == nullptr)) {
        mem = NewRegionAndRetryAlloc(size, align);
        if (UNLIKELY(mem == nullptr)) {
            return nullptr;
        }
    }
    auto liveBitmap = this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap();
    ASSERT(liveBitmap != nullptr);
    liveBitmap->AtomicTestAndSet(mem);
    return mem;
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Free(void *mem)
{
    this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap()->AtomicTestAndClear(mem);

    objectAllocator_.Free(mem);
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Collect(const GCObjectVisitor &deathChecker)
{
    os::memory::LockHolder lock(this->regionLock_);
    objectAllocator_.Collect(deathChecker);
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::VisitAndRemoveFreeRegions(
    const RegionsVisitor &regionVisitor)
{
    os::memory::LockHolder lock(this->regionLock_);
    // Collect free regions into a vector so we don't do extra work with regionVisitor
    // inside objectAllocator_.
    PandaVector<Region *> freeRegions;

    objectAllocator_.VisitAndRemoveFreePools([&freeRegions](void *mem, [[maybe_unused]] size_t size) {
        auto *region = AddrToRegion(mem);
        ASSERT(ToUintPtr(mem) + size == region->End());
        // We don't remove this region here because we don't want to do extra work with the visitor here.
        freeRegions.push_back(region);
    });

    if (!freeRegions.empty()) {
        regionVisitor(freeRegions);

        for (auto i : freeRegions) {
            this->GetSpace()->FreeRegion(i);
        }
    }
}

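// Creates non-movable regions and feeds their payload (past the pool-aligned region header) to the
// underlying allocator as memory pools until the allocation succeeds or no region can be created.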
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void *RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::NewRegionAndRetryAlloc(size_t objectSize,
                                                                                                    Alignment align)
{
    os::memory::LockHolder lock(this->regionLock_);
    size_t poolHeadSize = AlignUp(Region::HeadSize(), ObjectAllocator::PoolAlign());
    ASSERT(AlignUp(poolHeadSize + objectSize, REGION_SIZE) == REGION_SIZE);
    while (true) {
        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_NONMOVABLE);
        if (UNLIKELY(region == nullptr)) {
            return nullptr;
        }
        ASSERT(region->GetLiveBitmap() != nullptr);
        uintptr_t alignedPool = ToUintPtr(region) + poolHeadSize;
        bool addedMemoryPool = objectAllocator_.AddMemoryPool(ToVoidPtr(alignedPool), REGION_SIZE - poolHeadSize);
        ASSERT(addedMemoryPool);
        if (UNLIKELY(!addedMemoryPool)) {
            LOG(FATAL, ALLOC) << "ObjectAllocator: couldn't add memory pool to allocator";
        }
        void *mem = objectAllocator_.Alloc(objectSize, align);
        if (LIKELY(mem != nullptr)) {
            return mem;
        }
    }
    return nullptr;
}

template <typename AllocConfigT, typename LockConfigT>
RegionHumongousAllocator<AllocConfigT, LockConfigT>::RegionHumongousAllocator(MemStatsType *memStats,
                                                                              GenerationalSpaces *spaces,
                                                                              SpaceType spaceType)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, 0, true,
                                       REGION_SIZE, 0)
{
}

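// Allocates a humongous object in its own large region. The region is requested with the
// ZEROED_MEMORY policy, so the object memory doesn't need explicit initialization here.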
template <typename AllocConfigT, typename LockConfigT>
template <bool UPDATE_MEMSTATS>
void *RegionHumongousAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    Region *region = nullptr;
    void *mem = nullptr;
    // Allocate a separate large region for the object
    {
        os::memory::LockHolder lock(this->regionLock_);
        region = this->template CreateAndSetUpNewRegion<AllocConfigT, OSPagesAllocPolicy::ZEROED_MEMORY>(
            Region::RegionSize(alignSize, REGION_SIZE), IS_OLD, IS_LARGE_OBJECT);
        if (LIKELY(region != nullptr)) {
            mem = region->Alloc<false>(alignSize);
            ASSERT(mem != nullptr);
            ASSERT(region->GetLiveBitmap() != nullptr);
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (UPDATE_MEMSTATS) {
                AllocConfigT::OnAlloc(region->Size(), this->spaceType_, this->memStats_);
                // We don't initialize memory here because the requested memory
                // is already zeroed
            }
            // Do it after memory init because this memory can be reached after the live bitmap is set
            region->GetLiveBitmap()->AtomicTestAndSet(mem);
        }
    }
    return mem;
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::CollectAndRemoveFreeRegions(
    const RegionsVisitor &regionVisitor, const GCObjectVisitor &deathChecker)
{
    // Collect free regions into a vector so we don't do extra work with regionVisitor during region iteration
    PandaVector<Region *> freeRegions;

    {
        os::memory::LockHolder lock(this->regionLock_);
        this->GetSpace()->IterateRegions([this, &deathChecker, &freeRegions](Region *region) {
            this->Collect(region, deathChecker);
            if (region->HasFlag(IS_FREE)) {
                freeRegions.push_back(region);
            }
        });
    }

    if (!freeRegions.empty()) {
        regionVisitor(freeRegions);

        for (auto i : freeRegions) {
            os::memory::LockHolder lock(this->regionLock_);
            ResetRegion(i);
        }
    }
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::Collect(Region *region, const GCObjectVisitor &deathChecker)
{
    ASSERT(region->HasFlag(RegionFlag::IS_LARGE_OBJECT));
    ObjectHeader *objectToProceed = nullptr;
    objectToProceed = region->GetLargeObject();
    if (deathChecker(objectToProceed) == ObjectStatus::DEAD_OBJECT) {
        region->AddFlag(RegionFlag::IS_FREE);
    }
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::ResetRegion(Region *region)
{
    ASSERT(region->HasFlag(RegionFlag::IS_FREE));
    region->RmvFlag(RegionFlag::IS_FREE);
    this->GetSpace()->FreeRegion(region);
}

template <typename AllocConfigT, typename LockConfigT>
using RegionRunslotsAllocator = RegionNonmovableAllocator<AllocConfigT, LockConfigT, RunSlotsAllocator<AllocConfigT>>;

template <typename AllocConfigT, typename LockConfigT>
using RegionFreeListAllocator = RegionNonmovableAllocator<AllocConfigT, LockConfigT, FreeListAllocator<AllocConfigT>>;

}  // namespace panda::mem

#endif  // PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H