/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H
#define PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H

#include "libpandabase/mem/mem.h"
#include "libpandabase/utils/logger.h"
#include "runtime/include/runtime.h"
#include "runtime/include/thread.h"
#include "runtime/include/gc_task.h"
#include "runtime/mem/region_allocator.h"
#include "runtime/mem/region_space-inl.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/alloc_config.h"
#include "runtime/arch/memory_helpers.h"

namespace ark::mem {

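// Constructor for a RegionAllocatorBase that owns its region pool. When initSpaceSize
// is non-zero, a shared pool of that size is pre-allocated and handed to the pool
// as its initial region block.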
template <typename LockConfigT>
RegionAllocatorBase<LockConfigT>::RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                      SpaceType spaceType, AllocatorType allocatorType,
                                                      size_t initSpaceSize, bool extend, size_t regionSize,
                                                      size_t emptyTenuredRegionsMaxCount)
    : memStats_(memStats),
      spaceType_(spaceType),
      spaces_(spaces),
      regionPool_(regionSize, extend, spaces,
                  InternalAllocatorPtr(InternalAllocator<>::GetInternalAllocatorFromRuntime())),
      regionSpace_(spaceType, allocatorType, &regionPool_, emptyTenuredRegionsMaxCount),
      initBlock_(0, nullptr)
{
    ASSERT(spaceType_ == SpaceType::SPACE_TYPE_OBJECT || spaceType_ == SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT ||
           spaceType_ == SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    ASSERT(regionSize != 0);
    initBlock_ = NULLPOOL;
    if (initSpaceSize > 0) {
        ASSERT(initSpaceSize % regionSize == 0);
        initBlock_ = spaces_->AllocSharedPool(initSpaceSize, spaceType, AllocatorType::REGION_ALLOCATOR, this);
        ASSERT(initBlock_.GetMem() != nullptr);
        ASSERT(initBlock_.GetSize() >= initSpaceSize);
        if (initBlock_.GetMem() != nullptr) {
            regionPool_.InitRegionBlock(ToUintPtr(initBlock_.GetMem()),
                                        ToUintPtr(initBlock_.GetMem()) + initSpaceSize);
            ASAN_POISON_MEMORY_REGION(initBlock_.GetMem(), initBlock_.GetSize());
        }
    }
}

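// Constructor for a RegionAllocatorBase that works on top of an externally owned,
// shared RegionPool; the internally stored pool and init block stay unused.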
template <typename LockConfigT>
RegionAllocatorBase<LockConfigT>::RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                      SpaceType spaceType, AllocatorType allocatorType,
                                                      RegionPool *sharedRegionPool, size_t emptyTenuredRegionsMaxCount)
    : memStats_(memStats),
      spaces_(spaces),
      spaceType_(spaceType),
      regionPool_(0, false, spaces, nullptr),  // unused
      regionSpace_(spaceType, allocatorType, sharedRegionPool, emptyTenuredRegionsMaxCount),
      initBlock_(0, nullptr)  // unused
{
    ASSERT(spaceType_ == SpaceType::SPACE_TYPE_OBJECT || spaceType_ == SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
}

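// Allocates a new region with the requested size and flags and makes its header
// safe to read from other threads (see the barrier comment in the body).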
template <typename LockConfigT>
template <typename AllocConfigT, OSPagesAllocPolicy OS_ALLOC_POLICY>
Region *RegionAllocatorBase<LockConfigT>::CreateAndSetUpNewRegion(size_t regionSize, RegionFlag regionType,
                                                                  RegionFlag properties)
{
    Region *region = AllocRegion<OS_ALLOC_POLICY>(regionSize, regionType, properties);
    if (LIKELY(region != nullptr)) {
        if (regionType == RegionFlag::IS_EDEN) {
            AllocConfigT::OnInitYoungRegion({region->Begin(), region->End()});
        }
        // Do a memory barrier here to make sure all threads see the references to the bitmaps.
        // The situation:
        // A mutator thread allocates a new object. During object allocation the mutator
        // allocates a new region, sets up the region header, allocates the object in the region
        // and publishes the reference to the object.
        // The GC thread does concurrent marking. It sees the reference to the new object and gets
        // the region by the object address.
        // Since the GC thread doesn't lock region_lock_, we need a memory barrier here to make
        // sure the GC thread sees all bitmaps from the region header.
        arch::FullMemoryBarrier();
        // Getting a region by object address is a bit operation and TSAN doesn't
        // see the relation between region creation and region access.
        // This annotation tells TSAN that this code always executes before
        // the region is accessed.
        // See the corresponding annotation in ObjectToRegion
        TSAN_ANNOTATE_HAPPENS_BEFORE(region);
    }
    return region;
}

template <typename LockConfigT>
PandaVector<Region *> RegionAllocatorBase<LockConfigT>::GetAllRegions()
{
    PandaVector<Region *> vector;
    os::memory::LockHolder lock(this->regionLock_);
    GetSpace()->IterateRegions([&](Region *region) { vector.push_back(region); });
    return vector;
}

template <typename AllocConfigT, typename LockConfigT>
RegionAllocator<AllocConfigT, LockConfigT>::RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                            SpaceType spaceType, size_t initSpaceSize, bool extend,
                                                            size_t emptyTenuredRegionsMaxCount)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, initSpaceSize,
                                       extend, REGION_SIZE, emptyTenuredRegionsMaxCount),
      fullRegion_(nullptr, 0, 0),
      edenCurrentRegion_(&fullRegion_)
{
}

template <typename AllocConfigT, typename LockConfigT>
RegionAllocator<AllocConfigT, LockConfigT>::RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                            SpaceType spaceType, RegionPool *sharedRegionPool,
                                                            size_t emptyTenuredRegionsMaxCount)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, sharedRegionPool,
                                       emptyTenuredRegionsMaxCount),
      fullRegion_(nullptr, 0, 0),
      edenCurrentRegion_(&fullRegion_)
{
}

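// Bump-pointer allocation of a regular-sized object. The lock-free fast path allocates
// from the current region of the requested type; on failure the slow path retries under
// regionLock_ and, if needed, creates a fresh region (eden regions become the current
// region, tenured regions are published through the region queue).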
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGION_TYPE>
void *RegionAllocator<AllocConfigT, LockConfigT>::AllocRegular(size_t alignSize)
{
    static constexpr bool IS_ATOMIC = std::is_same_v<LockConfigT, RegionAllocatorLockConfig::CommonLock>;
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
        void *mem = GetCurrentRegion<IS_ATOMIC, REGION_TYPE>()->template Alloc<IS_ATOMIC>(alignSize);
        if (mem != nullptr) {
            return mem;
        }

        os::memory::LockHolder lock(this->regionLock_);
        mem = GetCurrentRegion<IS_ATOMIC, REGION_TYPE>()->template Alloc<IS_ATOMIC>(alignSize);
        if (mem != nullptr) {
            return mem;
        }

        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, REGION_TYPE);
        if (LIKELY(region != nullptr)) {
            // Here we need a memory barrier to make the allocation visible
            // to all threads before SetCurrentRegion
            mem = region->template Alloc<IS_ATOMIC>(alignSize);
            SetCurrentRegion<IS_ATOMIC, REGION_TYPE>(region);
        }

        return mem;
    }
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (REGION_TYPE == RegionFlag::IS_OLD) {
        void *mem = nullptr;
        Region *regionTo = PopFromRegionQueue<IS_ATOMIC, REGION_TYPE>();
        if (regionTo != nullptr) {
            // Here we need a memory barrier to make the allocation visible
            // to all threads before the region is published via PushToRegionQueue
            mem = regionTo->template Alloc<IS_ATOMIC>(alignSize);
            if (mem != nullptr) {
                PushToRegionQueue<IS_ATOMIC, REGION_TYPE>(regionTo);
                return mem;
            }
        }

        os::memory::LockHolder lock(this->regionLock_);
        regionTo = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, REGION_TYPE);
        if (LIKELY(regionTo != nullptr)) {
            // Here we need a memory barrier to make the allocation visible
            // to all threads before the region is published via PushToRegionQueue
            mem = regionTo->template Alloc<IS_ATOMIC>(alignSize);
            PushToRegionQueue<IS_ATOMIC, REGION_TYPE>(regionTo);
        }

        return mem;
    }
    UNREACHABLE();
    return nullptr;
}

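// Allocates an object that must not be moved by the GC: the memory comes from a tenured
// region taken from the pinned-region queue (or from a newly created one), and the
// region is marked as containing pinned objects via PinObject.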
template <typename AllocConfigT, typename LockConfigT>
void *RegionAllocator<AllocConfigT, LockConfigT>::AllocRegularPinned(size_t alignSize)
{
    static constexpr bool IS_ATOMIC = std::is_same_v<LockConfigT, RegionAllocatorLockConfig::CommonLock>;
    void *mem = nullptr;
    Region *regionTo = PopFromRegionQueue<IS_ATOMIC, RegionFlag::IS_PINNED>();
    if (regionTo != nullptr) {
        // Here we need a memory barrier to make the allocation visible
        // to all threads before the region is published via PushToRegionQueue
        mem = regionTo->template Alloc<IS_ATOMIC>(alignSize);
        if (mem != nullptr) {
            regionTo->PinObject();
            PushToRegionQueue<IS_ATOMIC, RegionFlag::IS_PINNED>(regionTo);
            return mem;
        }
    }

    os::memory::LockHolder lock(this->regionLock_);
    regionTo = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_OLD);
    if (LIKELY(regionTo != nullptr)) {
        // Here we need a memory barrier to make the allocation visible
        // to all threads before the region is published via PushToRegionQueue
        mem = regionTo->Alloc<IS_ATOMIC>(alignSize);
    }
    if (mem != nullptr) {
        regionTo->PinObject();
        PushToRegionQueue<IS_ATOMIC, RegionFlag::IS_PINNED>(regionTo);
    }

    return mem;
}

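// Generic allocation entry point: movable objects of regular size are bump-allocated
// in a region of REGION_TYPE (or in a pinned tenured region when `pinned` is set);
// non-movable and oversized objects get a separate region sized to fit the request.
// A minimal usage sketch (the allocator instance and chosen template arguments are
// assumptions for illustration, not a fixed API contract):
//   void *mem = allocator.Alloc<RegionFlag::IS_EDEN, true>(objSize, DEFAULT_ALIGNMENT, false);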
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGION_TYPE, bool UPDATE_MEMSTATS>
void *RegionAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align, bool pinned)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    void *mem = nullptr;
    // For a movable, regular-sized object, allocate it from a region.
    // For a non-movable or large object, allocate a separate large region for it.
    if (this->GetSpaceType() != SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT &&
        LIKELY(alignSize <= GetMaxRegularObjectSize())) {
        if (pinned) {
            mem = AllocRegularPinned(alignSize);
        } else {
            mem = AllocRegular<REGION_TYPE>(alignSize);
        }
    } else {
        os::memory::LockHolder lock(this->regionLock_);
        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(
            Region::RegionSize(alignSize, REGION_SIZE), REGION_TYPE, IS_LARGE_OBJECT);
        if (LIKELY(region != nullptr)) {
            mem = region->Alloc<false>(alignSize);
        }
    }
    if (mem != nullptr) {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (UPDATE_MEMSTATS) {
            AllocConfigT::OnAlloc(alignSize, this->spaceType_, this->memStats_);
            AllocConfigT::MemoryInit(mem);
        }
    }
    return mem;
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::PinObject(ObjectHeader *object)
{
    auto *region = ObjectToRegion(object);
    ASSERT(region != nullptr);
    region->PinObject();
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::UnpinObject(ObjectHeader *object)
{
    auto *region = ObjectToRegion(object);
    ASSERT(region != nullptr);
    region->UnpinObject();
    if (!region->HasPinnedObjects()) {
        static constexpr bool IS_ATOMIC = std::is_same_v<LockConfigT, RegionAllocatorLockConfig::CommonLock>;
        PandaVector<Region *> *regionQueue = GetRegionQueuePointer<RegionFlag::IS_PINNED>();
        os::memory::LockHolder<LockConfigT, IS_ATOMIC> lock(*GetQueueLock<RegionFlag::IS_PINNED>());
        auto itRegion = std::find(regionQueue->begin(), regionQueue->end(), region);
        if (itRegion != regionQueue->end()) {
            regionQueue->erase(itRegion);
        }
    }
}

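// Creates a TLAB of at least `size` bytes. A partially used eden region with enough
// free space is taken from retainedTlabs_ when possible; otherwise a fresh eden region
// is created, and whatever space remains afterwards is retained for future TLABs.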
template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateTLAB(size_t size)
{
    ASSERT(size <= GetMaxRegularObjectSize());
    ASSERT(AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT)) == size);
    TLAB *tlab = nullptr;

    {
        os::memory::LockHolder lock(this->regionLock_);
        Region *region = nullptr;
        // First, search in the partial TLAB map
        auto largestTlab = retainedTlabs_.begin();
        if (largestTlab != retainedTlabs_.end() && largestTlab->first >= size) {
            region = largestTlab->second;
            LOG(DEBUG, ALLOC) << "Use retained tlabs region " << region;
            retainedTlabs_.erase(largestTlab);
            ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
        }

        // Allocate a free region if no partial TLAB has enough space
        if (region == nullptr) {
            region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_EDEN);
            if (LIKELY(region != nullptr)) {
                region->CreateTLABSupport();
            }
        }
        if (region != nullptr) {
            tlab = CreateTLABInRegion(region, size);
            auto remainingSize = region->GetRemainingSizeForTLABs();
            if (remainingSize >= size) {
                LOG(DEBUG, ALLOC) << "Add a region " << region << " with remaining size " << remainingSize
                                  << " to retained_tlabs";
                retainedTlabs_.insert(std::make_pair(remainingSize, region));
            }
        }
    }

    return tlab;
}

template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateRegionSizeTLAB()
{
    TLAB *tlab = nullptr;

    os::memory::LockHolder lock(this->regionLock_);
    Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_EDEN);
    if (LIKELY(region != nullptr)) {
        region->CreateTLABSupport();
        size_t size = region->GetRemainingSizeForTLABs();
        tlab = CreateTLABInRegion(region, size);
    }

    return tlab;
}

template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateTLABInRegion(Region *region, size_t size)
{
    // We don't reuse the same memory for different TLABs, so the region's
    // remaining size for TLABs is updated here
    TLAB *tlab = region->CreateTLAB(size);
    ASSERT(tlab != nullptr);
    LOG(DEBUG, ALLOC) << "Found a region " << region << " and created tlab " << tlab << " with memory starting at "
                      << tlab->GetStartAddr() << " and with size " << tlab->GetSize();
    return tlab;
}

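// Collects all collectible regions (not eden, not reserved, without pinned objects)
// into a priority queue keyed by their garbage bytes, so the caller can pick the most
// profitable regions to collect first.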
template <typename AllocConfigT, typename LockConfigT>
template <bool INCLUDE_CURRENT_REGION>
PandaPriorityQueue<std::pair<uint32_t, Region *>> RegionAllocator<AllocConfigT, LockConfigT>::GetTopGarbageRegions()
{
    PandaPriorityQueue<std::pair<uint32_t, Region *>> queue;
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (region->HasFlag(IS_EDEN) || region->HasFlag(RegionFlag::IS_RESERVED) || region->HasPinnedObjects()) {
            return;
        }
        if constexpr (!INCLUDE_CURRENT_REGION) {
            if (IsInCurrentRegion<true, RegionFlag::IS_OLD>(region)) {
                return;
            }
        }
        auto garbageBytes = region->GetGarbageBytes();
        queue.push(std::pair<uint32_t, Region *>(garbageBytes, region));
    });
    return queue;
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE>
PandaVector<Region *> RegionAllocator<AllocConfigT, LockConfigT>::GetAllSpecificRegions()
{
    PandaVector<Region *> vector;
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (region->HasFlag(REGIONS_TYPE)) {
            vector.push_back(region);
        }
    });
    return vector;
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactAllSpecificRegions(const GCObjectVisitor &deathChecker,
                                                                           const ObjectVisitorEx &moveHandler)
{
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {  // NOLINT(bugprone-suspicious-semicolon)
        // NOTE(aemelenko): Implement this if we need to call this method with the same regions type.
        // There is an issue with IterateRegions during creation of a new one.
        ASSERT(REGIONS_TYPE_FROM != REGIONS_TYPE_TO);
        ResetCurrentRegion<false, REGIONS_TYPE_TO>();
    }
    this->GetSpace()->IterateRegions([this, &deathChecker, &moveHandler](Region *region) {
        if (!region->HasFlag(REGIONS_TYPE_FROM)) {
            return;
        }
        CompactSpecificRegion<REGIONS_TYPE_FROM, REGIONS_TYPE_TO, USE_MARKED_BITMAP>(region, deathChecker, moveHandler);
    });
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactSeveralSpecificRegions(const PandaVector<Region *> &regions,
                                                                               const GCObjectVisitor &deathChecker,
                                                                               const ObjectVisitorEx &moveHandler)
{
    for (auto i : regions) {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {
            [[maybe_unused]] bool foundRegion = IsInCurrentRegion<false, REGIONS_TYPE_TO>(i);
            ASSERT(!foundRegion);
        }
        CompactSpecificRegion<REGIONS_TYPE_FROM, REGIONS_TYPE_TO, USE_MARKED_BITMAP>(i, deathChecker, moveHandler);
    }
}

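// Evacuates one region: every object that is alive (per the death checker, or per the
// mark bitmap when USE_MARKED_BITMAP is set) is copied into a region of REGIONS_TYPE_TO,
// marked alive there, and reported to moveHandler together with its new address.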
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactSpecificRegion(Region *region,
                                                                       const GCObjectVisitor &deathChecker,
                                                                       const ObjectVisitorEx &moveHandler)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {
        // It is bad if we compact one region into itself.
        [[maybe_unused]] bool isCurrentRegion = IsInCurrentRegion<true, REGIONS_TYPE_TO>(region);
        ASSERT(!isCurrentRegion);
    }
    auto createNewRegion = [&]() {
        os::memory::LockHolder lock(this->regionLock_);
        Region *regionTo = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, REGIONS_TYPE_TO);
        ASSERT(regionTo != nullptr);
        return regionTo;
    };

    Region *regionTo = PopFromRegionQueue<true, REGIONS_TYPE_TO>();
    if (regionTo == nullptr) {
        regionTo = createNewRegion();
    }
    size_t liveBytes = 0;
    // Don't use atomics in this method because we work with a region that is not shared
    auto visitor = [&](ObjectHeader *object) {
        // If we use the mark bitmap, we iterate over alive objects only, so the death checker is not needed
        if constexpr (!USE_MARKED_BITMAP) {
            if (deathChecker(object) != ObjectStatus::ALIVE_OBJECT) {
                return;
            }
        }
        size_t objectSize = GetObjectSize(object);
        size_t alignedSize = AlignUp(objectSize, DEFAULT_ALIGNMENT_IN_BYTES);
        void *dst = regionTo->template Alloc<false>(alignedSize);
        if (dst == nullptr) {
            regionTo->SetLiveBytes(regionTo->GetLiveBytes() + liveBytes);
            liveBytes = 0;
            regionTo = createNewRegion();
            dst = regionTo->template Alloc<false>(alignedSize);
        }
        // Don't initialize memory for the object here because we will use memcpy anyway
        ASSERT(dst != nullptr);
        memcpy_s(dst, objectSize, object, objectSize);
        // The moved object needs to be marked as alive
        ASSERT(regionTo->GetLiveBitmap() != nullptr);
        regionTo->IncreaseAllocatedObjects();
        regionTo->GetLiveBitmap()->Set(dst);
        liveBytes += alignedSize;
        moveHandler(object, static_cast<ObjectHeader *>(dst));
    };

    ASSERT(region->HasFlag(REGIONS_TYPE_FROM));

    const std::function<void(ObjectHeader *)> visitorFunctor(visitor);
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (USE_MARKED_BITMAP) {
        region->GetMarkBitmap()->IterateOverMarkedChunks(
            [&visitorFunctor](void *objectAddr) { visitorFunctor(static_cast<ObjectHeader *>(objectAddr)); });
    } else {  // NOLINT(readability-misleading-indentation)
        region->IterateOverObjects(visitorFunctor);
    }
    regionTo->SetLiveBytes(regionTo->GetLiveBytes() + liveBytes);

    PushToRegionQueue<true, REGIONS_TYPE_TO>(regionTo);
}

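// Lazily sets aside one spare region (flagged IS_RESERVED) so that a region is
// guaranteed to be available later; it is given back via ReleaseReservedRegion.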
template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::ReserveRegionIfNeeded()
{
    if (reservedRegion_ != nullptr) {
        return;
    }
    reservedRegion_ = this->GetSpace()->NewRegion(REGION_SIZE, RegionFlag::IS_OLD, RegionFlag::IS_RESERVED);
    ASSERT(reservedRegion_ != nullptr);
    reservedRegion_->RmvFlag(RegionFlag::IS_OLD);
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::ReleaseReservedRegion()
{
    ASSERT(reservedRegion_ != nullptr);
    this->GetSpace()->template FreeRegion<RegionSpace::ReleaseRegionsPolicy::NoRelease, OSPagesPolicy::NO_RETURN>(
        reservedRegion_);
    reservedRegion_ = nullptr;
}

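// Promotes an eden region to the tenured generation in place: the live bitmap is built
// from the mark bitmap (or via the death checker), aliveObjectsHandler is invoked for
// every live object, and the region is re-registered in the space as an old region.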
template <typename AllocConfigT, typename LockConfigT>
template <bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::PromoteYoungRegion(Region *region, const GCObjectVisitor &deathChecker,
                                                                    const ObjectVisitor &aliveObjectsHandler)
{
    ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
    // We should create the live bitmap here and copy the alive objects from the mark bitmap to it
    region->CreateLiveBitmap();
    region->CloneMarkBitmapToLiveBitmap();
    auto visitor = [&aliveObjectsHandler, &region](ObjectHeader *object) {
        aliveObjectsHandler(object);
        region->IncreaseAllocatedObjects();
    };
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (USE_MARKED_BITMAP) {
        region->GetMarkBitmap()->IterateOverMarkedChunks(
            [&visitor](void *objectAddr) { visitor(static_cast<ObjectHeader *>(objectAddr)); });
    } else {  // NOLINT(readability-misleading-indentation)
        auto liveCheckVisitor = [&visitor, &deathChecker](ObjectHeader *object) {
            if (deathChecker(object) == ObjectStatus::ALIVE_OBJECT) {
                visitor(object);
            }
        };
        region->IterateOverObjects(liveCheckVisitor);
    }
    // The value set here is not accurate; it will be updated later
    region->SetLiveBytes(region->GetAllocatedBytes());
    this->GetSpace()->PromoteYoungRegion(region);
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE>
void RegionAllocator<AllocConfigT, LockConfigT>::ResetAllSpecificRegions()
{
    ResetCurrentRegion<false, REGIONS_TYPE>();
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (!region->HasFlag(REGIONS_TYPE)) {
            return;
        }
        this->GetSpace()->template FreeRegion<RegionSpace::ReleaseRegionsPolicy::NoRelease>(region);
    });
    if constexpr (REGIONS_TYPE == RegionFlag::IS_EDEN) {
        retainedTlabs_.clear();
    }
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE, RegionSpace::ReleaseRegionsPolicy REGIONS_RELEASE_POLICY,
          OSPagesPolicy OS_PAGES_POLICY, bool NEED_LOCK, typename Container>
void RegionAllocator<AllocConfigT, LockConfigT>::ResetSeveralSpecificRegions(const Container &regions)
{
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(this->regionLock_);
    ASSERT(REGIONS_TYPE != RegionFlag::IS_EDEN);
    ASSERT((REGIONS_TYPE != RegionFlag::IS_EDEN) || (retainedTlabs_.empty()));
    for (Region *region : regions) {
        ASSERT(!(IsInCurrentRegion<false, REGIONS_TYPE>(region)));
        ASSERT(region->HasFlag(REGIONS_TYPE));
        this->GetSpace()->template FreeRegion<REGIONS_RELEASE_POLICY, OS_PAGES_POLICY>(region);
    }
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::RegionNonmovableAllocator(
    MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType, size_t initSpaceSize, bool extend)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, ObjectAllocator::GetAllocatorType(), initSpaceSize,
                                       extend, REGION_SIZE, 0),
      objectAllocator_(memStats, spaceType)
{
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::RegionNonmovableAllocator(
    MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType, RegionPool *sharedRegionPool)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, ObjectAllocator::GetAllocatorType(),
                                       sharedRegionPool, 0),
      objectAllocator_(memStats, spaceType)
{
}

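// Allocates a non-movable object through the underlying object allocator, growing the
// space region by region on demand, and marks the new object in the live bitmap of the
// region that backs the returned memory.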
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void *RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    ASSERT(alignSize <= ObjectAllocator::GetMaxSize());

    void *mem = objectAllocator_.Alloc(alignSize);
    if (UNLIKELY(mem == nullptr)) {
        mem = NewRegionAndRetryAlloc(size, align);
        if (UNLIKELY(mem == nullptr)) {
            return nullptr;
        }
    }
    auto liveBitmap = this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap();
    ASSERT(liveBitmap != nullptr);
    liveBitmap->AtomicTestAndSet(mem);
    return mem;
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Free(void *mem)
{
    this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap()->AtomicTestAndClear(mem);

    objectAllocator_.Free(mem);
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Collect(const GCObjectVisitor &deathChecker)
{
    os::memory::LockHolder lock(this->regionLock_);
    objectAllocator_.Collect(deathChecker);
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::VisitAndRemoveFreeRegions(
    const RegionsVisitor &regionVisitor)
{
    os::memory::LockHolder lock(this->regionLock_);
    // Collect the free regions into a vector so that we don't do extra work
    // with regionVisitor inside objectAllocator_.
    PandaVector<Region *> freeRegions;

    objectAllocator_.VisitAndRemoveFreePools([&freeRegions](void *mem, [[maybe_unused]] size_t size) {
        auto *region = AddrToRegion(mem);
        ASSERT(ToUintPtr(mem) + size == region->End());
        // We don't remove this region here because we don't want to do extra work with the visitor here.
        freeRegions.push_back(region);
    });

    if (!freeRegions.empty()) {
        regionVisitor(freeRegions);

        for (auto i : freeRegions) {
            this->GetSpace()->FreeRegion(i);
        }
    }
}

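// Slow path for non-movable allocation: keeps creating fresh non-movable regions and
// adding them to the object allocator as memory pools until the allocation succeeds
// or no more regions can be created.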
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void *RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::NewRegionAndRetryAlloc(size_t objectSize,
                                                                                                    Alignment align)
{
    os::memory::LockHolder lock(this->regionLock_);
    size_t poolHeadSize = AlignUp(Region::HeadSize(), ObjectAllocator::PoolAlign());
    ASSERT(AlignUp(poolHeadSize + objectSize, REGION_SIZE) == REGION_SIZE);
    while (true) {
        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_NONMOVABLE);
        if (UNLIKELY(region == nullptr)) {
            return nullptr;
        }
        ASSERT(region->GetLiveBitmap() != nullptr);
        uintptr_t alignedPool = ToUintPtr(region) + poolHeadSize;
        bool addedMemoryPool = objectAllocator_.AddMemoryPool(ToVoidPtr(alignedPool), REGION_SIZE - poolHeadSize);
        ASSERT(addedMemoryPool);
        if (UNLIKELY(!addedMemoryPool)) {
            LOG(FATAL, ALLOC) << "ObjectAllocator: couldn't add memory pool to allocator";
        }
        void *mem = objectAllocator_.Alloc(objectSize, align);
        if (LIKELY(mem != nullptr)) {
            return mem;
        }
    }
    return nullptr;
}

template <typename AllocConfigT, typename LockConfigT>
RegionHumongousAllocator<AllocConfigT, LockConfigT>::RegionHumongousAllocator(MemStatsType *memStats,
                                                                              GenerationalSpaces *spaces,
                                                                              SpaceType spaceType)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, 0, true,
                                       REGION_SIZE, 0)
{
}

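// Allocates a humongous object in its own large region. The region memory is requested
// with the ZEROED_MEMORY policy, so no extra memory initialization is needed here.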
template <typename AllocConfigT, typename LockConfigT>
template <bool UPDATE_MEMSTATS>
void *RegionHumongousAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    Region *region = nullptr;
    void *mem = nullptr;
    // Allocate a separate large region for the object
    {
        os::memory::LockHolder lock(this->regionLock_);
        region = this->template CreateAndSetUpNewRegion<AllocConfigT, OSPagesAllocPolicy::ZEROED_MEMORY>(
            Region::RegionSize(alignSize, REGION_SIZE), IS_OLD, IS_LARGE_OBJECT);
        if (LIKELY(region != nullptr)) {
            mem = region->Alloc<false>(alignSize);
            ASSERT(mem != nullptr);
            ASSERT(region->GetLiveBitmap() != nullptr);
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (UPDATE_MEMSTATS) {
                AllocConfigT::OnAlloc(region->Size(), this->spaceType_, this->memStats_);
                // We don't initialize the memory here because the requested memory
                // is already zeroed
            }
            // Do it after memory init because this memory can be reached once the live bitmap is set
            region->GetLiveBitmap()->AtomicTestAndSet(mem);
        }
    }
    return mem;
}

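// Collects humongous regions: a region whose single large object is dead is flagged
// IS_FREE, all such regions are passed to regionVisitor, and then returned to the
// region space.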
template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::CollectAndRemoveFreeRegions(
    const RegionsVisitor &regionVisitor, const GCObjectVisitor &deathChecker)
{
    // Collect the free regions into a vector so that we don't do extra work with regionVisitor
    // during region iteration
    PandaVector<Region *> freeRegions;

    {
        os::memory::LockHolder lock(this->regionLock_);
        this->GetSpace()->IterateRegions([this, &deathChecker, &freeRegions](Region *region) {
            this->Collect(region, deathChecker);
            if (region->HasFlag(IS_FREE)) {
                freeRegions.push_back(region);
            }
        });
    }

    if (!freeRegions.empty()) {
        regionVisitor(freeRegions);

        for (auto i : freeRegions) {
            os::memory::LockHolder lock(this->regionLock_);
            ResetRegion(i);
        }
    }
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::Collect(Region *region, const GCObjectVisitor &deathChecker)
{
    ASSERT(region->HasFlag(RegionFlag::IS_LARGE_OBJECT));
    ObjectHeader *objectToProceed = nullptr;
    objectToProceed = region->GetLargeObject();
    if (deathChecker(objectToProceed) == ObjectStatus::DEAD_OBJECT) {
        region->AddFlag(RegionFlag::IS_FREE);
    }
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::ResetRegion(Region *region)
{
    ASSERT(region->HasFlag(RegionFlag::IS_FREE));
    region->RmvFlag(RegionFlag::IS_FREE);
    this->GetSpace()->FreeRegion(region);
}

template <typename AllocConfigT, typename LockConfigT>
using RegionRunslotsAllocator = RegionNonmovableAllocator<AllocConfigT, LockConfigT, RunSlotsAllocator<AllocConfigT>>;

template <typename AllocConfigT, typename LockConfigT>
using RegionFreeListAllocator = RegionNonmovableAllocator<AllocConfigT, LockConfigT, FreeListAllocator<AllocConfigT>>;

}  // namespace ark::mem

#endif  // PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H